##// END OF EJS Templates
caches: new cache implementation for remote functions
marcink -
r739:6b84a339 default
parent child Browse files
Show More
@@ -1,891 +1,994 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 18 import collections
18 19 import logging
19 20 import os
20 21 import posixpath as vcspath
21 22 import re
22 23 import stat
23 24 import traceback
24 25 import urllib
25 26 import urllib2
26 27 from functools import wraps
27 28
28 29 import more_itertools
29 30 import pygit2
30 31 from pygit2 import Repository as LibGit2Repo
31 32 from dulwich import index, objects
32 33 from dulwich.client import HttpGitClient, LocalGitClient
33 34 from dulwich.errors import (
34 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 36 MissingCommitError, ObjectMissing, HangupException,
36 37 UnexpectedCommandError)
37 38 from dulwich.repo import Repo as DulwichRepo
38 39 from dulwich.server import update_server_info
39 40
40 41 from vcsserver import exceptions, settings, subprocessio
41 42 from vcsserver.utils import safe_str
42 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 44 from vcsserver.hgcompat import (
44 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 46 from vcsserver.git_lfs.lib import LFSOidStore
46 47
47 48 DIR_STAT = stat.S_IFDIR
48 49 FILE_MODE = stat.S_IFMT
49 50 GIT_LINK = objects.S_IFGITLINK
51 PEELED_REF_MARKER = '^{}'
52
50 53
51 54 log = logging.getLogger(__name__)
52 55
53 56
def str_to_dulwich(value):
    """
    Decode a wire byte string to `unicode`, as required by Dulwich 0.10.1a.
    """
    encoding = settings.WIRE_ENCODING
    return value.decode(encoding)
63
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException,
                MissingCommitError, ObjectMissing) as e:
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
76 86
77 87
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """
    def __del__(self):
        # only close when the object store was actually initialized
        if not hasattr(self, 'object_store'):
            return
        self.close()
89 99
90 100
class Repository(LibGit2Repo):
    """Context-manager wrapper around pygit2's repository object."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # release libgit2 resources as soon as the context exits
        self.free()
98 108
99 109
class GitFactory(RepoFactory):
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # pygit2-backed repository for the fast code paths, dulwich otherwise
        if use_libgit2:
            return Repository(wire['path'])
        return Repo(str_to_dulwich(wire['path']))

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        return self.repo(wire, use_libgit2=True)
130 128
131 129
132 130 class GitRemote(object):
133 131
134 132 def __init__(self, factory):
135 133 self._factory = factory
136 self.peeled_ref_marker = '^{}'
137 134 self._bulk_methods = {
138 135 "date": self.date,
139 136 "author": self.author,
137 "branch": self.branch,
140 138 "message": self.message,
141 139 "parents": self.parents,
142 140 "_commit": self.revision,
143 141 }
142 self.region = self._factory._cache_region
144 143
145 144 def _wire_to_config(self, wire):
146 145 if 'config' in wire:
147 146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 147 return {}
149 148
150 149 def _remote_conf(self, config):
151 150 params = [
152 151 '-c', 'core.askpass=""',
153 152 ]
154 153 ssl_cert_dir = config.get('vcs_ssl_dir')
155 154 if ssl_cert_dir:
156 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 156 return params
158 157
158 def _cache_on(self, wire):
159 context = wire.get('context', '')
160 context_uid = '{}'.format(context)
161 repo_id = wire.get('repo_id', '')
162 cache = wire.get('cache', True)
163 cache_on = context and cache
164 return cache_on, context_uid, repo_id
165
166 @reraise_safe_exceptions
167 def discover_git_version(self):
168 stdout, _ = self.run_git_command(
169 {}, ['--version'], _bare=True, _safe=True)
170 prefix = 'git version'
171 if stdout.startswith(prefix):
172 stdout = stdout[len(prefix):]
173 return stdout.strip()
174
159 175 @reraise_safe_exceptions
160 176 def is_empty(self, wire):
161 177 repo_init = self._factory.repo_libgit2(wire)
162 178 with repo_init as repo:
163 179
164 180 try:
165 181 has_head = repo.head.name
166 182 if has_head:
167 183 return False
168 184
169 185 # NOTE(marcink): check again using more expensive method
170 186 return repo.is_empty
171 187 except Exception:
172 188 pass
173 189
174 190 return True
175 191
176 192 @reraise_safe_exceptions
177 193 def add_object(self, wire, content):
178 194 repo_init = self._factory.repo_libgit2(wire)
179 195 with repo_init as repo:
180 196 blob = objects.Blob()
181 197 blob.set_raw_string(content)
182 198 repo.object_store.add_object(blob)
183 199 return blob.id
184 200
185 201 @reraise_safe_exceptions
186 202 def assert_correct_path(self, wire):
187 try:
188 repo_init = self._factory.repo_libgit2(wire)
189 with repo_init as repo:
190 pass
191 except pygit2.GitError:
192 path = wire.get('path')
193 tb = traceback.format_exc()
194 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
195 return False
203 cache_on, context_uid, repo_id = self._cache_on(wire)
204 @self.region.conditional_cache_on_arguments(condition=cache_on)
205 def _assert_correct_path(_context_uid, _repo_id):
206 try:
207 repo_init = self._factory.repo_libgit2(wire)
208 with repo_init as repo:
209 pass
210 except pygit2.GitError:
211 path = wire.get('path')
212 tb = traceback.format_exc()
213 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
214 return False
196 215
197 return True
216 return True
217 return _assert_correct_path(context_uid, repo_id)
198 218
199 219 @reraise_safe_exceptions
200 220 def bare(self, wire):
201 221 repo_init = self._factory.repo_libgit2(wire)
202 222 with repo_init as repo:
203 223 return repo.is_bare
204 224
205 225 @reraise_safe_exceptions
206 226 def blob_as_pretty_string(self, wire, sha):
207 227 repo_init = self._factory.repo_libgit2(wire)
208 228 with repo_init as repo:
209 229 blob_obj = repo[sha]
210 230 blob = blob_obj.data
211 231 return blob
212 232
213 233 @reraise_safe_exceptions
214 234 def blob_raw_length(self, wire, sha):
215 repo_init = self._factory.repo_libgit2(wire)
216 with repo_init as repo:
217 blob = repo[sha]
218 return blob.size
235 cache_on, context_uid, repo_id = self._cache_on(wire)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 def _blob_raw_length(_context_uid, _repo_id, _sha):
238
239 repo_init = self._factory.repo_libgit2(wire)
240 with repo_init as repo:
241 blob = repo[sha]
242 return blob.size
243
244 return _blob_raw_length(context_uid, repo_id, sha)
219 245
220 246 def _parse_lfs_pointer(self, raw_content):
221 247
222 248 spec_string = 'version https://git-lfs.github.com/spec'
223 249 if raw_content and raw_content.startswith(spec_string):
224 250 pattern = re.compile(r"""
225 251 (?:\n)?
226 252 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
227 253 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
228 254 ^size[ ](?P<oid_size>[0-9]+)\n
229 255 (?:\n)?
230 256 """, re.VERBOSE | re.MULTILINE)
231 257 match = pattern.match(raw_content)
232 258 if match:
233 259 return match.groupdict()
234 260
235 261 return {}
236 262
237 263 @reraise_safe_exceptions
238 264 def is_large_file(self, wire, sha):
239 repo_init = self._factory.repo_libgit2(wire)
240 with repo_init as repo:
241 blob = repo[sha]
242 if blob.is_binary:
243 return {}
244 265
245 return self._parse_lfs_pointer(blob.data)
266 cache_on, context_uid, repo_id = self._cache_on(wire)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
268 def _is_large_file(_context_uid, _repo_id, _sha):
269 repo_init = self._factory.repo_libgit2(wire)
270 with repo_init as repo:
271 blob = repo[sha]
272 if blob.is_binary:
273 return {}
274
275 return self._parse_lfs_pointer(blob.data)
276
277 return _is_large_file(context_uid, repo_id, sha)
246 278
247 279 @reraise_safe_exceptions
248 280 def in_largefiles_store(self, wire, oid):
249 281 conf = self._wire_to_config(wire)
250 282 repo_init = self._factory.repo_libgit2(wire)
251 283 with repo_init as repo:
252 284 repo_name = repo.path
253 285
254 286 store_location = conf.get('vcs_git_lfs_store_location')
255 287 if store_location:
256 288
257 289 store = LFSOidStore(
258 290 oid=oid, repo=repo_name, store_location=store_location)
259 291 return store.has_oid()
260 292
261 293 return False
262 294
263 295 @reraise_safe_exceptions
264 296 def store_path(self, wire, oid):
265 297 conf = self._wire_to_config(wire)
266 298 repo_init = self._factory.repo_libgit2(wire)
267 299 with repo_init as repo:
268 300 repo_name = repo.path
269 301
270 302 store_location = conf.get('vcs_git_lfs_store_location')
271 303 if store_location:
272 304 store = LFSOidStore(
273 305 oid=oid, repo=repo_name, store_location=store_location)
274 306 return store.oid_path
275 307 raise ValueError('Unable to fetch oid with path {}'.format(oid))
276 308
277 309 @reraise_safe_exceptions
278 310 def bulk_request(self, wire, rev, pre_load):
279 result = {}
280 for attr in pre_load:
281 try:
282 method = self._bulk_methods[attr]
283 args = [wire, rev]
284 result[attr] = method(*args)
285 except KeyError as e:
286 raise exceptions.VcsException(e)("Unknown bulk attribute: %s" % attr)
287 return result
311 cache_on, context_uid, repo_id = self._cache_on(wire)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 def _bulk_request(_context_uid, _repo_id, _rev, _pre_load):
314 result = {}
315 for attr in pre_load:
316 try:
317 method = self._bulk_methods[attr]
318 args = [wire, rev]
319 result[attr] = method(*args)
320 except KeyError as e:
321 raise exceptions.VcsException(e)(
322 "Unknown bulk attribute: %s" % attr)
323 return result
324
325 return _bulk_request(context_uid, repo_id, rev, sorted(pre_load))
288 326
289 327 def _build_opener(self, url):
290 328 handlers = []
291 329 url_obj = url_parser(url)
292 330 _, authinfo = url_obj.authinfo()
293 331
294 332 if authinfo:
295 333 # create a password manager
296 334 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
297 335 passmgr.add_password(*authinfo)
298 336
299 337 handlers.extend((httpbasicauthhandler(passmgr),
300 338 httpdigestauthhandler(passmgr)))
301 339
302 340 return urllib2.build_opener(*handlers)
303 341
304 342 def _type_id_to_name(self, type_id):
305 343 return {
306 344 1: b'commit',
307 345 2: b'tree',
308 346 3: b'blob',
309 347 4: b'tag'
310 348 }[type_id]
311 349
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Check that *url* is reachable and serves a git repository.

        Probes the smart-HTTP endpoint (``info/refs`` with
        ``service=git-upload-pack``) and falls back to recognizing the
        dumb-protocol ref listing.  Returns True on success.

        :raises exceptions.URLError: when the URL cannot be opened or the
            response does not look like a git repository
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # mask credentials before the URL reaches the logs
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
354 392
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """
        Clone by pulling all refs from *url*, then applying only the refs
        whose names start with *valid_refs* and do not end with *deferred*.

        When *update_after_clone* is set, HEAD is checked out afterwards.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith needs a tuple for multiple prefixes
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
373 411
    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        """
        Return the short names of all branches whose head is *commit_id*.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        # NOTE(review): caching is explicitly disabled here — presumably
        # because branch heads move too often to cache safely; confirm
        # before removing this override.
        cache_on = False
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                # ref is a (name, sha) pair from get_refs()
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = filter(filter_with, self.get_refs(wire).items())
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)
427
428 @reraise_safe_exceptions
429 def commit_branches(self, wire, commit_id):
430 cache_on, context_uid, repo_id = self._cache_on(wire)
431 @self.region.conditional_cache_on_arguments(condition=cache_on)
432 def _commit_branches(_context_uid, _repo_id, _commit_id):
433 repo_init = self._factory.repo_libgit2(wire)
434 with repo_init as repo:
435 branches = [x for x in repo.branches.with_commit(_commit_id)]
436 return branches
437
438 return _commit_branches(context_uid, repo_id, commit_id)
439
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Write a commit on *branch* built from ``commit_data``.

        :param commit_data: dict of dulwich Commit attributes (author,
            message, ...) set verbatim on the new commit object
        :param commit_tree: id of the tree to start from, or falsy for a
            fresh empty tree
        :param updated: list of node dicts (``path``, ``node_path``,
            ``content``, ``mode``) to add or modify
        :param removed: list of paths to delete
        :returns: the id of the newly created commit
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
475 541
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from *url* into the repository described by *wire*.

        :param apply_refs: when True, write the fetched refs into the repo
        :param refs: optional list of ref names to restrict the fetch to
        :param update_after: when True, also check out the fetched HEAD
        :returns: mapping of remote ref name -> sha
        :raises exceptions.AbortException: when *url* is not a git repository
        """
        # local paths go through dulwich's local client, everything else HTTP
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
528 594
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None):
        """
        Fetch refs from *url* using the git binary.

        Remote refs are listed with ``git ls-remote`` first; HEAD and peeled
        tag refs are skipped, and when *refs* is given only refs whose sha is
        listed there are fetched.

        :returns: ordered mapping of remote ref name -> sha
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]
        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        output, __ = self.run_git_command(
            wire, ['ls-remote', url], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
        if fetch_refs:
            # fetch in chunks to keep the command line short enough
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
576 642
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        """
        Mirror-push all refs to *url* using the git binary.

        :param refs: accepted for interface symmetry; not used here —
            ``--mirror`` always pushes everything.
        """
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        # NOTE(review): result unused — presumably called to validate the
        # local repo path before pushing; confirm before removing.
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})
587 653
588 654 @reraise_safe_exceptions
589 655 def get_remote_refs(self, wire, url):
590 656 repo = Repo(url)
591 657 return repo.get_refs()
592 658
593 659 @reraise_safe_exceptions
594 660 def get_description(self, wire):
595 661 repo = self._factory.repo(wire)
596 662 return repo.get_description()
597 663
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """
        Return commit ids reachable from *rev2* but not from *rev1*.

        Both repositories first fetch from each other so the walk below can
        see all objects; *path2* is the filesystem path of the second repo.
        """
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs
612 678
    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        """
        Resolve *sha* to basic object metadata, cached per repo context.

        Tag objects are dereferenced to the commit they point at.

        :returns: dict with ``id``, ``type``, ``commit_id`` and ``idx`` keys
        :raises exceptions.LookupException: when the commit does not exist
            or is dangling (not reachable from any branch)
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except (KeyError, ValueError) as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                if isinstance(commit, pygit2.Tag):
                    commit = repo.get(commit.target)

                # check for dangling commit
                branches = [x for x in repo.branches.with_commit(commit.hex)]
                if not branches:
                    raise exceptions.LookupException(None)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
641 713
642 714 @reraise_safe_exceptions
643 715 def get_refs(self, wire):
644 repo_init = self._factory.repo_libgit2(wire)
645 with repo_init as repo:
646 result = {}
647 for ref in repo.references:
648 peeled_sha = repo.lookup_reference(ref).peel()
649 result[ref] = peeled_sha.hex
716 cache_on, context_uid, repo_id = self._cache_on(wire)
717 @self.region.conditional_cache_on_arguments(condition=cache_on)
718 def _get_refs(_context_uid, _repo_id):
719
720 repo_init = self._factory.repo_libgit2(wire)
721 with repo_init as repo:
722 regex = re.compile('^refs/(heads|tags)/')
723 return {x.name: x.target.hex for x in
724 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
725
726 return _get_refs(context_uid, repo_id)
650 727
651 return result
728 @reraise_safe_exceptions
729 def get_branch_pointers(self, wire):
730 cache_on, context_uid, repo_id = self._cache_on(wire)
731 @self.region.conditional_cache_on_arguments(condition=cache_on)
732 def _get_branch_pointers(_context_uid, _repo_id):
733
734 repo_init = self._factory.repo_libgit2(wire)
735 regex = re.compile('^refs/heads')
736 with repo_init as repo:
737 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
738 return {x.target.hex: x.shorthand for x in branches}
739
740 return _get_branch_pointers(context_uid, repo_id)
652 741
653 742 @reraise_safe_exceptions
654 743 def head(self, wire, show_exc=True):
655 repo_init = self._factory.repo_libgit2(wire)
656 with repo_init as repo:
657 try:
658 return repo.head.peel().hex
659 except Exception:
660 if show_exc:
661 raise
744 cache_on, context_uid, repo_id = self._cache_on(wire)
745 @self.region.conditional_cache_on_arguments(condition=cache_on)
746 def _head(_context_uid, _repo_id, _show_exc):
747 repo_init = self._factory.repo_libgit2(wire)
748 with repo_init as repo:
749 try:
750 return repo.head.peel().hex
751 except Exception:
752 if show_exc:
753 raise
754 return _head(context_uid, repo_id, show_exc)
662 755
663 756 @reraise_safe_exceptions
664 757 def init(self, wire):
665 758 repo_path = str_to_dulwich(wire['path'])
666 759 self.repo = Repo.init(repo_path)
667 760
668 761 @reraise_safe_exceptions
669 762 def init_bare(self, wire):
670 763 repo_path = str_to_dulwich(wire['path'])
671 764 self.repo = Repo.init_bare(repo_path)
672 765
673 766 @reraise_safe_exceptions
674 767 def revision(self, wire, rev):
675 repo_init = self._factory.repo_libgit2(wire)
676 with repo_init as repo:
677 commit = repo[rev]
678 obj_data = {
679 'id': commit.id.hex,
680 }
681 # tree objects itself don't have tree_id attribute
682 if hasattr(commit, 'tree_id'):
683 obj_data['tree'] = commit.tree_id.hex
684 768
685 return obj_data
769 cache_on, context_uid, repo_id = self._cache_on(wire)
770 @self.region.conditional_cache_on_arguments(condition=cache_on)
771 def _revision(_context_uid, _repo_id, _rev):
772 repo_init = self._factory.repo_libgit2(wire)
773 with repo_init as repo:
774 commit = repo[rev]
775 obj_data = {
776 'id': commit.id.hex,
777 }
778 # tree objects itself don't have tree_id attribute
779 if hasattr(commit, 'tree_id'):
780 obj_data['tree'] = commit.tree_id.hex
781
782 return obj_data
783 return _revision(context_uid, repo_id, rev)
686 784
687 785 @reraise_safe_exceptions
688 786 def date(self, wire, rev):
689 787 repo_init = self._factory.repo_libgit2(wire)
690 788 with repo_init as repo:
691 789 commit = repo[rev]
692 790 # TODO(marcink): check dulwich difference of offset vs timezone
693 791 return [commit.commit_time, commit.commit_time_offset]
694 792
695 793 @reraise_safe_exceptions
696 794 def author(self, wire, rev):
697 795 repo_init = self._factory.repo_libgit2(wire)
698 796 with repo_init as repo:
699 797 commit = repo[rev]
700 798 if commit.author.email:
701 799 return u"{} <{}>".format(commit.author.name, commit.author.email)
702 800
703 801 return u"{}".format(commit.author.raw_name)
704 802
705 803 @reraise_safe_exceptions
706 804 def message(self, wire, rev):
707 805 repo_init = self._factory.repo_libgit2(wire)
708 806 with repo_init as repo:
709 807 commit = repo[rev]
710 808 return commit.message
711 809
712 810 @reraise_safe_exceptions
713 811 def parents(self, wire, rev):
714 repo_init = self._factory.repo_libgit2(wire)
715 with repo_init as repo:
716 commit = repo[rev]
717 return [x.hex for x in commit.parent_ids]
812 cache_on, context_uid, repo_id = self._cache_on(wire)
813 @self.region.conditional_cache_on_arguments(condition=cache_on)
814 def _parents(_context_uid, _repo_id, _rev):
815 repo_init = self._factory.repo_libgit2(wire)
816 with repo_init as repo:
817 commit = repo[rev]
818 return [x.hex for x in commit.parent_ids]
819 return _parents(context_uid, repo_id, rev)
718 820
719 821 @reraise_safe_exceptions
720 822 def set_refs(self, wire, key, value):
721 823 repo_init = self._factory.repo_libgit2(wire)
722 824 with repo_init as repo:
723 825 repo.references.create(key, value, force=True)
724 826
725 827 @reraise_safe_exceptions
726 828 def create_branch(self, wire, branch_name, commit_id, force=False):
727 829 repo_init = self._factory.repo_libgit2(wire)
728 830 with repo_init as repo:
729 831 commit = repo[commit_id]
730 832
731 833 if force:
732 834 repo.branches.local.create(branch_name, commit, force=force)
733 835 elif not repo.branches.get(branch_name):
734 836 # create only if that branch isn't existing
735 837 repo.branches.local.create(branch_name, commit, force=force)
736 838
737 839 @reraise_safe_exceptions
738 840 def remove_ref(self, wire, key):
739 841 repo_init = self._factory.repo_libgit2(wire)
740 842 with repo_init as repo:
741 843 repo.references.delete(key)
742 844
743 845 @reraise_safe_exceptions
744 846 def tag_remove(self, wire, tag_name):
745 847 repo_init = self._factory.repo_libgit2(wire)
746 848 with repo_init as repo:
747 849 key = 'refs/tags/{}'.format(tag_name)
748 850 repo.references.delete(key)
749 851
750 852 @reraise_safe_exceptions
751 853 def tree_changes(self, wire, source_id, target_id):
752 854 # TODO(marcink): remove this seems it's only used by tests
753 855 repo = self._factory.repo(wire)
754 856 source = repo[source_id].tree if source_id else None
755 857 target = repo[target_id].tree
756 858 result = repo.object_store.tree_changes(source, target)
757 859 return list(result)
758 860
759 861 @reraise_safe_exceptions
760 862 def tree_and_type_for_path(self, wire, commit_id, path):
761 repo_init = self._factory.repo_libgit2(wire)
863
864 cache_on, context_uid, repo_id = self._cache_on(wire)
865 @self.region.conditional_cache_on_arguments(condition=cache_on)
866 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
867 repo_init = self._factory.repo_libgit2(wire)
762 868
763 with repo_init as repo:
764 commit = repo[commit_id]
765 try:
766 tree = commit.tree[path]
767 except KeyError:
768 return None, None, None
869 with repo_init as repo:
870 commit = repo[commit_id]
871 try:
872 tree = commit.tree[path]
873 except KeyError:
874 return None, None, None
769 875
770 return tree.id.hex, tree.type, tree.filemode
876 return tree.id.hex, tree.type, tree.filemode
877 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
771 878
772 879 @reraise_safe_exceptions
773 880 def tree_items(self, wire, tree_id):
774 repo_init = self._factory.repo_libgit2(wire)
881
882 cache_on, context_uid, repo_id = self._cache_on(wire)
883 @self.region.conditional_cache_on_arguments(condition=cache_on)
884 def _tree_items(_context_uid, _repo_id, _tree_id):
775 885
776 with repo_init as repo:
777 try:
778 tree = repo[tree_id]
779 except KeyError:
780 raise ObjectMissing('No tree with id: {}'.format(tree_id))
886 repo_init = self._factory.repo_libgit2(wire)
887 with repo_init as repo:
888 try:
889 tree = repo[tree_id]
890 except KeyError:
891 raise ObjectMissing('No tree with id: {}'.format(tree_id))
781 892
782 result = []
783 for item in tree:
784 item_sha = item.hex
785 item_mode = item.filemode
786 item_type = item.type
893 result = []
894 for item in tree:
895 item_sha = item.hex
896 item_mode = item.filemode
897 item_type = item.type
787 898
788 if item_type == 'commit':
789 # NOTE(marcink): submodules we translate to 'link' for backward compat
790 item_type = 'link'
899 if item_type == 'commit':
900 # NOTE(marcink): submodules we translate to 'link' for backward compat
901 item_type = 'link'
791 902
792 result.append((item.name, item_mode, item_sha, item_type))
793 return result
903 result.append((item.name, item_mode, item_sha, item_type))
904 return result
905 return _tree_items(context_uid, repo_id, tree_id)
794 906
795 907 @reraise_safe_exceptions
796 908 def update_server_info(self, wire):
797 909 repo = self._factory.repo(wire)
798 910 update_server_info(repo)
799 911
800 912 @reraise_safe_exceptions
801 def discover_git_version(self):
802 stdout, _ = self.run_git_command(
803 {}, ['--version'], _bare=True, _safe=True)
804 prefix = 'git version'
805 if stdout.startswith(prefix):
806 stdout = stdout[len(prefix):]
807 return stdout.strip()
808
809 @reraise_safe_exceptions
810 913 def get_all_commit_ids(self, wire):
811 914
812 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
813 try:
814 output, __ = self.run_git_command(wire, cmd)
815 return output.splitlines()
816 except Exception:
817 # Can be raised for empty repositories
818 return []
915 cache_on, context_uid, repo_id = self._cache_on(wire)
916 @self.region.conditional_cache_on_arguments(condition=cache_on)
917 def _get_all_commit_ids(_context_uid, _repo_id):
918
919 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
920 try:
921 output, __ = self.run_git_command(wire, cmd)
922 return output.splitlines()
923 except Exception:
924 # Can be raised for empty repositories
925 return []
926 return _get_all_commit_ids(context_uid, repo_id)
819 927
820 928 @reraise_safe_exceptions
821 929 def run_git_command(self, wire, cmd, **opts):
822 930 path = wire.get('path', None)
823 931
824 932 if path and os.path.isdir(path):
825 933 opts['cwd'] = path
826 934
827 935 if '_bare' in opts:
828 936 _copts = []
829 937 del opts['_bare']
830 938 else:
831 939 _copts = ['-c', 'core.quotepath=false', ]
832 940 safe_call = False
833 941 if '_safe' in opts:
834 942 # no exc on failure
835 943 del opts['_safe']
836 944 safe_call = True
837 945
838 946 if '_copts' in opts:
839 947 _copts.extend(opts['_copts'] or [])
840 948 del opts['_copts']
841 949
842 950 gitenv = os.environ.copy()
843 951 gitenv.update(opts.pop('extra_env', {}))
844 952 # need to clean fix GIT_DIR !
845 953 if 'GIT_DIR' in gitenv:
846 954 del gitenv['GIT_DIR']
847 955 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
848 956 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
849 957
850 958 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
851 959 _opts = {'env': gitenv, 'shell': False}
852 960
853 961 try:
854 962 _opts.update(opts)
855 963 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
856 964
857 965 return ''.join(p), ''.join(p.error)
858 966 except (EnvironmentError, OSError) as err:
859 967 cmd = ' '.join(cmd) # human friendly CMD
860 968 tb_err = ("Couldn't run git command (%s).\n"
861 969 "Original error was:%s\n"
862 970 "Call options:%s\n"
863 971 % (cmd, err, _opts))
864 972 log.exception(tb_err)
865 973 if safe_call:
866 974 return '', err
867 975 else:
868 976 raise exceptions.VcsException()(tb_err)
869 977
870 978 @reraise_safe_exceptions
871 979 def install_hooks(self, wire, force=False):
872 980 from vcsserver.hook_utils import install_git_hooks
873 repo = self._factory.repo(wire)
874 return install_git_hooks(repo.path, repo.bare, force_create=force)
981 bare = self.bare(wire)
982 path = wire['path']
983 return install_git_hooks(path, bare, force_create=force)
875 984
876 985 @reraise_safe_exceptions
877 986 def get_hooks_info(self, wire):
878 987 from vcsserver.hook_utils import (
879 988 get_git_pre_hook_version, get_git_post_hook_version)
880 repo = self._factory.repo(wire)
989 bare = self.bare(wire)
990 path = wire['path']
881 991 return {
882 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
883 'post_version': get_git_post_hook_version(repo.path, repo.bare),
992 'pre_version': get_git_pre_hook_version(path, bare),
993 'post_version': get_git_post_hook_version(path, bare),
884 994 }
885
886
887 def str_to_dulwich(value):
888 """
889 Dulwich 0.10.1a requires `unicode` objects to be passed in.
890 """
891 return value.decode(settings.WIRE_ENCODING)
@@ -1,874 +1,926 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30
31 31 import vcsserver
32 32 from vcsserver import exceptions
33 33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 34 from vcsserver.hgcompat import (
35 35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 39 RepoLookupError, InterventionRequired, RequirementError)
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 def make_ui_from_config(repo_config):
45 45
46 46 class LoggingUI(ui.ui):
47 47 def status(self, *msg, **opts):
48 48 log.info(' '.join(msg).rstrip('\n'))
49 49 super(LoggingUI, self).status(*msg, **opts)
50 50
51 51 def warn(self, *msg, **opts):
52 52 log.warn(' '.join(msg).rstrip('\n'))
53 53 super(LoggingUI, self).warn(*msg, **opts)
54 54
55 55 def error(self, *msg, **opts):
56 56 log.error(' '.join(msg).rstrip('\n'))
57 57 super(LoggingUI, self).error(*msg, **opts)
58 58
59 59 def note(self, *msg, **opts):
60 60 log.info(' '.join(msg).rstrip('\n'))
61 61 super(LoggingUI, self).note(*msg, **opts)
62 62
63 63 def debug(self, *msg, **opts):
64 64 log.debug(' '.join(msg).rstrip('\n'))
65 65 super(LoggingUI, self).debug(*msg, **opts)
66 66
67 67 baseui = LoggingUI()
68 68
69 69 # clean the baseui object
70 70 baseui._ocfg = hgconfig.config()
71 71 baseui._ucfg = hgconfig.config()
72 72 baseui._tcfg = hgconfig.config()
73 73
74 74 for section, option, value in repo_config:
75 75 baseui.setconfig(section, option, value)
76 76
77 77 # make our hgweb quiet so it doesn't print output
78 78 baseui.setconfig('ui', 'quiet', 'true')
79 79
80 80 baseui.setconfig('ui', 'paginate', 'never')
81 81 # for better Error reporting of Mercurial
82 82 baseui.setconfig('ui', 'message-output', 'stderr')
83 83
84 84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 85 # signal in a non-main thread, thus generating a ValueError.
86 86 baseui.setconfig('worker', 'numcpus', 1)
87 87
88 88 # If there is no config for the largefiles extension, we explicitly disable
89 89 # it here. This overrides settings from repositories hgrc file. Recent
90 90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 91 # repo.
92 92 if not baseui.hasconfig('extensions', 'largefiles'):
93 93 log.debug('Explicitly disable largefiles extension for repo.')
94 94 baseui.setconfig('extensions', 'largefiles', '!')
95 95
96 96 return baseui
97 97
98 98
99 99 def reraise_safe_exceptions(func):
100 100 """Decorator for converting mercurial exceptions to something neutral."""
101 101
102 102 def wrapper(*args, **kwargs):
103 103 try:
104 104 return func(*args, **kwargs)
105 105 except (Abort, InterventionRequired) as e:
106 106 raise_from_original(exceptions.AbortException(e))
107 107 except RepoLookupError as e:
108 108 raise_from_original(exceptions.LookupException(e))
109 109 except RequirementError as e:
110 110 raise_from_original(exceptions.RequirementException(e))
111 111 except RepoError as e:
112 112 raise_from_original(exceptions.VcsException(e))
113 113 except LookupError as e:
114 114 raise_from_original(exceptions.LookupException(e))
115 115 except Exception as e:
116 116 if not hasattr(e, '_vcs_kind'):
117 117 log.exception("Unhandled exception in hg remote call")
118 118 raise_from_original(exceptions.UnhandledException(e))
119 119
120 120 raise
121 121 return wrapper
122 122
123 123
124 124 class MercurialFactory(RepoFactory):
125 125 repo_type = 'hg'
126 126
127 127 def _create_config(self, config, hooks=True):
128 128 if not hooks:
129 129 hooks_to_clean = frozenset((
130 130 'changegroup.repo_size', 'preoutgoing.pre_pull',
131 131 'outgoing.pull_logger', 'prechangegroup.pre_push'))
132 132 new_config = []
133 133 for section, option, value in config:
134 134 if section == 'hooks' and option in hooks_to_clean:
135 135 continue
136 136 new_config.append((section, option, value))
137 137 config = new_config
138 138
139 139 baseui = make_ui_from_config(config)
140 140 return baseui
141 141
142 142 def _create_repo(self, wire, create):
143 143 baseui = self._create_config(wire["config"])
144 144 return instance(baseui, wire["path"], create)
145 145
146 146 def repo(self, wire, create=False):
147 147 """
148 148 Get a repository instance for the given path.
149 149 """
150 region = self._cache_region
151 context = wire.get('context', None)
152 repo_path = wire.get('path', '')
153 context_uid = '{}'.format(context)
154 cache = wire.get('cache', True)
155 cache_on = context and cache
156
157 @region.conditional_cache_on_arguments(condition=cache_on)
158 def create_new_repo(_repo_type, _repo_path, _context_uid):
159 return self._create_repo(wire, create)
160
161 return create_new_repo(self.repo_type, repo_path, context_uid)
150 return self._create_repo(wire, create)
162 151
163 152
164 153 class HgRemote(object):
165 154
166 155 def __init__(self, factory):
167 156 self._factory = factory
168
169 157 self._bulk_methods = {
170 158 "affected_files": self.ctx_files,
171 159 "author": self.ctx_user,
172 160 "branch": self.ctx_branch,
173 161 "children": self.ctx_children,
174 162 "date": self.ctx_date,
175 163 "message": self.ctx_description,
176 164 "parents": self.ctx_parents,
177 165 "status": self.ctx_status,
178 166 "obsolete": self.ctx_obsolete,
179 167 "phase": self.ctx_phase,
180 168 "hidden": self.ctx_hidden,
181 169 "_file_paths": self.ctx_list,
182 170 }
171 self.region = self._factory._cache_region
183 172
184 173 def _get_ctx(self, repo, ref):
185 174 return get_ctx(repo, ref)
186 175
176 def _cache_on(self, wire):
177 context = wire.get('context', '')
178 context_uid = '{}'.format(context)
179 repo_id = wire.get('repo_id', '')
180 cache = wire.get('cache', True)
181 cache_on = context and cache
182 return cache_on, context_uid, repo_id
183
187 184 @reraise_safe_exceptions
188 185 def discover_hg_version(self):
189 186 from mercurial import util
190 187 return util.version()
191 188
192 189 @reraise_safe_exceptions
193 190 def is_empty(self, wire):
194 191 repo = self._factory.repo(wire)
195 192
196 193 try:
197 194 return len(repo) == 0
198 195 except Exception:
199 196 log.exception("failed to read object_store")
200 197 return False
201 198
202 199 @reraise_safe_exceptions
203 200 def archive_repo(self, archive_path, mtime, file_info, kind):
204 201 if kind == "tgz":
205 202 archiver = archival.tarit(archive_path, mtime, "gz")
206 203 elif kind == "tbz2":
207 204 archiver = archival.tarit(archive_path, mtime, "bz2")
208 205 elif kind == 'zip':
209 206 archiver = archival.zipit(archive_path, mtime)
210 207 else:
211 208 raise exceptions.ArchiveException()(
212 209 'Remote does not support: "%s".' % kind)
213 210
214 211 for f_path, f_mode, f_is_link, f_content in file_info:
215 212 archiver.addfile(f_path, f_mode, f_is_link, f_content)
216 213 archiver.done()
217 214
218 215 @reraise_safe_exceptions
219 216 def bookmarks(self, wire):
220 repo = self._factory.repo(wire)
221 return dict(repo._bookmarks)
217 cache_on, context_uid, repo_id = self._cache_on(wire)
218 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 def _bookmarks(_context_uid, _repo_id):
220 repo = self._factory.repo(wire)
221 return dict(repo._bookmarks)
222
223 return _bookmarks(context_uid, repo_id)
222 224
223 225 @reraise_safe_exceptions
224 226 def branches(self, wire, normal, closed):
225 repo = self._factory.repo(wire)
226 iter_branches = repo.branchmap().iterbranches()
227 bt = {}
228 for branch_name, _heads, tip, is_closed in iter_branches:
229 if normal and not is_closed:
230 bt[branch_name] = tip
231 if closed and is_closed:
232 bt[branch_name] = tip
227 cache_on, context_uid, repo_id = self._cache_on(wire)
228 @self.region.conditional_cache_on_arguments(condition=cache_on)
229 def _branches(_context_uid, _repo_id, _normal, _closed):
230 repo = self._factory.repo(wire)
231 iter_branches = repo.branchmap().iterbranches()
232 bt = {}
233 for branch_name, _heads, tip, is_closed in iter_branches:
234 if normal and not is_closed:
235 bt[branch_name] = tip
236 if closed and is_closed:
237 bt[branch_name] = tip
233 238
234 return bt
239 return bt
240
241 return _branches(context_uid, repo_id, normal, closed)
235 242
236 243 @reraise_safe_exceptions
237 244 def bulk_request(self, wire, rev, pre_load):
238 result = {}
239 for attr in pre_load:
240 try:
241 method = self._bulk_methods[attr]
242 result[attr] = method(wire, rev)
243 except KeyError as e:
244 raise exceptions.VcsException(e)(
245 'Unknown bulk attribute: "%s"' % attr)
246 return result
245 cache_on, context_uid, repo_id = self._cache_on(wire)
246 @self.region.conditional_cache_on_arguments(condition=cache_on)
247 def _bulk_request(_context_uid, _repo_id, _rev, _pre_load):
248 result = {}
249 for attr in pre_load:
250 try:
251 method = self._bulk_methods[attr]
252 result[attr] = method(wire, rev)
253 except KeyError as e:
254 raise exceptions.VcsException(e)(
255 'Unknown bulk attribute: "%s"' % attr)
256 return result
257
258 return _bulk_request(context_uid, repo_id, rev, sorted(pre_load))
247 259
248 260 @reraise_safe_exceptions
249 261 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
250 262 baseui = self._factory._create_config(wire["config"], hooks=hooks)
251 263 clone(baseui, source, dest, noupdate=not update_after_clone)
252 264
253 265 @reraise_safe_exceptions
254 def commitctx(
255 self, wire, message, parents, commit_time, commit_timezone,
256 user, files, extra, removed, updated):
266 def commitctx(self, wire, message, parents, commit_time, commit_timezone,
267 user, files, extra, removed, updated):
257 268
258 269 repo = self._factory.repo(wire)
259 270 baseui = self._factory._create_config(wire['config'])
260 271 publishing = baseui.configbool('phases', 'publish')
261 272 if publishing:
262 273 new_commit = 'public'
263 274 else:
264 275 new_commit = 'draft'
265 276
266 277 def _filectxfn(_repo, ctx, path):
267 278 """
268 279 Marks given path as added/changed/removed in a given _repo. This is
269 280 for internal mercurial commit function.
270 281 """
271 282
272 283 # check if this path is removed
273 284 if path in removed:
274 285 # returning None is a way to mark node for removal
275 286 return None
276 287
277 288 # check if this path is added
278 289 for node in updated:
279 290 if node['path'] == path:
280 291 return memfilectx(
281 292 _repo,
282 293 changectx=ctx,
283 294 path=node['path'],
284 295 data=node['content'],
285 296 islink=False,
286 297 isexec=bool(node['mode'] & stat.S_IXUSR),
287 copied=False)
298 copysource=False)
288 299
289 300 raise exceptions.AbortException()(
290 301 "Given path haven't been marked as added, "
291 302 "changed or removed (%s)" % path)
292 303
293 304 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
294 305
295 306 commit_ctx = memctx(
296 307 repo=repo,
297 308 parents=parents,
298 309 text=message,
299 310 files=files,
300 311 filectxfn=_filectxfn,
301 312 user=user,
302 313 date=(commit_time, commit_timezone),
303 314 extra=extra)
304 315
305 316 n = repo.commitctx(commit_ctx)
306 317 new_id = hex(n)
307 318
308 319 return new_id
309 320
310 321 @reraise_safe_exceptions
311 322 def ctx_branch(self, wire, revision):
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, revision)
314 return ctx.branch()
315 323
316 @reraise_safe_exceptions
317 def ctx_children(self, wire, revision):
318 repo = self._factory.repo(wire)
319 ctx = self._get_ctx(repo, revision)
320 return [child.rev() for child in ctx.children()]
324 cache_on, context_uid, repo_id = self._cache_on(wire)
325 @self.region.conditional_cache_on_arguments(condition=cache_on)
326 def _ctx_branch(_context_uid, _repo_id, _revision):
327 repo = self._factory.repo(wire)
328 ctx = self._get_ctx(repo, revision)
329 return ctx.branch()
330 return _ctx_branch(context_uid, repo_id, revision)
321 331
322 332 @reraise_safe_exceptions
323 333 def ctx_date(self, wire, revision):
324 334 repo = self._factory.repo(wire)
325 335 ctx = self._get_ctx(repo, revision)
326 336 return ctx.date()
327 337
328 338 @reraise_safe_exceptions
329 339 def ctx_description(self, wire, revision):
330 340 repo = self._factory.repo(wire)
331 341 ctx = self._get_ctx(repo, revision)
332 342 return ctx.description()
333 343
334 344 @reraise_safe_exceptions
335 345 def ctx_files(self, wire, revision):
336 repo = self._factory.repo(wire)
337 ctx = self._get_ctx(repo, revision)
338 return ctx.files()
346
347 cache_on, context_uid, repo_id = self._cache_on(wire)
348 @self.region.conditional_cache_on_arguments(condition=cache_on)
349 def _ctx_files(_context_uid, _repo_id, _revision):
350 repo = self._factory.repo(wire)
351 ctx = self._get_ctx(repo, revision)
352 return ctx.files()
353
354 return _ctx_files(context_uid, repo_id, revision)
339 355
340 356 @reraise_safe_exceptions
341 357 def ctx_list(self, path, revision):
342 358 repo = self._factory.repo(path)
343 359 ctx = self._get_ctx(repo, revision)
344 360 return list(ctx)
345 361
346 362 @reraise_safe_exceptions
347 363 def ctx_parents(self, wire, revision):
348 repo = self._factory.repo(wire)
349 ctx = self._get_ctx(repo, revision)
350 return [parent.rev() for parent in ctx.parents()]
364 cache_on, context_uid, repo_id = self._cache_on(wire)
365 @self.region.conditional_cache_on_arguments(condition=cache_on)
366 def _ctx_parents(_context_uid, _repo_id, _revision):
367 repo = self._factory.repo(wire)
368 ctx = self._get_ctx(repo, revision)
369 return [parent.rev() for parent in ctx.parents()
370 if not (parent.hidden() or parent.obsolete())]
371
372 return _ctx_parents(context_uid, repo_id, revision)
373
374 @reraise_safe_exceptions
375 def ctx_children(self, wire, revision):
376 cache_on, context_uid, repo_id = self._cache_on(wire)
377 @self.region.conditional_cache_on_arguments(condition=cache_on)
378 def _ctx_children(_context_uid, _repo_id, _revision):
379 repo = self._factory.repo(wire)
380 ctx = self._get_ctx(repo, revision)
381 return [child.rev() for child in ctx.children()
382 if not (child.hidden() or child.obsolete())]
383
384 return _ctx_children(context_uid, repo_id, revision)
351 385
352 386 @reraise_safe_exceptions
353 387 def ctx_phase(self, wire, revision):
354 388 repo = self._factory.repo(wire)
355 389 ctx = self._get_ctx(repo, revision)
356 390 # public=0, draft=1, secret=3
357 391 return ctx.phase()
358 392
359 393 @reraise_safe_exceptions
360 394 def ctx_obsolete(self, wire, revision):
361 395 repo = self._factory.repo(wire)
362 396 ctx = self._get_ctx(repo, revision)
363 397 return ctx.obsolete()
364 398
365 399 @reraise_safe_exceptions
366 400 def ctx_hidden(self, wire, revision):
367 401 repo = self._factory.repo(wire)
368 402 ctx = self._get_ctx(repo, revision)
369 403 return ctx.hidden()
370 404
371 405 @reraise_safe_exceptions
372 406 def ctx_substate(self, wire, revision):
373 407 repo = self._factory.repo(wire)
374 408 ctx = self._get_ctx(repo, revision)
375 409 return ctx.substate
376 410
377 411 @reraise_safe_exceptions
378 412 def ctx_status(self, wire, revision):
379 413 repo = self._factory.repo(wire)
380 414 ctx = self._get_ctx(repo, revision)
381 415 status = repo[ctx.p1().node()].status(other=ctx.node())
382 416 # object of status (odd, custom named tuple in mercurial) is not
383 417 # correctly serializable, we make it a list, as the underling
384 418 # API expects this to be a list
385 419 return list(status)
386 420
387 421 @reraise_safe_exceptions
388 422 def ctx_user(self, wire, revision):
389 423 repo = self._factory.repo(wire)
390 424 ctx = self._get_ctx(repo, revision)
391 425 return ctx.user()
392 426
393 427 @reraise_safe_exceptions
394 428 def check_url(self, url, config):
395 429 _proto = None
396 430 if '+' in url[:url.find('://')]:
397 431 _proto = url[0:url.find('+')]
398 432 url = url[url.find('+') + 1:]
399 433 handlers = []
400 434 url_obj = url_parser(url)
401 435 test_uri, authinfo = url_obj.authinfo()
402 436 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
403 437 url_obj.query = obfuscate_qs(url_obj.query)
404 438
405 439 cleaned_uri = str(url_obj)
406 440 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
407 441
408 442 if authinfo:
409 443 # create a password manager
410 444 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
411 445 passmgr.add_password(*authinfo)
412 446
413 447 handlers.extend((httpbasicauthhandler(passmgr),
414 448 httpdigestauthhandler(passmgr)))
415 449
416 450 o = urllib2.build_opener(*handlers)
417 451 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
418 452 ('Accept', 'application/mercurial-0.1')]
419 453
420 454 q = {"cmd": 'between'}
421 455 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
422 456 qs = '?%s' % urllib.urlencode(q)
423 457 cu = "%s%s" % (test_uri, qs)
424 458 req = urllib2.Request(cu, None, {})
425 459
426 460 try:
427 461 log.debug("Trying to open URL %s", cleaned_uri)
428 462 resp = o.open(req)
429 463 if resp.code != 200:
430 464 raise exceptions.URLError()('Return Code is not 200')
431 465 except Exception as e:
432 466 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
433 467 # means it cannot be cloned
434 468 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
435 469
436 470 # now check if it's a proper hg repo, but don't do it for svn
437 471 try:
438 472 if _proto == 'svn':
439 473 pass
440 474 else:
441 475 # check for pure hg repos
442 476 log.debug(
443 477 "Verifying if URL is a Mercurial repository: %s",
444 478 cleaned_uri)
445 479 ui = make_ui_from_config(config)
446 480 peer_checker = makepeer(ui, url)
447 481 peer_checker.lookup('tip')
448 482 except Exception as e:
449 483 log.warning("URL is not a valid Mercurial repository: %s",
450 484 cleaned_uri)
451 485 raise exceptions.URLError(e)(
452 486 "url [%s] does not look like an hg repo org_exc: %s"
453 487 % (cleaned_uri, e))
454 488
455 489 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
456 490 return True
457 491
458 492 @reraise_safe_exceptions
459 def diff(
460 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
461 context):
493 def diff(self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews, context):
462 494 repo = self._factory.repo(wire)
463 495
464 496 if file_filter:
465 497 match_filter = match(file_filter[0], '', [file_filter[1]])
466 498 else:
467 499 match_filter = file_filter
468 500 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
469 501
470 502 try:
471 503 return "".join(patch.diff(
472 504 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
473 505 except RepoLookupError as e:
474 506 raise exceptions.LookupException(e)()
475 507
476 508 @reraise_safe_exceptions
477 509 def node_history(self, wire, revision, path, limit):
478 510 repo = self._factory.repo(wire)
479 511
480 512 ctx = self._get_ctx(repo, revision)
481 513 fctx = ctx.filectx(path)
482 514
483 515 def history_iter():
484 516 limit_rev = fctx.rev()
485 517 for obj in reversed(list(fctx.filelog())):
486 518 obj = fctx.filectx(obj)
487 519 ctx = obj.changectx()
488 520 if ctx.hidden() or ctx.obsolete():
489 521 continue
490 522
491 523 if limit_rev >= obj.rev():
492 524 yield obj
493 525
494 526 history = []
495 527 for cnt, obj in enumerate(history_iter()):
496 528 if limit and cnt >= limit:
497 529 break
498 530 history.append(hex(obj.node()))
499 531
500 532 return [x for x in history]
501 533
502 534 @reraise_safe_exceptions
503 535 def node_history_untill(self, wire, revision, path, limit):
504 536 repo = self._factory.repo(wire)
505 537 ctx = self._get_ctx(repo, revision)
506 538 fctx = ctx.filectx(path)
507 539
508 540 file_log = list(fctx.filelog())
509 541 if limit:
510 542 # Limit to the last n items
511 543 file_log = file_log[-limit:]
512 544
513 545 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 546
515 547 @reraise_safe_exceptions
516 548 def fctx_annotate(self, wire, revision, path):
517 549 repo = self._factory.repo(wire)
518 550 ctx = self._get_ctx(repo, revision)
519 551 fctx = ctx.filectx(path)
520 552
521 553 result = []
522 554 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 555 ln_no = i
524 556 sha = hex(annotate_obj.fctx.node())
525 557 content = annotate_obj.text
526 558 result.append((ln_no, sha, content))
527 559 return result
528 560
529 561 @reraise_safe_exceptions
530 def fctx_data(self, wire, revision, path):
562 def fctx_node_data(self, wire, revision, path):
531 563 repo = self._factory.repo(wire)
532 564 ctx = self._get_ctx(repo, revision)
533 565 fctx = ctx.filectx(path)
534 566 return fctx.data()
535 567
536 568 @reraise_safe_exceptions
537 569 def fctx_flags(self, wire, revision, path):
538 570 repo = self._factory.repo(wire)
539 571 ctx = self._get_ctx(repo, revision)
540 572 fctx = ctx.filectx(path)
541 573 return fctx.flags()
542 574
543 575 @reraise_safe_exceptions
544 576 def fctx_size(self, wire, revision, path):
545 577 repo = self._factory.repo(wire)
546 578 ctx = self._get_ctx(repo, revision)
547 579 fctx = ctx.filectx(path)
548 580 return fctx.size()
549 581
550 582 @reraise_safe_exceptions
551 583 def get_all_commit_ids(self, wire, name):
552 repo = self._factory.repo(wire)
553 repo = repo.filtered(name)
554 revs = map(lambda x: hex(x[7]), repo.changelog.index)
555 return revs
584 cache_on, context_uid, repo_id = self._cache_on(wire)
585 @self.region.conditional_cache_on_arguments(condition=cache_on)
586 def _get_all_commit_ids(_context_uid, _repo_id, _name):
587 repo = self._factory.repo(wire)
588 repo = repo.filtered(name)
589 revs = map(lambda x: hex(x[7]), repo.changelog.index)
590 return revs
591 return _get_all_commit_ids(context_uid, repo_id, name)
556 592
557 593 @reraise_safe_exceptions
558 594 def get_config_value(self, wire, section, name, untrusted=False):
559 595 repo = self._factory.repo(wire)
560 596 return repo.ui.config(section, name, untrusted=untrusted)
561 597
562 598 @reraise_safe_exceptions
563 599 def get_config_bool(self, wire, section, name, untrusted=False):
564 600 repo = self._factory.repo(wire)
565 601 return repo.ui.configbool(section, name, untrusted=untrusted)
566 602
567 603 @reraise_safe_exceptions
568 604 def get_config_list(self, wire, section, name, untrusted=False):
569 605 repo = self._factory.repo(wire)
570 606 return repo.ui.configlist(section, name, untrusted=untrusted)
571 607
572 608 @reraise_safe_exceptions
573 609 def is_large_file(self, wire, path):
574 return largefiles.lfutil.isstandin(path)
610 cache_on, context_uid, repo_id = self._cache_on(wire)
611 @self.region.conditional_cache_on_arguments(condition=cache_on)
612 def _is_large_file(_context_uid, _repo_id, _path):
613 return largefiles.lfutil.isstandin(path)
614
615 return _is_large_file(context_uid, repo_id, path)
575 616
576 617 @reraise_safe_exceptions
577 618 def in_largefiles_store(self, wire, sha):
578 619 repo = self._factory.repo(wire)
579 620 return largefiles.lfutil.instore(repo, sha)
580 621
581 622 @reraise_safe_exceptions
582 623 def in_user_cache(self, wire, sha):
583 624 repo = self._factory.repo(wire)
584 625 return largefiles.lfutil.inusercache(repo.ui, sha)
585 626
586 627 @reraise_safe_exceptions
587 628 def store_path(self, wire, sha):
588 629 repo = self._factory.repo(wire)
589 630 return largefiles.lfutil.storepath(repo, sha)
590 631
591 632 @reraise_safe_exceptions
592 633 def link(self, wire, sha, path):
593 634 repo = self._factory.repo(wire)
594 635 largefiles.lfutil.link(
595 636 largefiles.lfutil.usercachepath(repo.ui, sha), path)
596 637
597 638 @reraise_safe_exceptions
598 639 def localrepository(self, wire, create=False):
599 640 self._factory.repo(wire, create=create)
600 641
601 642 @reraise_safe_exceptions
602 643 def lookup(self, wire, revision, both):
603
604 repo = self._factory.repo(wire)
644 cache_on, context_uid, repo_id = self._cache_on(wire)
645 @self.region.conditional_cache_on_arguments(condition=cache_on)
646 def _lookup(_context_uid, _repo_id, _revision, _both):
605 647
606 if isinstance(revision, int):
607 # NOTE(marcink):
608 # since Mercurial doesn't support negative indexes properly
609 # we need to shift accordingly by one to get proper index, e.g
610 # repo[-1] => repo[-2]
611 # repo[0] => repo[-1]
612 if revision <= 0:
613 revision = revision + -1
614 try:
615 ctx = self._get_ctx(repo, revision)
616 except (TypeError, RepoLookupError) as e:
617 e._org_exc_tb = traceback.format_exc()
618 raise exceptions.LookupException(e)(revision)
619 except LookupError as e:
620 e._org_exc_tb = traceback.format_exc()
621 raise exceptions.LookupException(e)(e.name)
648 repo = self._factory.repo(wire)
649 rev = _revision
650 if isinstance(rev, int):
651 # NOTE(marcink):
652 # since Mercurial doesn't support negative indexes properly
653 # we need to shift accordingly by one to get proper index, e.g
654 # repo[-1] => repo[-2]
655 # repo[0] => repo[-1]
656 if rev <= 0:
657 rev = rev + -1
658 try:
659 ctx = self._get_ctx(repo, rev)
660 except (TypeError, RepoLookupError) as e:
661 e._org_exc_tb = traceback.format_exc()
662 raise exceptions.LookupException(e)(rev)
663 except LookupError as e:
664 e._org_exc_tb = traceback.format_exc()
665 raise exceptions.LookupException(e)(e.name)
622 666
623 if not both:
624 return ctx.hex()
667 if not both:
668 return ctx.hex()
625 669
626 ctx = repo[ctx.hex()]
627 return ctx.hex(), ctx.rev()
670 ctx = repo[ctx.hex()]
671 return ctx.hex(), ctx.rev()
672
673 return _lookup(context_uid, repo_id, revision, both)
628 674
629 675 @reraise_safe_exceptions
630 676 def pull(self, wire, url, commit_ids=None):
631 677 repo = self._factory.repo(wire)
632 678 # Disable any prompts for this repo
633 679 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
634 680
635 681 remote = peer(repo, {}, url)
636 682 # Disable any prompts for this remote
637 683 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
638 684
639 685 if commit_ids:
640 686 commit_ids = [bin(commit_id) for commit_id in commit_ids]
641 687
642 688 return exchange.pull(
643 689 repo, remote, heads=commit_ids, force=None).cgresult
644 690
645 691 @reraise_safe_exceptions
646 692 def sync_push(self, wire, url):
647 693 if not self.check_url(url, wire['config']):
648 694 return
649 695
650 696 repo = self._factory.repo(wire)
651 697
652 698 # Disable any prompts for this repo
653 699 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
654 700
655 701 bookmarks = dict(repo._bookmarks).keys()
656 702 remote = peer(repo, {}, url)
657 703 # Disable any prompts for this remote
658 704 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
659 705
660 706 return exchange.push(
661 707 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
662 708
663 709 @reraise_safe_exceptions
664 710 def revision(self, wire, rev):
665 711 repo = self._factory.repo(wire)
666 712 ctx = self._get_ctx(repo, rev)
667 713 return ctx.rev()
668 714
669 715 @reraise_safe_exceptions
670 def rev_range(self, wire, filter):
671 repo = self._factory.repo(wire)
672 revisions = [rev for rev in revrange(repo, filter)]
673 return revisions
716 def rev_range(self, wire, commit_filter):
717 cache_on, context_uid, repo_id = self._cache_on(wire)
718 @self.region.conditional_cache_on_arguments(condition=cache_on)
719 def _rev_range(_context_uid, _repo_id, _filter):
720 repo = self._factory.repo(wire)
721 revisions = [rev for rev in revrange(repo, commit_filter)]
722 return revisions
723
724 return _rev_range(context_uid, repo_id, sorted(commit_filter))
674 725
675 726 @reraise_safe_exceptions
676 727 def rev_range_hash(self, wire, node):
677 728 repo = self._factory.repo(wire)
678 729
679 730 def get_revs(repo, rev_opt):
680 731 if rev_opt:
681 732 revs = revrange(repo, rev_opt)
682 733 if len(revs) == 0:
683 734 return (nullrev, nullrev)
684 735 return max(revs), min(revs)
685 736 else:
686 737 return len(repo) - 1, 0
687 738
688 739 stop, start = get_revs(repo, [node + ':'])
689 740 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
690 741 return revs
691 742
692 743 @reraise_safe_exceptions
693 744 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
694 745 other_path = kwargs.pop('other_path', None)
695 746
696 747 # case when we want to compare two independent repositories
697 748 if other_path and other_path != wire["path"]:
698 749 baseui = self._factory._create_config(wire["config"])
699 750 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
700 751 else:
701 752 repo = self._factory.repo(wire)
702 753 return list(repo.revs(rev_spec, *args))
703 754
704 755 @reraise_safe_exceptions
705 756 def strip(self, wire, revision, update, backup):
706 757 repo = self._factory.repo(wire)
707 758 ctx = self._get_ctx(repo, revision)
708 759 hgext_strip(
709 760 repo.baseui, repo, ctx.node(), update=update, backup=backup)
710 761
711 762 @reraise_safe_exceptions
712 763 def verify(self, wire,):
713 764 repo = self._factory.repo(wire)
714 765 baseui = self._factory._create_config(wire['config'])
715 766 baseui.setconfig('ui', 'quiet', 'false')
716 767 output = io.BytesIO()
717 768
718 769 def write(data, **unused_kwargs):
719 770 output.write(data)
720 771 baseui.write = write
721 772
722 773 repo.ui = baseui
723 774 verify.verify(repo)
724 775 return output.getvalue()
725 776
726 777 @reraise_safe_exceptions
727 def tag(self, wire, name, revision, message, local, user,
728 tag_time, tag_timezone):
778 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
729 779 repo = self._factory.repo(wire)
730 780 ctx = self._get_ctx(repo, revision)
731 781 node = ctx.node()
732 782
733 783 date = (tag_time, tag_timezone)
734 784 try:
735 785 hg_tag.tag(repo, name, node, message, local, user, date)
736 786 except Abort as e:
737 787 log.exception("Tag operation aborted")
738 788 # Exception can contain unicode which we convert
739 789 raise exceptions.AbortException(e)(repr(e))
740 790
741 791 @reraise_safe_exceptions
742 792 def tags(self, wire):
743 repo = self._factory.repo(wire)
744 return repo.tags()
793 cache_on, context_uid, repo_id = self._cache_on(wire)
794 @self.region.conditional_cache_on_arguments(condition=cache_on)
795 def _tags(_context_uid, _repo_id):
796 repo = self._factory.repo(wire)
797 return repo.tags()
798
799 return _tags(context_uid, repo_id)
745 800
746 801 @reraise_safe_exceptions
747 802 def update(self, wire, node=None, clean=False):
748 803 repo = self._factory.repo(wire)
749 804 baseui = self._factory._create_config(wire['config'])
750 805 commands.update(baseui, repo, node=node, clean=clean)
751 806
752 807 @reraise_safe_exceptions
753 808 def identify(self, wire):
754 809 repo = self._factory.repo(wire)
755 810 baseui = self._factory._create_config(wire['config'])
756 811 output = io.BytesIO()
757 812 baseui.write = output.write
758 813 # This is required to get a full node id
759 814 baseui.debugflag = True
760 815 commands.identify(baseui, repo, id=True)
761 816
762 817 return output.getvalue()
763 818
764 819 @reraise_safe_exceptions
765 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
766 hooks=True):
820 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
767 821 repo = self._factory.repo(wire)
768 822 baseui = self._factory._create_config(wire['config'], hooks=hooks)
769 823
770 824 # Mercurial internally has a lot of logic that checks ONLY if
771 825 # option is defined, we just pass those if they are defined then
772 826 opts = {}
773 827 if bookmark:
774 828 opts['bookmark'] = bookmark
775 829 if branch:
776 830 opts['branch'] = branch
777 831 if revision:
778 832 opts['rev'] = revision
779 833
780 834 commands.pull(baseui, repo, source, **opts)
781 835
782 836 @reraise_safe_exceptions
783 837 def heads(self, wire, branch=None):
784 838 repo = self._factory.repo(wire)
785 839 baseui = self._factory._create_config(wire['config'])
786 840 output = io.BytesIO()
787 841
788 842 def write(data, **unused_kwargs):
789 843 output.write(data)
790 844
791 845 baseui.write = write
792 846 if branch:
793 847 args = [branch]
794 848 else:
795 849 args = []
796 850 commands.heads(baseui, repo, template='{node} ', *args)
797 851
798 852 return output.getvalue()
799 853
800 854 @reraise_safe_exceptions
801 855 def ancestor(self, wire, revision1, revision2):
802 856 repo = self._factory.repo(wire)
803 857 changelog = repo.changelog
804 858 lookup = repo.lookup
805 859 a = changelog.ancestor(lookup(revision1), lookup(revision2))
806 860 return hex(a)
807 861
808 862 @reraise_safe_exceptions
809 def push(self, wire, revisions, dest_path, hooks=True,
810 push_branches=False):
863 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
811 864 repo = self._factory.repo(wire)
812 865 baseui = self._factory._create_config(wire['config'], hooks=hooks)
813 866 commands.push(baseui, repo, dest=dest_path, rev=revisions,
814 867 new_branch=push_branches)
815 868
816 869 @reraise_safe_exceptions
817 870 def merge(self, wire, revision):
818 871 repo = self._factory.repo(wire)
819 872 baseui = self._factory._create_config(wire['config'])
820 873 repo.ui.setconfig('ui', 'merge', 'internal:dump')
821 874
822 875 # In case of sub repositories are used mercurial prompts the user in
823 876 # case of merge conflicts or different sub repository sources. By
824 877 # setting the interactive flag to `False` mercurial doesn't prompt the
825 878 # used but instead uses a default value.
826 879 repo.ui.setconfig('ui', 'interactive', False)
827 880 commands.merge(baseui, repo, rev=revision)
828 881
829 882 @reraise_safe_exceptions
830 883 def merge_state(self, wire):
831 884 repo = self._factory.repo(wire)
832 885 repo.ui.setconfig('ui', 'merge', 'internal:dump')
833 886
834 887 # In case of sub repositories are used mercurial prompts the user in
835 888 # case of merge conflicts or different sub repository sources. By
836 889 # setting the interactive flag to `False` mercurial doesn't prompt the
837 890 # used but instead uses a default value.
838 891 repo.ui.setconfig('ui', 'interactive', False)
839 892 ms = hg_merge.mergestate(repo)
840 893 return [x for x in ms.unresolved()]
841 894
842 895 @reraise_safe_exceptions
843 896 def commit(self, wire, message, username, close_branch=False):
844 897 repo = self._factory.repo(wire)
845 898 baseui = self._factory._create_config(wire['config'])
846 899 repo.ui.setconfig('ui', 'username', username)
847 900 commands.commit(baseui, repo, message=message, close_branch=close_branch)
848 901
849
850 902 @reraise_safe_exceptions
851 903 def rebase(self, wire, source=None, dest=None, abort=False):
852 904 repo = self._factory.repo(wire)
853 905 baseui = self._factory._create_config(wire['config'])
854 906 repo.ui.setconfig('ui', 'merge', 'internal:dump')
855 907 rebase.rebase(
856 908 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
857 909
858 910 @reraise_safe_exceptions
859 911 def bookmark(self, wire, bookmark, revision=None):
860 912 repo = self._factory.repo(wire)
861 913 baseui = self._factory._create_config(wire['config'])
862 914 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
863 915
864 916 @reraise_safe_exceptions
865 917 def install_hooks(self, wire, force=False):
866 918 # we don't need any special hooks for Mercurial
867 919 pass
868 920
869 921 @reraise_safe_exceptions
870 922 def get_hooks_info(self, wire):
871 923 return {
872 924 'pre_version': vcsserver.__version__,
873 925 'post_version': vcsserver.__version__,
874 926 }
@@ -1,772 +1,793 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
from __future__ import absolute_import

import logging
import os
import posixpath as vcspath
import StringIO
import subprocess
import traceback
import urllib
import urlparse
from functools import wraps
from urllib2 import URLError

import svn.client
import svn.core
import svn.delta
import svn.diff
import svn.fs
import svn.repos

from vcsserver import svn_diff, exceptions, subprocessio, settings
from vcsserver.base import RepoFactory, raise_from_original
39 39
log = logging.getLogger(__name__)


# Mapping of svn compatible version flags to the filesystem version the
# repository is created with.
# Compare with subversion/svnadmin/svnadmin.c
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Set of svn compatible version flags.  Derived from the map keys so the two
# structures can never drift apart (previously both listed the same five
# literals independently).
svn_compatible_versions = set(svn_compatible_versions_map)
60 60
61 61
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Exceptions already tagged with ``_vcs_kind`` pass through unchanged;
    anything else is logged and re-raised as ``UnhandledException`` with the
    original traceback preserved via ``raise_from_original``.
    """
    @wraps(func)  # preserve the wrapped function's name/docstring for callers
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e))
            raise
    return wrapper
73 73
74 74
class SubversionFactory(RepoFactory):
    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        """Open, or optionally create, the svn repository at ``wire['path']``."""
        path = svn.core.svn_path_canonicalize(wire['path'])
        if not create:
            repo = svn.repos.open(path)
            log.debug('Got SVN object: %s', repo)
            return repo

        fs_config = {'compatible-version': '1.9'}
        if compatible_version:
            if compatible_version not in svn_compatible_versions:
                raise Exception('Unknown SVN compatible version "{}"'
                                .format(compatible_version))
            fs_config['compatible-version'] = \
                svn_compatible_versions_map[compatible_version]

        log.debug('Create SVN repo with config "%s"', fs_config)
        repo = svn.repos.create(path, "", "", None, fs_config)
        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
115 103
# Translate svn node kinds into the neutral strings used by the API layer.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
120 108
121 109
122 110 class SvnRemote(object):
123 111
124 112 def __init__(self, factory, hg_factory=None):
125 113 self._factory = factory
126 114 # TODO: Remove once we do not use internal Mercurial objects anymore
127 115 # for subversion
128 116 self._hg_factory = hg_factory
117 self.region = self._factory._cache_region
118
119 def _cache_on(self, wire):
120 context = wire.get('context', '')
121 context_uid = '{}'.format(context)
122 repo_id = wire.get('repo_id', '')
123 cache = wire.get('cache', True)
124 cache_on = context and cache
125 return cache_on, context_uid, repo_id
129 126
130 127 @reraise_safe_exceptions
131 128 def discover_svn_version(self):
132 129 try:
133 130 import svn.core
134 131 svn_ver = svn.core.SVN_VERSION
135 132 except ImportError:
136 133 svn_ver = None
137 134 return svn_ver
138 135
139 136 @reraise_safe_exceptions
140 137 def is_empty(self, wire):
141 repo = self._factory.repo(wire)
142 138
143 139 try:
144 140 return self.lookup(wire, -1) == 0
145 141 except Exception:
146 142 log.exception("failed to read object_store")
147 143 return False
148 144
149 145 def check_url(self, url, config_items):
150 146 # this can throw exception if not installed, but we detect this
151 147 from hgsubversion import svnrepo
152 148
153 149 baseui = self._hg_factory._create_config(config_items)
154 150 # uuid function get's only valid UUID from proper repo, else
155 151 # throws exception
156 152 try:
157 153 svnrepo.svnremoterepo(baseui, url).svn.uuid
158 154 except Exception:
159 155 tb = traceback.format_exc()
160 156 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
161 157 raise URLError(
162 158 '"%s" is not a valid Subversion source url.' % (url, ))
163 159 return True
164 160
165 161 def is_path_valid_repository(self, wire, path):
166 162
167 163 # NOTE(marcink): short circuit the check for SVN repo
168 164 # the repos.open might be expensive to check, but we have one cheap
169 165 # pre condition that we can use, to check for 'format' file
170 166
171 167 if not os.path.isfile(os.path.join(path, 'format')):
172 168 return False
173 169
174 170 try:
175 171 svn.repos.open(path)
176 172 except svn.core.SubversionException:
177 173 tb = traceback.format_exc()
178 174 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
179 175 return False
180 176 return True
181 177
182 178 @reraise_safe_exceptions
183 179 def verify(self, wire,):
184 180 repo_path = wire['path']
185 181 if not self.is_path_valid_repository(wire, repo_path):
186 182 raise Exception(
187 183 "Path %s is not a valid Subversion repository." % repo_path)
188 184
189 185 cmd = ['svnadmin', 'info', repo_path]
190 186 stdout, stderr = subprocessio.run_command(cmd)
191 187 return stdout
192 188
193 189 def lookup(self, wire, revision):
194 190 if revision not in [-1, None, 'HEAD']:
195 191 raise NotImplementedError
196 192 repo = self._factory.repo(wire)
197 193 fs_ptr = svn.repos.fs(repo)
198 194 head = svn.fs.youngest_rev(fs_ptr)
199 195 return head
200 196
201 197 def lookup_interval(self, wire, start_ts, end_ts):
202 198 repo = self._factory.repo(wire)
203 199 fsobj = svn.repos.fs(repo)
204 200 start_rev = None
205 201 end_rev = None
206 202 if start_ts:
207 203 start_ts_svn = apr_time_t(start_ts)
208 204 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
209 205 else:
210 206 start_rev = 1
211 207 if end_ts:
212 208 end_ts_svn = apr_time_t(end_ts)
213 209 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
214 210 else:
215 211 end_rev = svn.fs.youngest_rev(fsobj)
216 212 return start_rev, end_rev
217 213
218 214 def revision_properties(self, wire, revision):
219 repo = self._factory.repo(wire)
220 fs_ptr = svn.repos.fs(repo)
221 return svn.fs.revision_proplist(fs_ptr, revision)
215
216 cache_on, context_uid, repo_id = self._cache_on(wire)
217 @self.region.conditional_cache_on_arguments(condition=cache_on)
218 def _revision_properties(_context_uid, _repo_id, _revision):
219 repo = self._factory.repo(wire)
220 fs_ptr = svn.repos.fs(repo)
221 return svn.fs.revision_proplist(fs_ptr, revision)
222 return _revision_properties(context_uid, repo_id, revision)
222 223
223 224 def revision_changes(self, wire, revision):
224 225
225 226 repo = self._factory.repo(wire)
226 227 fsobj = svn.repos.fs(repo)
227 228 rev_root = svn.fs.revision_root(fsobj, revision)
228 229
229 230 editor = svn.repos.ChangeCollector(fsobj, rev_root)
230 231 editor_ptr, editor_baton = svn.delta.make_editor(editor)
231 232 base_dir = ""
232 233 send_deltas = False
233 234 svn.repos.replay2(
234 235 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
235 236 editor_ptr, editor_baton, None)
236 237
237 238 added = []
238 239 changed = []
239 240 removed = []
240 241
241 242 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
242 243 for path, change in editor.changes.iteritems():
243 244 # TODO: Decide what to do with directory nodes. Subversion can add
244 245 # empty directories.
245 246
246 247 if change.item_kind == svn.core.svn_node_dir:
247 248 continue
248 249 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
249 250 added.append(path)
250 251 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
251 252 svn.repos.CHANGE_ACTION_REPLACE]:
252 253 changed.append(path)
253 254 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
254 255 removed.append(path)
255 256 else:
256 257 raise NotImplementedError(
257 258 "Action %s not supported on path %s" % (
258 259 change.action, path))
259 260
260 261 changes = {
261 262 'added': added,
262 263 'changed': changed,
263 264 'removed': removed,
264 265 }
265 266 return changes
266 267
268 @reraise_safe_exceptions
267 269 def node_history(self, wire, path, revision, limit):
268 cross_copies = False
269 repo = self._factory.repo(wire)
270 fsobj = svn.repos.fs(repo)
271 rev_root = svn.fs.revision_root(fsobj, revision)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
273 cross_copies = False
274 repo = self._factory.repo(wire)
275 fsobj = svn.repos.fs(repo)
276 rev_root = svn.fs.revision_root(fsobj, revision)
272 277
273 history_revisions = []
274 history = svn.fs.node_history(rev_root, path)
275 history = svn.fs.history_prev(history, cross_copies)
276 while history:
277 __, node_revision = svn.fs.history_location(history)
278 history_revisions.append(node_revision)
279 if limit and len(history_revisions) >= limit:
280 break
278 history_revisions = []
279 history = svn.fs.node_history(rev_root, path)
281 280 history = svn.fs.history_prev(history, cross_copies)
282 return history_revisions
281 while history:
282 __, node_revision = svn.fs.history_location(history)
283 history_revisions.append(node_revision)
284 if limit and len(history_revisions) >= limit:
285 break
286 history = svn.fs.history_prev(history, cross_copies)
287 return history_revisions
288 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
283 289
284 290 def node_properties(self, wire, path, revision):
285 291 repo = self._factory.repo(wire)
286 292 fsobj = svn.repos.fs(repo)
287 293 rev_root = svn.fs.revision_root(fsobj, revision)
288 294 return svn.fs.node_proplist(rev_root, path)
289 295
290 296 def file_annotate(self, wire, path, revision):
291 297 abs_path = 'file://' + urllib.pathname2url(
292 298 vcspath.join(wire['path'], path))
293 299 file_uri = svn.core.svn_path_canonicalize(abs_path)
294 300
295 301 start_rev = svn_opt_revision_value_t(0)
296 302 peg_rev = svn_opt_revision_value_t(revision)
297 303 end_rev = peg_rev
298 304
299 305 annotations = []
300 306
301 307 def receiver(line_no, revision, author, date, line, pool):
302 308 annotations.append((line_no, revision, line))
303 309
304 310 # TODO: Cannot use blame5, missing typemap function in the swig code
305 311 try:
306 312 svn.client.blame2(
307 313 file_uri, peg_rev, start_rev, end_rev,
308 314 receiver, svn.client.create_context())
309 315 except svn.core.SubversionException as exc:
310 316 log.exception("Error during blame operation.")
311 317 raise Exception(
312 318 "Blame not supported or file does not exist at path %s. "
313 319 "Error %s." % (path, exc))
314 320
315 321 return annotations
316 322
317 def get_node_type(self, wire, path, rev=None):
318 repo = self._factory.repo(wire)
319 fs_ptr = svn.repos.fs(repo)
320 if rev is None:
321 rev = svn.fs.youngest_rev(fs_ptr)
322 root = svn.fs.revision_root(fs_ptr, rev)
323 node = svn.fs.check_path(root, path)
324 return NODE_TYPE_MAPPING.get(node, None)
323 def get_node_type(self, wire, path, revision=None):
324
325 cache_on, context_uid, repo_id = self._cache_on(wire)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 def _get_node_type(_context_uid, _repo_id, _path, _revision):
328 repo = self._factory.repo(wire)
329 fs_ptr = svn.repos.fs(repo)
330 if _revision is None:
331 _revision = svn.fs.youngest_rev(fs_ptr)
332 root = svn.fs.revision_root(fs_ptr, _revision)
333 node = svn.fs.check_path(root, path)
334 return NODE_TYPE_MAPPING.get(node, None)
335 return _get_node_type(context_uid, repo_id, path, revision)
325 336
326 337 def get_nodes(self, wire, path, revision=None):
327 repo = self._factory.repo(wire)
328 fsobj = svn.repos.fs(repo)
329 if revision is None:
330 revision = svn.fs.youngest_rev(fsobj)
331 root = svn.fs.revision_root(fsobj, revision)
332 entries = svn.fs.dir_entries(root, path)
333 result = []
334 for entry_path, entry_info in entries.iteritems():
335 result.append(
336 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
337 return result
338
339 cache_on, context_uid, repo_id = self._cache_on(wire)
340 @self.region.conditional_cache_on_arguments(condition=cache_on)
341 def _get_nodes(_context_uid, _repo_id, _path, _revision):
342 repo = self._factory.repo(wire)
343 fsobj = svn.repos.fs(repo)
344 if _revision is None:
345 _revision = svn.fs.youngest_rev(fsobj)
346 root = svn.fs.revision_root(fsobj, _revision)
347 entries = svn.fs.dir_entries(root, path)
348 result = []
349 for entry_path, entry_info in entries.iteritems():
350 result.append(
351 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
352 return result
353 return _get_nodes(context_uid, repo_id, path, revision)
338 354
339 355 def get_file_content(self, wire, path, rev=None):
340 356 repo = self._factory.repo(wire)
341 357 fsobj = svn.repos.fs(repo)
342 358 if rev is None:
343 359 rev = svn.fs.youngest_revision(fsobj)
344 360 root = svn.fs.revision_root(fsobj, rev)
345 361 content = svn.core.Stream(svn.fs.file_contents(root, path))
346 362 return content.read()
347 363
348 364 def get_file_size(self, wire, path, revision=None):
349 repo = self._factory.repo(wire)
350 fsobj = svn.repos.fs(repo)
351 if revision is None:
352 revision = svn.fs.youngest_revision(fsobj)
353 root = svn.fs.revision_root(fsobj, revision)
354 size = svn.fs.file_length(root, path)
355 return size
365
366 cache_on, context_uid, repo_id = self._cache_on(wire)
367 @self.region.conditional_cache_on_arguments(condition=cache_on)
368 def _get_file_size(_context_uid, _repo_id, _path, _revision):
369 repo = self._factory.repo(wire)
370 fsobj = svn.repos.fs(repo)
371 if _revision is None:
372 _revision = svn.fs.youngest_revision(fsobj)
373 root = svn.fs.revision_root(fsobj, _revision)
374 size = svn.fs.file_length(root, path)
375 return size
376 return _get_file_size(context_uid, repo_id, path, revision)
356 377
357 378 def create_repository(self, wire, compatible_version=None):
358 379 log.info('Creating Subversion repository in path "%s"', wire['path'])
359 380 self._factory.repo(wire, create=True,
360 381 compatible_version=compatible_version)
361 382
362 383 def get_url_and_credentials(self, src_url):
363 384 obj = urlparse.urlparse(src_url)
364 385 username = obj.username or None
365 386 password = obj.password or None
366 387 return username, password, src_url
367 388
368 389 def import_remote_repository(self, wire, src_url):
369 390 repo_path = wire['path']
370 391 if not self.is_path_valid_repository(wire, repo_path):
371 392 raise Exception(
372 393 "Path %s is not a valid Subversion repository." % repo_path)
373 394
374 395 username, password, src_url = self.get_url_and_credentials(src_url)
375 396 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
376 397 '--trust-server-cert-failures=unknown-ca']
377 398 if username and password:
378 399 rdump_cmd += ['--username', username, '--password', password]
379 400 rdump_cmd += [src_url]
380 401
381 402 rdump = subprocess.Popen(
382 403 rdump_cmd,
383 404 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
384 405 load = subprocess.Popen(
385 406 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
386 407
387 408 # TODO: johbo: This can be a very long operation, might be better
388 409 # to track some kind of status and provide an api to check if the
389 410 # import is done.
390 411 rdump.wait()
391 412 load.wait()
392 413
393 414 log.debug('Return process ended with code: %s', rdump.returncode)
394 415 if rdump.returncode != 0:
395 416 errors = rdump.stderr.read()
396 417 log.error('svnrdump dump failed: statuscode %s: message: %s',
397 418 rdump.returncode, errors)
398 419 reason = 'UNKNOWN'
399 420 if 'svnrdump: E230001:' in errors:
400 421 reason = 'INVALID_CERTIFICATE'
401 422
402 423 if reason == 'UNKNOWN':
403 424 reason = 'UNKNOWN:{}'.format(errors)
404 425 raise Exception(
405 426 'Failed to dump the remote repository from %s. Reason:%s' % (
406 427 src_url, reason))
407 428 if load.returncode != 0:
408 429 raise Exception(
409 430 'Failed to load the dump of remote repository from %s.' %
410 431 (src_url, ))
411 432
412 433 def commit(self, wire, message, author, timestamp, updated, removed):
413 434 assert isinstance(message, str)
414 435 assert isinstance(author, str)
415 436
416 437 repo = self._factory.repo(wire)
417 438 fsobj = svn.repos.fs(repo)
418 439
419 440 rev = svn.fs.youngest_rev(fsobj)
420 441 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
421 442 txn_root = svn.fs.txn_root(txn)
422 443
423 444 for node in updated:
424 445 TxnNodeProcessor(node, txn_root).update()
425 446 for node in removed:
426 447 TxnNodeProcessor(node, txn_root).remove()
427 448
428 449 commit_id = svn.repos.fs_commit_txn(repo, txn)
429 450
430 451 if timestamp:
431 452 apr_time = apr_time_t(timestamp)
432 453 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
433 454 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
434 455
435 456 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
436 457 return commit_id
437 458
438 459 def diff(self, wire, rev1, rev2, path1=None, path2=None,
439 460 ignore_whitespace=False, context=3):
440 461
441 462 wire.update(cache=False)
442 463 repo = self._factory.repo(wire)
443 464 diff_creator = SvnDiffer(
444 465 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
445 466 try:
446 467 return diff_creator.generate_diff()
447 468 except svn.core.SubversionException as e:
448 469 log.exception(
449 470 "Error during diff operation operation. "
450 471 "Path might not exist %s, %s" % (path1, path2))
451 472 return ""
452 473
453 474 @reraise_safe_exceptions
454 475 def is_large_file(self, wire, path):
455 476 return False
456 477
457 478 @reraise_safe_exceptions
458 479 def run_svn_command(self, wire, cmd, **opts):
459 480 path = wire.get('path', None)
460 481
461 482 if path and os.path.isdir(path):
462 483 opts['cwd'] = path
463 484
464 485 safe_call = False
465 486 if '_safe' in opts:
466 487 safe_call = True
467 488
468 489 svnenv = os.environ.copy()
469 490 svnenv.update(opts.pop('extra_env', {}))
470 491
471 492 _opts = {'env': svnenv, 'shell': False}
472 493
473 494 try:
474 495 _opts.update(opts)
475 496 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
476 497
477 498 return ''.join(p), ''.join(p.error)
478 499 except (EnvironmentError, OSError) as err:
479 500 cmd = ' '.join(cmd) # human friendly CMD
480 501 tb_err = ("Couldn't run svn command (%s).\n"
481 502 "Original error was:%s\n"
482 503 "Call options:%s\n"
483 504 % (cmd, err, _opts))
484 505 log.exception(tb_err)
485 506 if safe_call:
486 507 return '', err
487 508 else:
488 509 raise exceptions.VcsException()(tb_err)
489 510
490 511 @reraise_safe_exceptions
491 512 def install_hooks(self, wire, force=False):
492 513 from vcsserver.hook_utils import install_svn_hooks
493 514 repo_path = wire['path']
494 515 binary_dir = settings.BINARY_DIR
495 516 executable = None
496 517 if binary_dir:
497 518 executable = os.path.join(binary_dir, 'python')
498 519 return install_svn_hooks(
499 520 repo_path, executable=executable, force_create=force)
500 521
501 522 @reraise_safe_exceptions
502 523 def get_hooks_info(self, wire):
503 524 from vcsserver.hook_utils import (
504 525 get_svn_pre_hook_version, get_svn_post_hook_version)
505 526 repo_path = wire['path']
506 527 return {
507 528 'pre_version': get_svn_pre_hook_version(repo_path),
508 529 'post_version': get_svn_post_hook_version(repo_path),
509 530 }
510 531
511 532
512 533 class SvnDiffer(object):
513 534 """
514 535 Utility to create diffs based on difflib and the Subversion api
515 536 """
516 537
517 538 binary_content = False
518 539
519 540 def __init__(
520 541 self, repo, src_rev, src_path, tgt_rev, tgt_path,
521 542 ignore_whitespace, context):
522 543 self.repo = repo
523 544 self.ignore_whitespace = ignore_whitespace
524 545 self.context = context
525 546
526 547 fsobj = svn.repos.fs(repo)
527 548
528 549 self.tgt_rev = tgt_rev
529 550 self.tgt_path = tgt_path or ''
530 551 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
531 552 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
532 553
533 554 self.src_rev = src_rev
534 555 self.src_path = src_path or self.tgt_path
535 556 self.src_root = svn.fs.revision_root(fsobj, src_rev)
536 557 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
537 558
538 559 self._validate()
539 560
540 561 def _validate(self):
541 562 if (self.tgt_kind != svn.core.svn_node_none and
542 563 self.src_kind != svn.core.svn_node_none and
543 564 self.src_kind != self.tgt_kind):
544 565 # TODO: johbo: proper error handling
545 566 raise Exception(
546 567 "Source and target are not compatible for diff generation. "
547 568 "Source type: %s, target type: %s" %
548 569 (self.src_kind, self.tgt_kind))
549 570
550 571 def generate_diff(self):
551 572 buf = StringIO.StringIO()
552 573 if self.tgt_kind == svn.core.svn_node_dir:
553 574 self._generate_dir_diff(buf)
554 575 else:
555 576 self._generate_file_diff(buf)
556 577 return buf.getvalue()
557 578
558 579 def _generate_dir_diff(self, buf):
559 580 editor = DiffChangeEditor()
560 581 editor_ptr, editor_baton = svn.delta.make_editor(editor)
561 582 svn.repos.dir_delta2(
562 583 self.src_root,
563 584 self.src_path,
564 585 '', # src_entry
565 586 self.tgt_root,
566 587 self.tgt_path,
567 588 editor_ptr, editor_baton,
568 589 authorization_callback_allow_all,
569 590 False, # text_deltas
570 591 svn.core.svn_depth_infinity, # depth
571 592 False, # entry_props
572 593 False, # ignore_ancestry
573 594 )
574 595
575 596 for path, __, change in sorted(editor.changes):
576 597 self._generate_node_diff(
577 598 buf, change, path, self.tgt_path, path, self.src_path)
578 599
579 600 def _generate_file_diff(self, buf):
580 601 change = None
581 602 if self.src_kind == svn.core.svn_node_none:
582 603 change = "add"
583 604 elif self.tgt_kind == svn.core.svn_node_none:
584 605 change = "delete"
585 606 tgt_base, tgt_path = vcspath.split(self.tgt_path)
586 607 src_base, src_path = vcspath.split(self.src_path)
587 608 self._generate_node_diff(
588 609 buf, change, tgt_path, tgt_base, src_path, src_base)
589 610
590 611 def _generate_node_diff(
591 612 self, buf, change, tgt_path, tgt_base, src_path, src_base):
592 613
593 614 if self.src_rev == self.tgt_rev and tgt_base == src_base:
594 615 # makes consistent behaviour with git/hg to return empty diff if
595 616 # we compare same revisions
596 617 return
597 618
598 619 tgt_full_path = vcspath.join(tgt_base, tgt_path)
599 620 src_full_path = vcspath.join(src_base, src_path)
600 621
601 622 self.binary_content = False
602 623 mime_type = self._get_mime_type(tgt_full_path)
603 624
604 625 if mime_type and not mime_type.startswith('text'):
605 626 self.binary_content = True
606 627 buf.write("=" * 67 + '\n')
607 628 buf.write("Cannot display: file marked as a binary type.\n")
608 629 buf.write("svn:mime-type = %s\n" % mime_type)
609 630 buf.write("Index: %s\n" % (tgt_path, ))
610 631 buf.write("=" * 67 + '\n')
611 632 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
612 633 'tgt_path': tgt_path})
613 634
614 635 if change == 'add':
615 636 # TODO: johbo: SVN is missing a zero here compared to git
616 637 buf.write("new file mode 10644\n")
617 638
618 639 #TODO(marcink): intro to binary detection of svn patches
619 640 # if self.binary_content:
620 641 # buf.write('GIT binary patch\n')
621 642
622 643 buf.write("--- /dev/null\t(revision 0)\n")
623 644 src_lines = []
624 645 else:
625 646 if change == 'delete':
626 647 buf.write("deleted file mode 10644\n")
627 648
628 649 #TODO(marcink): intro to binary detection of svn patches
629 650 # if self.binary_content:
630 651 # buf.write('GIT binary patch\n')
631 652
632 653 buf.write("--- a/%s\t(revision %s)\n" % (
633 654 src_path, self.src_rev))
634 655 src_lines = self._svn_readlines(self.src_root, src_full_path)
635 656
636 657 if change == 'delete':
637 658 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
638 659 tgt_lines = []
639 660 else:
640 661 buf.write("+++ b/%s\t(revision %s)\n" % (
641 662 tgt_path, self.tgt_rev))
642 663 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
643 664
644 665 if not self.binary_content:
645 666 udiff = svn_diff.unified_diff(
646 667 src_lines, tgt_lines, context=self.context,
647 668 ignore_blank_lines=self.ignore_whitespace,
648 669 ignore_case=False,
649 670 ignore_space_changes=self.ignore_whitespace)
650 671 buf.writelines(udiff)
651 672
652 673 def _get_mime_type(self, path):
653 674 try:
654 675 mime_type = svn.fs.node_prop(
655 676 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
656 677 except svn.core.SubversionException:
657 678 mime_type = svn.fs.node_prop(
658 679 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
659 680 return mime_type
660 681
661 682 def _svn_readlines(self, fs_root, node_path):
662 683 if self.binary_content:
663 684 return []
664 685 node_kind = svn.fs.check_path(fs_root, node_path)
665 686 if node_kind not in (
666 687 svn.core.svn_node_file, svn.core.svn_node_symlink):
667 688 return []
668 689 content = svn.core.Stream(
669 690 svn.fs.file_contents(fs_root, node_path)).read()
670 691 return content.splitlines(True)
671 692
672 693
673 694
674 695 class DiffChangeEditor(svn.delta.Editor):
675 696 """
676 697 Records changes between two given revisions
677 698 """
678 699
679 700 def __init__(self):
680 701 self.changes = []
681 702
682 703 def delete_entry(self, path, revision, parent_baton, pool=None):
683 704 self.changes.append((path, None, 'delete'))
684 705
685 706 def add_file(
686 707 self, path, parent_baton, copyfrom_path, copyfrom_revision,
687 708 file_pool=None):
688 709 self.changes.append((path, 'file', 'add'))
689 710
690 711 def open_file(self, path, parent_baton, base_revision, file_pool=None):
691 712 self.changes.append((path, 'file', 'change'))
692 713
693 714
694 715 def authorization_callback_allow_all(root, path, pool):
695 716 return True
696 717
697 718
698 719 class TxnNodeProcessor(object):
699 720 """
700 721 Utility to process the change of one node within a transaction root.
701 722
702 723 It encapsulates the knowledge of how to add, update or remove
703 724 a node for a given transaction root. The purpose is to support the method
704 725 `SvnRemote.commit`.
705 726 """
706 727
707 728 def __init__(self, node, txn_root):
708 729 assert isinstance(node['path'], str)
709 730
710 731 self.node = node
711 732 self.txn_root = txn_root
712 733
713 734 def update(self):
714 735 self._ensure_parent_dirs()
715 736 self._add_file_if_node_does_not_exist()
716 737 self._update_file_content()
717 738 self._update_file_properties()
718 739
719 740 def remove(self):
720 741 svn.fs.delete(self.txn_root, self.node['path'])
721 742 # TODO: Clean up directory if empty
722 743
723 744 def _ensure_parent_dirs(self):
724 745 curdir = vcspath.dirname(self.node['path'])
725 746 dirs_to_create = []
726 747 while not self._svn_path_exists(curdir):
727 748 dirs_to_create.append(curdir)
728 749 curdir = vcspath.dirname(curdir)
729 750
730 751 for curdir in reversed(dirs_to_create):
731 752 log.debug('Creating missing directory "%s"', curdir)
732 753 svn.fs.make_dir(self.txn_root, curdir)
733 754
734 755 def _svn_path_exists(self, path):
735 756 path_status = svn.fs.check_path(self.txn_root, path)
736 757 return path_status != svn.core.svn_node_none
737 758
738 759 def _add_file_if_node_does_not_exist(self):
739 760 kind = svn.fs.check_path(self.txn_root, self.node['path'])
740 761 if kind == svn.core.svn_node_none:
741 762 svn.fs.make_file(self.txn_root, self.node['path'])
742 763
743 764 def _update_file_content(self):
744 765 assert isinstance(self.node['content'], str)
745 766 handler, baton = svn.fs.apply_textdelta(
746 767 self.txn_root, self.node['path'], None, None)
747 768 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
748 769
749 770 def _update_file_properties(self):
750 771 properties = self.node.get('properties', {})
751 772 for key, value in properties.iteritems():
752 773 svn.fs.change_node_prop(
753 774 self.txn_root, self.node['path'], key, value)
754 775
755 776
756 777 def apr_time_t(timestamp):
757 778 """
758 779 Convert a Python timestamp into APR timestamp type apr_time_t
759 780 """
760 781 return timestamp * 1E6
761 782
762 783
763 784 def svn_opt_revision_value_t(num):
764 785 """
765 786 Put `num` into a `svn_opt_revision_value_t` structure.
766 787 """
767 788 value = svn.core.svn_opt_revision_value_t()
768 789 value.number = num
769 790 revision = svn.core.svn_opt_revision_t()
770 791 revision.kind = svn.core.svn_opt_revision_number
771 792 revision.value = value
772 793 return revision
@@ -1,160 +1,160 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.pull(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 102
103 103 class TestReraiseSafeExceptions(object):
104 104
105 105 def test_method_decorated_with_reraise_safe_exceptions(self):
106 106 factory = Mock()
107 107 git_remote = git.GitRemote(factory)
108 108
109 109 def fake_function():
110 110 return None
111 111
112 112 decorator = git.reraise_safe_exceptions(fake_function)
113 113
114 114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 115 for method_name, method in methods:
116 116 if not method_name.startswith('_'):
117 117 assert method.im_func.__code__ == decorator.__code__
118 118
119 119 @pytest.mark.parametrize('side_effect, expected_type', [
120 120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
121 121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
122 122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
123 123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
124 124 (dulwich.errors.HangupException(), 'error'),
125 125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
126 126 ])
127 127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
128 128 @git.reraise_safe_exceptions
129 129 def fake_method():
130 130 raise side_effect
131 131
132 132 with pytest.raises(Exception) as exc_info:
133 133 fake_method()
134 134 assert type(exc_info.value) == Exception
135 135 assert exc_info.value._vcs_kind == expected_type
136 136
137 137
138 138 class TestDulwichRepoWrapper(object):
139 139 def test_calls_close_on_delete(self):
140 140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
141 141 with isdir_patcher:
142 142 repo = git.Repo('/tmp/abcde')
143 143 with patch.object(git.DulwichRepo, 'close') as close_mock:
144 144 del repo
145 145 close_mock.assert_called_once_with()
146 146
147 147
148 148 class TestGitFactory(object):
149 149 def test_create_repo_returns_dulwich_wrapper(self):
150 150
151 151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
152 152 mock.side_effect = {'repo_objects': ''}
153 153 factory = git.GitFactory()
154 154 wire = {
155 155 'path': '/tmp/abcde'
156 156 }
157 157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
158 158 with isdir_patcher:
159 159 result = factory._create_repo(wire, True)
160 160 assert isinstance(result, git.Repo)
@@ -1,127 +1,108 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 class TestHGLookup(object):
30 def setup(self):
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
37
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
42
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
45
46
47 29 class TestDiff(object):
48 30 def test_raising_safe_exception_when_lookup_failed(self):
49 repo = Mock()
31
50 32 factory = Mock()
51 factory.repo = Mock(return_value=repo)
52 33 hg_remote = hg.HgRemote(factory)
53 34 with patch('mercurial.patch.diff') as diff_mock:
54 35 diff_mock.side_effect = LookupError(
55 36 'deadbeef', 'index', 'message')
56 37 with pytest.raises(Exception) as exc_info:
57 38 hg_remote.diff(
58 wire=None, rev1='deadbeef', rev2='deadbee1',
39 wire={}, rev1='deadbeef', rev2='deadbee1',
59 40 file_filter=None, opt_git=True, opt_ignorews=True,
60 41 context=3)
61 42 assert type(exc_info.value) == Exception
62 43 assert exc_info.value._vcs_kind == 'lookup'
63 44
64 45
65 46 class TestReraiseSafeExceptions(object):
66 47 def test_method_decorated_with_reraise_safe_exceptions(self):
67 48 factory = Mock()
68 49 hg_remote = hg.HgRemote(factory)
69 50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 51 decorator = hg.reraise_safe_exceptions(None)
71 52 for method_name, method in methods:
72 53 if not method_name.startswith('_'):
73 54 assert method.im_func.__code__ == decorator.__code__
74 55
75 56 @pytest.mark.parametrize('side_effect, expected_type', [
76 57 (hgcompat.Abort(), 'abort'),
77 58 (hgcompat.InterventionRequired(), 'abort'),
78 59 (hgcompat.RepoLookupError(), 'lookup'),
79 60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 61 (hgcompat.RepoError(), 'error'),
81 62 (hgcompat.RequirementError(), 'requirement'),
82 63 ])
83 64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 65 @hg.reraise_safe_exceptions
85 66 def fake_method():
86 67 raise side_effect
87 68
88 69 with pytest.raises(Exception) as exc_info:
89 70 fake_method()
90 71 assert type(exc_info.value) == Exception
91 72 assert exc_info.value._vcs_kind == expected_type
92 73
93 74 def test_keeps_original_traceback(self):
94 75 @hg.reraise_safe_exceptions
95 76 def fake_method():
96 77 try:
97 78 raise hgcompat.Abort()
98 79 except:
99 80 self.original_traceback = traceback.format_tb(
100 81 sys.exc_info()[2])
101 82 raise
102 83
103 84 try:
104 85 fake_method()
105 86 except Exception:
106 87 new_traceback = traceback.format_tb(sys.exc_info()[2])
107 88
108 89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 90 assert new_traceback_tail == self.original_traceback
110 91
111 92 def test_maps_unknow_exceptions_to_unhandled(self):
112 93 @hg.reraise_safe_exceptions
113 94 def stub_method():
114 95 raise ValueError('stub')
115 96
116 97 with pytest.raises(Exception) as exc_info:
117 98 stub_method()
118 99 assert exc_info.value._vcs_kind == 'unhandled'
119 100
120 101 def test_does_not_map_known_exceptions(self):
121 102 @hg.reraise_safe_exceptions
122 103 def stub_method():
123 104 raise exceptions.LookupException()('stub')
124 105
125 106 with pytest.raises(Exception) as exc_info:
126 107 stub_method()
127 108 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,82 +1,87 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import mock
20 20 import pytest
21 21 import sys
22 22
23 23
24 24 class MockPopen(object):
25 25 def __init__(self, stderr):
26 26 self.stdout = io.BytesIO('')
27 27 self.stderr = io.BytesIO(stderr)
28 28 self.returncode = 1
29 29
30 30 def wait(self):
31 31 pass
32 32
33 33
34 34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 37 ])
38 38
39 39
40 40 @pytest.mark.parametrize('stderr,expected_reason', [
41 41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 42 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
43 43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 44 @pytest.mark.xfail(sys.platform == "cygwin",
45 45 reason="SVN not packaged for Cygwin")
46 46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 47 from vcsserver import svn
48 factory = mock.Mock()
49 factory.repo = mock.Mock(return_value=mock.Mock())
48 50
49 remote = svn.SvnRemote(None)
51 remote = svn.SvnRemote(factory)
50 52 remote.is_path_valid_repository = lambda wire, path: True
51 53
52 54 with mock.patch('subprocess.Popen',
53 55 return_value=MockPopen(stderr)):
54 56 with pytest.raises(Exception) as excinfo:
55 57 remote.import_remote_repository({'path': 'path'}, 'url')
56 58
57 59 expected_error_args = (
58 60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
59 61
60 62 assert excinfo.value.args == expected_error_args
61 63
62 64
63 65 def test_svn_libraries_can_be_imported():
64 66 import svn
65 67 import svn.client
66 68 assert svn.client is not None
67 69
68 70
69 71 @pytest.mark.parametrize('example_url, parts', [
70 72 ('http://server.com', (None, None, 'http://server.com')),
71 73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
72 74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
73 75 ('<script>', (None, None, '<script>')),
74 76 ('http://', (None, None, 'http://')),
75 77 ])
76 78 def test_username_password_extraction_from_url(example_url, parts):
77 79 from vcsserver import svn
78 80
79 remote = svn.SvnRemote(None)
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
84 remote = svn.SvnRemote(factory)
80 85 remote.is_path_valid_repository = lambda wire, path: True
81 86
82 87 assert remote.get_url_and_credentials(example_url) == parts
General Comments 0
You need to be logged in to leave comments. Login now