##// END OF EJS Templates
vcs: turned some of the commands to be context_uid based. Those selected...
marcink -
r746:56295f10 default
parent child Browse files
Show More
@@ -1,994 +1,1092 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib
26 26 import urllib2
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from dulwich import index, objects
33 33 from dulwich.client import HttpGitClient, LocalGitClient
34 34 from dulwich.errors import (
35 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 36 MissingCommitError, ObjectMissing, HangupException,
37 37 UnexpectedCommandError)
38 38 from dulwich.repo import Repo as DulwichRepo
39 39 from dulwich.server import update_server_info
40 40
41 41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str
42 from vcsserver.utils import safe_str, safe_int
43 43 from vcsserver.base import RepoFactory, obfuscate_qs
44 44 from vcsserver.hgcompat import (
45 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 46 from vcsserver.git_lfs.lib import LFSOidStore
47 47
48 48 DIR_STAT = stat.S_IFDIR
49 49 FILE_MODE = stat.S_IFMT
50 50 GIT_LINK = objects.S_IFGITLINK
51 51 PEELED_REF_MARKER = '^{}'
52 52
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 def str_to_dulwich(value):
58 58 """
59 59 Dulwich 0.10.1a requires `unicode` objects to be passed in.
60 60 """
61 61 return value.decode(settings.WIRE_ENCODING)
62 62
63 63
64 64 def reraise_safe_exceptions(func):
65 65 """Converts Dulwich exceptions to something neutral."""
66 66
67 67 @wraps(func)
68 68 def wrapper(*args, **kwargs):
69 69 try:
70 70 return func(*args, **kwargs)
71 71 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
72 72 exc = exceptions.LookupException(org_exc=e)
73 73 raise exc(safe_str(e))
74 74 except (HangupException, UnexpectedCommandError) as e:
75 75 exc = exceptions.VcsException(org_exc=e)
76 76 raise exc(safe_str(e))
77 77 except Exception as e:
78 78 # NOTE(marcink): because of how dulwich handles some exceptions
79 79 # (KeyError on empty repos), we cannot track this and catch all
80 80 # exceptions; it's an exception from other handlers
81 81 #if not hasattr(e, '_vcs_kind'):
82 82 #log.exception("Unhandled exception in git remote call")
83 83 #raise_from_original(exceptions.UnhandledException)
84 84 raise
85 85 return wrapper
86 86
87 87
88 88 class Repo(DulwichRepo):
89 89 """
90 90 A wrapper for dulwich Repo class.
91 91
92 92 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
93 93 "Too many open files" error. We need to close all opened file descriptors
94 94 once the repo object is destroyed.
95 95 """
96 96 def __del__(self):
97 97 if hasattr(self, 'object_store'):
98 98 self.close()
99 99
100 100
101 101 class Repository(LibGit2Repo):
102 102
103 103 def __enter__(self):
104 104 return self
105 105
106 106 def __exit__(self, exc_type, exc_val, exc_tb):
107 107 self.free()
108 108
109 109
110 110 class GitFactory(RepoFactory):
111 111 repo_type = 'git'
112 112
113 113 def _create_repo(self, wire, create, use_libgit2=False):
114 114 if use_libgit2:
115 115 return Repository(wire['path'])
116 116 else:
117 117 repo_path = str_to_dulwich(wire['path'])
118 118 return Repo(repo_path)
119 119
120 120 def repo(self, wire, create=False, use_libgit2=False):
121 121 """
122 122 Get a repository instance for the given path.
123 123 """
124 124 return self._create_repo(wire, create, use_libgit2)
125 125
126 126 def repo_libgit2(self, wire):
127 127 return self.repo(wire, use_libgit2=True)
128 128
129 129
130 130 class GitRemote(object):
131 EMPTY_COMMIT = '0' * 40
131 132
132 133 def __init__(self, factory):
133 134 self._factory = factory
134 135 self._bulk_methods = {
135 136 "date": self.date,
136 137 "author": self.author,
137 138 "branch": self.branch,
138 139 "message": self.message,
139 140 "parents": self.parents,
140 141 "_commit": self.revision,
141 142 }
142 143 self.region = self._factory._cache_region
143 144
144 145 def _wire_to_config(self, wire):
145 146 if 'config' in wire:
146 147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 148 return {}
148 149
149 150 def _remote_conf(self, config):
150 151 params = [
151 152 '-c', 'core.askpass=""',
152 153 ]
153 154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 155 if ssl_cert_dir:
155 156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 157 return params
157 158
158 159 def _cache_on(self, wire):
159 160 context = wire.get('context', '')
160 161 context_uid = '{}'.format(context)
161 162 repo_id = wire.get('repo_id', '')
162 163 cache = wire.get('cache', True)
163 164 cache_on = context and cache
164 165 return cache_on, context_uid, repo_id
165 166
166 167 @reraise_safe_exceptions
167 168 def discover_git_version(self):
168 169 stdout, _ = self.run_git_command(
169 170 {}, ['--version'], _bare=True, _safe=True)
170 171 prefix = 'git version'
171 172 if stdout.startswith(prefix):
172 173 stdout = stdout[len(prefix):]
173 174 return stdout.strip()
174 175
175 176 @reraise_safe_exceptions
176 177 def is_empty(self, wire):
177 178 repo_init = self._factory.repo_libgit2(wire)
178 179 with repo_init as repo:
179 180
180 181 try:
181 182 has_head = repo.head.name
182 183 if has_head:
183 184 return False
184 185
185 186 # NOTE(marcink): check again using more expensive method
186 187 return repo.is_empty
187 188 except Exception:
188 189 pass
189 190
190 191 return True
191 192
192 193 @reraise_safe_exceptions
193 def add_object(self, wire, content):
194 repo_init = self._factory.repo_libgit2(wire)
195 with repo_init as repo:
196 blob = objects.Blob()
197 blob.set_raw_string(content)
198 repo.object_store.add_object(blob)
199 return blob.id
200
201 @reraise_safe_exceptions
202 194 def assert_correct_path(self, wire):
203 195 cache_on, context_uid, repo_id = self._cache_on(wire)
204 196 @self.region.conditional_cache_on_arguments(condition=cache_on)
205 197 def _assert_correct_path(_context_uid, _repo_id):
206 198 try:
207 199 repo_init = self._factory.repo_libgit2(wire)
208 200 with repo_init as repo:
209 201 pass
210 202 except pygit2.GitError:
211 203 path = wire.get('path')
212 204 tb = traceback.format_exc()
213 205 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
214 206 return False
215 207
216 208 return True
217 209 return _assert_correct_path(context_uid, repo_id)
218 210
219 211 @reraise_safe_exceptions
220 212 def bare(self, wire):
221 213 repo_init = self._factory.repo_libgit2(wire)
222 214 with repo_init as repo:
223 215 return repo.is_bare
224 216
225 217 @reraise_safe_exceptions
226 218 def blob_as_pretty_string(self, wire, sha):
227 219 repo_init = self._factory.repo_libgit2(wire)
228 220 with repo_init as repo:
229 221 blob_obj = repo[sha]
230 222 blob = blob_obj.data
231 223 return blob
232 224
233 225 @reraise_safe_exceptions
234 226 def blob_raw_length(self, wire, sha):
235 227 cache_on, context_uid, repo_id = self._cache_on(wire)
236 228 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 def _blob_raw_length(_context_uid, _repo_id, _sha):
229 def _blob_raw_length(_repo_id, _sha):
238 230
239 231 repo_init = self._factory.repo_libgit2(wire)
240 232 with repo_init as repo:
241 233 blob = repo[sha]
242 234 return blob.size
243 235
244 return _blob_raw_length(context_uid, repo_id, sha)
236 return _blob_raw_length(repo_id, sha)
245 237
246 238 def _parse_lfs_pointer(self, raw_content):
247 239
248 240 spec_string = 'version https://git-lfs.github.com/spec'
249 241 if raw_content and raw_content.startswith(spec_string):
250 242 pattern = re.compile(r"""
251 243 (?:\n)?
252 244 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
253 245 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
254 246 ^size[ ](?P<oid_size>[0-9]+)\n
255 247 (?:\n)?
256 248 """, re.VERBOSE | re.MULTILINE)
257 249 match = pattern.match(raw_content)
258 250 if match:
259 251 return match.groupdict()
260 252
261 253 return {}
262 254
263 255 @reraise_safe_exceptions
264 def is_large_file(self, wire, sha):
256 def is_large_file(self, wire, commit_id):
265 257
266 258 cache_on, context_uid, repo_id = self._cache_on(wire)
267 259 @self.region.conditional_cache_on_arguments(condition=cache_on)
268 def _is_large_file(_context_uid, _repo_id, _sha):
260 def _is_large_file(_repo_id, _sha):
269 261 repo_init = self._factory.repo_libgit2(wire)
270 262 with repo_init as repo:
271 blob = repo[sha]
263 blob = repo[commit_id]
272 264 if blob.is_binary:
273 265 return {}
274 266
275 267 return self._parse_lfs_pointer(blob.data)
276 268
277 return _is_large_file(context_uid, repo_id, sha)
269 return _is_large_file(repo_id, commit_id)
278 270
279 271 @reraise_safe_exceptions
280 272 def in_largefiles_store(self, wire, oid):
281 273 conf = self._wire_to_config(wire)
282 274 repo_init = self._factory.repo_libgit2(wire)
283 275 with repo_init as repo:
284 276 repo_name = repo.path
285 277
286 278 store_location = conf.get('vcs_git_lfs_store_location')
287 279 if store_location:
288 280
289 281 store = LFSOidStore(
290 282 oid=oid, repo=repo_name, store_location=store_location)
291 283 return store.has_oid()
292 284
293 285 return False
294 286
295 287 @reraise_safe_exceptions
296 288 def store_path(self, wire, oid):
297 289 conf = self._wire_to_config(wire)
298 290 repo_init = self._factory.repo_libgit2(wire)
299 291 with repo_init as repo:
300 292 repo_name = repo.path
301 293
302 294 store_location = conf.get('vcs_git_lfs_store_location')
303 295 if store_location:
304 296 store = LFSOidStore(
305 297 oid=oid, repo=repo_name, store_location=store_location)
306 298 return store.oid_path
307 299 raise ValueError('Unable to fetch oid with path {}'.format(oid))
308 300
309 301 @reraise_safe_exceptions
310 302 def bulk_request(self, wire, rev, pre_load):
311 303 cache_on, context_uid, repo_id = self._cache_on(wire)
312 304 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 def _bulk_request(_context_uid, _repo_id, _rev, _pre_load):
305 def _bulk_request(_repo_id, _rev, _pre_load):
314 306 result = {}
315 307 for attr in pre_load:
316 308 try:
317 309 method = self._bulk_methods[attr]
318 310 args = [wire, rev]
319 311 result[attr] = method(*args)
320 312 except KeyError as e:
321 313 raise exceptions.VcsException(e)(
322 314 "Unknown bulk attribute: %s" % attr)
323 315 return result
324 316
325 return _bulk_request(context_uid, repo_id, rev, sorted(pre_load))
317 return _bulk_request(repo_id, rev, sorted(pre_load))
326 318
327 319 def _build_opener(self, url):
328 320 handlers = []
329 321 url_obj = url_parser(url)
330 322 _, authinfo = url_obj.authinfo()
331 323
332 324 if authinfo:
333 325 # create a password manager
334 326 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
335 327 passmgr.add_password(*authinfo)
336 328
337 329 handlers.extend((httpbasicauthhandler(passmgr),
338 330 httpdigestauthhandler(passmgr)))
339 331
340 332 return urllib2.build_opener(*handlers)
341 333
342 334 def _type_id_to_name(self, type_id):
343 335 return {
344 336 1: b'commit',
345 337 2: b'tree',
346 338 3: b'blob',
347 339 4: b'tag'
348 340 }[type_id]
349 341
350 342 @reraise_safe_exceptions
351 343 def check_url(self, url, config):
352 344 url_obj = url_parser(url)
353 345 test_uri, _ = url_obj.authinfo()
354 346 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
355 347 url_obj.query = obfuscate_qs(url_obj.query)
356 348 cleaned_uri = str(url_obj)
357 349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
358 350
359 351 if not test_uri.endswith('info/refs'):
360 352 test_uri = test_uri.rstrip('/') + '/info/refs'
361 353
362 354 o = self._build_opener(url)
363 355 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
364 356
365 357 q = {"service": 'git-upload-pack'}
366 358 qs = '?%s' % urllib.urlencode(q)
367 359 cu = "%s%s" % (test_uri, qs)
368 360 req = urllib2.Request(cu, None, {})
369 361
370 362 try:
371 363 log.debug("Trying to open URL %s", cleaned_uri)
372 364 resp = o.open(req)
373 365 if resp.code != 200:
374 366 raise exceptions.URLError()('Return Code is not 200')
375 367 except Exception as e:
376 368 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
377 369 # means it cannot be cloned
378 370 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
379 371
380 372 # now detect if it's proper git repo
381 373 gitdata = resp.read()
382 374 if 'service=git-upload-pack' in gitdata:
383 375 pass
384 376 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
385 377 # old style git can return some other format !
386 378 pass
387 379 else:
388 380 raise exceptions.URLError()(
389 381 "url [%s] does not look like an git" % (cleaned_uri,))
390 382
391 383 return True
392 384
393 385 @reraise_safe_exceptions
394 386 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
395 387 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
396 388 remote_refs = self.pull(wire, url, apply_refs=False)
397 389 repo = self._factory.repo(wire)
398 390 if isinstance(valid_refs, list):
399 391 valid_refs = tuple(valid_refs)
400 392
401 393 for k in remote_refs:
402 394 # only parse heads/tags and skip so called deferred tags
403 395 if k.startswith(valid_refs) and not k.endswith(deferred):
404 396 repo[k] = remote_refs[k]
405 397
406 398 if update_after_clone:
407 399 # we want to checkout HEAD
408 400 repo["HEAD"] = remote_refs["HEAD"]
409 401 index.build_index_from_tree(repo.path, repo.index_path(),
410 402 repo.object_store, repo["HEAD"].tree)
411 403
412 404 @reraise_safe_exceptions
413 405 def branch(self, wire, commit_id):
414 406 cache_on, context_uid, repo_id = self._cache_on(wire)
415 407 cache_on = False
416 408 @self.region.conditional_cache_on_arguments(condition=cache_on)
417 409 def _branch(_context_uid, _repo_id, _commit_id):
418 410 regex = re.compile('^refs/heads')
419 411
420 412 def filter_with(ref):
421 413 return regex.match(ref[0]) and ref[1] == _commit_id
422 414
423 415 branches = filter(filter_with, self.get_refs(wire).items())
424 416 return [x[0].split('refs/heads/')[-1] for x in branches]
425 417
426 418 return _branch(context_uid, repo_id, commit_id)
427 419
428 420 @reraise_safe_exceptions
429 421 def commit_branches(self, wire, commit_id):
430 422 cache_on, context_uid, repo_id = self._cache_on(wire)
431 423 @self.region.conditional_cache_on_arguments(condition=cache_on)
432 424 def _commit_branches(_context_uid, _repo_id, _commit_id):
433 425 repo_init = self._factory.repo_libgit2(wire)
434 426 with repo_init as repo:
435 427 branches = [x for x in repo.branches.with_commit(_commit_id)]
436 428 return branches
437 429
438 430 return _commit_branches(context_uid, repo_id, commit_id)
439 431
432 @reraise_safe_exceptions
433 def add_object(self, wire, content):
434 repo_init = self._factory.repo_libgit2(wire)
435 with repo_init as repo:
436 blob = objects.Blob()
437 blob.set_raw_string(content)
438 repo.object_store.add_object(blob)
439 return blob.id
440
440 441 # TODO: this is quite complex, check if that can be simplified
441 442 @reraise_safe_exceptions
442 443 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
443 444 repo = self._factory.repo(wire)
444 445 object_store = repo.object_store
445 446
446 447 # Create tree and populates it with blobs
447 448 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
448 449
449 450 for node in updated:
450 451 # Compute subdirs if needed
451 452 dirpath, nodename = vcspath.split(node['path'])
452 453 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
453 454 parent = commit_tree
454 455 ancestors = [('', parent)]
455 456
456 457 # Tries to dig for the deepest existing tree
457 458 while dirnames:
458 459 curdir = dirnames.pop(0)
459 460 try:
460 461 dir_id = parent[curdir][1]
461 462 except KeyError:
462 463 # put curdir back into dirnames and stops
463 464 dirnames.insert(0, curdir)
464 465 break
465 466 else:
466 467 # If found, updates parent
467 468 parent = repo[dir_id]
468 469 ancestors.append((curdir, parent))
469 470 # Now parent is deepest existing tree and we need to create
470 471 # subtrees for dirnames (in reverse order)
471 472 # [this only applies for nodes from added]
472 473 new_trees = []
473 474
474 475 blob = objects.Blob.from_string(node['content'])
475 476
476 477 if dirnames:
477 478 # If there are trees which should be created we need to build
478 479 # them now (in reverse order)
479 480 reversed_dirnames = list(reversed(dirnames))
480 481 curtree = objects.Tree()
481 482 curtree[node['node_path']] = node['mode'], blob.id
482 483 new_trees.append(curtree)
483 484 for dirname in reversed_dirnames[:-1]:
484 485 newtree = objects.Tree()
485 486 newtree[dirname] = (DIR_STAT, curtree.id)
486 487 new_trees.append(newtree)
487 488 curtree = newtree
488 489 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
489 490 else:
490 491 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
491 492
492 493 new_trees.append(parent)
493 494 # Update ancestors
494 495 reversed_ancestors = reversed(
495 496 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
496 497 for parent, tree, path in reversed_ancestors:
497 498 parent[path] = (DIR_STAT, tree.id)
498 499 object_store.add_object(tree)
499 500
500 501 object_store.add_object(blob)
501 502 for tree in new_trees:
502 503 object_store.add_object(tree)
503 504
504 505 for node_path in removed:
505 506 paths = node_path.split('/')
506 507 tree = commit_tree
507 508 trees = [tree]
508 509 # Traverse deep into the forest...
509 510 for path in paths:
510 511 try:
511 512 obj = repo[tree[path][1]]
512 513 if isinstance(obj, objects.Tree):
513 514 trees.append(obj)
514 515 tree = obj
515 516 except KeyError:
516 517 break
517 518 # Cut down the blob and all rotten trees on the way back...
518 519 for path, tree in reversed(zip(paths, trees)):
519 520 del tree[path]
520 521 if tree:
521 522 # This tree still has elements - don't remove it or any
522 523 # of its parents
523 524 break
524 525
525 526 object_store.add_object(commit_tree)
526 527
527 528 # Create commit
528 529 commit = objects.Commit()
529 530 commit.tree = commit_tree.id
530 531 for k, v in commit_data.iteritems():
531 532 setattr(commit, k, v)
532 533 object_store.add_object(commit)
533 534
534 535 self.create_branch(wire, branch, commit.id)
535 536
536 537 # dulwich set-ref
537 538 ref = 'refs/heads/%s' % branch
538 539 repo.refs[ref] = commit.id
539 540
540 541 return commit.id
541 542
542 543 @reraise_safe_exceptions
543 544 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
544 545 if url != 'default' and '://' not in url:
545 546 client = LocalGitClient(url)
546 547 else:
547 548 url_obj = url_parser(url)
548 549 o = self._build_opener(url)
549 550 url, _ = url_obj.authinfo()
550 551 client = HttpGitClient(base_url=url, opener=o)
551 552 repo = self._factory.repo(wire)
552 553
553 554 determine_wants = repo.object_store.determine_wants_all
554 555 if refs:
555 556 def determine_wants_requested(references):
556 557 return [references[r] for r in references if r in refs]
557 558 determine_wants = determine_wants_requested
558 559
559 560 try:
560 561 remote_refs = client.fetch(
561 562 path=url, target=repo, determine_wants=determine_wants)
562 563 except NotGitRepository as e:
563 564 log.warning(
564 565 'Trying to fetch from "%s" failed, not a Git repository.', url)
565 566 # Exception can contain unicode which we convert
566 567 raise exceptions.AbortException(e)(repr(e))
567 568
568 569 # mikhail: client.fetch() returns all the remote refs, but fetches only
569 570 # refs filtered by `determine_wants` function. We need to filter result
570 571 # as well
571 572 if refs:
572 573 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
573 574
574 575 if apply_refs:
575 576 # TODO: johbo: Needs proper test coverage with a git repository
576 577 # that contains a tag object, so that we would end up with
577 578 # a peeled ref at this point.
578 579 for k in remote_refs:
579 580 if k.endswith(PEELED_REF_MARKER):
580 581 log.debug("Skipping peeled reference %s", k)
581 582 continue
582 583 repo[k] = remote_refs[k]
583 584
584 585 if refs and not update_after:
585 586 # mikhail: explicitly set the head to the last ref.
586 587 repo['HEAD'] = remote_refs[refs[-1]]
587 588
588 589 if update_after:
589 590 # we want to checkout HEAD
590 591 repo["HEAD"] = remote_refs["HEAD"]
591 592 index.build_index_from_tree(repo.path, repo.index_path(),
592 593 repo.object_store, repo["HEAD"].tree)
593 594 return remote_refs
594 595
595 596 @reraise_safe_exceptions
596 597 def sync_fetch(self, wire, url, refs=None):
597 598 repo = self._factory.repo(wire)
598 599 if refs and not isinstance(refs, (list, tuple)):
599 600 refs = [refs]
600 601 config = self._wire_to_config(wire)
601 602 # get all remote refs we'll use to fetch later
602 603 output, __ = self.run_git_command(
603 604 wire, ['ls-remote', url], fail_on_stderr=False,
604 605 _copts=self._remote_conf(config),
605 606 extra_env={'GIT_TERMINAL_PROMPT': '0'})
606 607
607 608 remote_refs = collections.OrderedDict()
608 609 fetch_refs = []
609 610
610 611 for ref_line in output.splitlines():
611 612 sha, ref = ref_line.split('\t')
612 613 sha = sha.strip()
613 614 if ref in remote_refs:
614 615 # duplicate, skip
615 616 continue
616 617 if ref.endswith(PEELED_REF_MARKER):
617 618 log.debug("Skipping peeled reference %s", ref)
618 619 continue
619 620 # don't sync HEAD
620 621 if ref in ['HEAD']:
621 622 continue
622 623
623 624 remote_refs[ref] = sha
624 625
625 626 if refs and sha in refs:
626 627 # we filter fetch using our specified refs
627 628 fetch_refs.append('{}:{}'.format(ref, ref))
628 629 elif not refs:
629 630 fetch_refs.append('{}:{}'.format(ref, ref))
630 631 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
631 632 if fetch_refs:
632 633 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
633 634 fetch_refs_chunks = list(chunk)
634 635 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
635 636 _out, _err = self.run_git_command(
636 637 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
637 638 fail_on_stderr=False,
638 639 _copts=self._remote_conf(config),
639 640 extra_env={'GIT_TERMINAL_PROMPT': '0'})
640 641
641 642 return remote_refs
642 643
643 644 @reraise_safe_exceptions
644 645 def sync_push(self, wire, url, refs=None):
645 646 if not self.check_url(url, wire):
646 647 return
647 648 config = self._wire_to_config(wire)
648 649 self._factory.repo(wire)
649 650 self.run_git_command(
650 651 wire, ['push', url, '--mirror'], fail_on_stderr=False,
651 652 _copts=self._remote_conf(config),
652 653 extra_env={'GIT_TERMINAL_PROMPT': '0'})
653 654
654 655 @reraise_safe_exceptions
655 656 def get_remote_refs(self, wire, url):
656 657 repo = Repo(url)
657 658 return repo.get_refs()
658 659
659 660 @reraise_safe_exceptions
660 661 def get_description(self, wire):
661 662 repo = self._factory.repo(wire)
662 663 return repo.get_description()
663 664
664 665 @reraise_safe_exceptions
665 666 def get_missing_revs(self, wire, rev1, rev2, path2):
666 667 repo = self._factory.repo(wire)
667 668 LocalGitClient(thin_packs=False).fetch(path2, repo)
668 669
669 670 wire_remote = wire.copy()
670 671 wire_remote['path'] = path2
671 672 repo_remote = self._factory.repo(wire_remote)
672 673 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
673 674
674 675 revs = [
675 676 x.commit.id
676 677 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
677 678 return revs
678 679
679 680 @reraise_safe_exceptions
680 681 def get_object(self, wire, sha):
681 682
682 683 cache_on, context_uid, repo_id = self._cache_on(wire)
683 684 @self.region.conditional_cache_on_arguments(condition=cache_on)
684 685 def _get_object(_context_uid, _repo_id, _sha):
685 686 repo_init = self._factory.repo_libgit2(wire)
686 687 with repo_init as repo:
687 688
688 689 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
689 690 try:
690 691 commit = repo.revparse_single(sha)
691 692 except (KeyError, ValueError) as e:
692 693 raise exceptions.LookupException(e)(missing_commit_err)
693 694
694 695 if isinstance(commit, pygit2.Tag):
695 696 commit = repo.get(commit.target)
696 697
697 698 # check for dangling commit
698 699 branches = [x for x in repo.branches.with_commit(commit.hex)]
699 700 if not branches:
700 701 raise exceptions.LookupException(None)(missing_commit_err)
701 702
702 703 commit_id = commit.hex
703 704 type_id = commit.type
704 705
705 706 return {
706 707 'id': commit_id,
707 708 'type': self._type_id_to_name(type_id),
708 709 'commit_id': commit_id,
709 710 'idx': 0
710 711 }
711 712
712 713 return _get_object(context_uid, repo_id, sha)
713 714
714 715 @reraise_safe_exceptions
715 716 def get_refs(self, wire):
716 717 cache_on, context_uid, repo_id = self._cache_on(wire)
717 718 @self.region.conditional_cache_on_arguments(condition=cache_on)
718 719 def _get_refs(_context_uid, _repo_id):
719 720
720 721 repo_init = self._factory.repo_libgit2(wire)
721 722 with repo_init as repo:
722 723 regex = re.compile('^refs/(heads|tags)/')
723 724 return {x.name: x.target.hex for x in
724 725 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
725 726
726 727 return _get_refs(context_uid, repo_id)
727 728
728 729 @reraise_safe_exceptions
729 730 def get_branch_pointers(self, wire):
730 731 cache_on, context_uid, repo_id = self._cache_on(wire)
731 732 @self.region.conditional_cache_on_arguments(condition=cache_on)
732 733 def _get_branch_pointers(_context_uid, _repo_id):
733 734
734 735 repo_init = self._factory.repo_libgit2(wire)
735 736 regex = re.compile('^refs/heads')
736 737 with repo_init as repo:
737 738 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
738 739 return {x.target.hex: x.shorthand for x in branches}
739 740
740 741 return _get_branch_pointers(context_uid, repo_id)
741 742
742 743 @reraise_safe_exceptions
743 744 def head(self, wire, show_exc=True):
744 745 cache_on, context_uid, repo_id = self._cache_on(wire)
745 746 @self.region.conditional_cache_on_arguments(condition=cache_on)
746 747 def _head(_context_uid, _repo_id, _show_exc):
747 748 repo_init = self._factory.repo_libgit2(wire)
748 749 with repo_init as repo:
749 750 try:
750 751 return repo.head.peel().hex
751 752 except Exception:
752 753 if show_exc:
753 754 raise
754 755 return _head(context_uid, repo_id, show_exc)
755 756
756 757 @reraise_safe_exceptions
757 758 def init(self, wire):
758 759 repo_path = str_to_dulwich(wire['path'])
759 760 self.repo = Repo.init(repo_path)
760 761
761 762 @reraise_safe_exceptions
762 763 def init_bare(self, wire):
763 764 repo_path = str_to_dulwich(wire['path'])
764 765 self.repo = Repo.init_bare(repo_path)
765 766
766 767 @reraise_safe_exceptions
767 768 def revision(self, wire, rev):
768 769
769 770 cache_on, context_uid, repo_id = self._cache_on(wire)
770 771 @self.region.conditional_cache_on_arguments(condition=cache_on)
771 772 def _revision(_context_uid, _repo_id, _rev):
772 773 repo_init = self._factory.repo_libgit2(wire)
773 774 with repo_init as repo:
774 775 commit = repo[rev]
775 776 obj_data = {
776 777 'id': commit.id.hex,
777 778 }
778 779 # tree objects itself don't have tree_id attribute
779 780 if hasattr(commit, 'tree_id'):
780 781 obj_data['tree'] = commit.tree_id.hex
781 782
782 783 return obj_data
783 784 return _revision(context_uid, repo_id, rev)
784 785
785 786 @reraise_safe_exceptions
786 def date(self, wire, rev):
787 def date(self, wire, commit_id):
788 cache_on, context_uid, repo_id = self._cache_on(wire)
789 @self.region.conditional_cache_on_arguments(condition=cache_on)
790 def _date(_repo_id, _commit_id):
787 791 repo_init = self._factory.repo_libgit2(wire)
788 792 with repo_init as repo:
789 commit = repo[rev]
793 commit = repo[commit_id]
790 794 # TODO(marcink): check dulwich difference of offset vs timezone
791 795 return [commit.commit_time, commit.commit_time_offset]
796 return _date(repo_id, commit_id)
792 797
793 798 @reraise_safe_exceptions
794 def author(self, wire, rev):
799 def author(self, wire, commit_id):
800 cache_on, context_uid, repo_id = self._cache_on(wire)
801 @self.region.conditional_cache_on_arguments(condition=cache_on)
802 def _author(_repo_id, _commit_id):
795 803 repo_init = self._factory.repo_libgit2(wire)
796 804 with repo_init as repo:
797 commit = repo[rev]
805 commit = repo[commit_id]
798 806 if commit.author.email:
799 807 return u"{} <{}>".format(commit.author.name, commit.author.email)
800 808
801 809 return u"{}".format(commit.author.raw_name)
810 return _author(repo_id, commit_id)
802 811
803 812 @reraise_safe_exceptions
804 def message(self, wire, rev):
813 def message(self, wire, commit_id):
814 cache_on, context_uid, repo_id = self._cache_on(wire)
815 @self.region.conditional_cache_on_arguments(condition=cache_on)
816 def _message(_repo_id, _commit_id):
805 817 repo_init = self._factory.repo_libgit2(wire)
806 818 with repo_init as repo:
807 commit = repo[rev]
819 commit = repo[commit_id]
808 820 return commit.message
821 return _message(repo_id, commit_id)
809 822
810 823 @reraise_safe_exceptions
811 def parents(self, wire, rev):
824 def parents(self, wire, commit_id):
825 cache_on, context_uid, repo_id = self._cache_on(wire)
826 @self.region.conditional_cache_on_arguments(condition=cache_on)
827 def _parents(_repo_id, _commit_id):
828 repo_init = self._factory.repo_libgit2(wire)
829 with repo_init as repo:
830 commit = repo[commit_id]
831 return [x.hex for x in commit.parent_ids]
832 return _parents(repo_id, commit_id)
833
834 @reraise_safe_exceptions
835 def children(self, wire, commit_id):
812 836 cache_on, context_uid, repo_id = self._cache_on(wire)
813 837 @self.region.conditional_cache_on_arguments(condition=cache_on)
814 def _parents(_context_uid, _repo_id, _rev):
815 repo_init = self._factory.repo_libgit2(wire)
816 with repo_init as repo:
817 commit = repo[rev]
818 return [x.hex for x in commit.parent_ids]
819 return _parents(context_uid, repo_id, rev)
838 def _children(_repo_id, _commit_id):
839 output, __ = self.run_git_command(
840 wire, ['rev-list', '--all', '--children'])
841
842 child_ids = []
843 pat = re.compile(r'^%s' % commit_id)
844 for l in output.splitlines():
845 if pat.match(l):
846 found_ids = l.split(' ')[1:]
847 child_ids.extend(found_ids)
848
849 return child_ids
850 return _children(repo_id, commit_id)
820 851
821 852 @reraise_safe_exceptions
822 853 def set_refs(self, wire, key, value):
823 854 repo_init = self._factory.repo_libgit2(wire)
824 855 with repo_init as repo:
825 856 repo.references.create(key, value, force=True)
826 857
827 858 @reraise_safe_exceptions
828 859 def create_branch(self, wire, branch_name, commit_id, force=False):
829 860 repo_init = self._factory.repo_libgit2(wire)
830 861 with repo_init as repo:
831 862 commit = repo[commit_id]
832 863
833 864 if force:
834 865 repo.branches.local.create(branch_name, commit, force=force)
835 866 elif not repo.branches.get(branch_name):
836 867 # create only if that branch isn't existing
837 868 repo.branches.local.create(branch_name, commit, force=force)
838 869
839 870 @reraise_safe_exceptions
840 871 def remove_ref(self, wire, key):
841 872 repo_init = self._factory.repo_libgit2(wire)
842 873 with repo_init as repo:
843 874 repo.references.delete(key)
844 875
845 876 @reraise_safe_exceptions
846 877 def tag_remove(self, wire, tag_name):
847 878 repo_init = self._factory.repo_libgit2(wire)
848 879 with repo_init as repo:
849 880 key = 'refs/tags/{}'.format(tag_name)
850 881 repo.references.delete(key)
851 882
852 883 @reraise_safe_exceptions
853 884 def tree_changes(self, wire, source_id, target_id):
854 885 # TODO(marcink): remove this seems it's only used by tests
855 886 repo = self._factory.repo(wire)
856 887 source = repo[source_id].tree if source_id else None
857 888 target = repo[target_id].tree
858 889 result = repo.object_store.tree_changes(source, target)
859 890 return list(result)
860 891
861 892 @reraise_safe_exceptions
862 893 def tree_and_type_for_path(self, wire, commit_id, path):
863 894
864 895 cache_on, context_uid, repo_id = self._cache_on(wire)
865 896 @self.region.conditional_cache_on_arguments(condition=cache_on)
866 897 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
867 898 repo_init = self._factory.repo_libgit2(wire)
868 899
869 900 with repo_init as repo:
870 901 commit = repo[commit_id]
871 902 try:
872 903 tree = commit.tree[path]
873 904 except KeyError:
874 905 return None, None, None
875 906
876 907 return tree.id.hex, tree.type, tree.filemode
877 908 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
878 909
879 910 @reraise_safe_exceptions
880 911 def tree_items(self, wire, tree_id):
881
882 912 cache_on, context_uid, repo_id = self._cache_on(wire)
883 913 @self.region.conditional_cache_on_arguments(condition=cache_on)
884 def _tree_items(_context_uid, _repo_id, _tree_id):
914 def _tree_items(_repo_id, _tree_id):
885 915
886 916 repo_init = self._factory.repo_libgit2(wire)
887 917 with repo_init as repo:
888 918 try:
889 919 tree = repo[tree_id]
890 920 except KeyError:
891 921 raise ObjectMissing('No tree with id: {}'.format(tree_id))
892 922
893 923 result = []
894 924 for item in tree:
895 925 item_sha = item.hex
896 926 item_mode = item.filemode
897 927 item_type = item.type
898 928
899 929 if item_type == 'commit':
900 930 # NOTE(marcink): submodules we translate to 'link' for backward compat
901 931 item_type = 'link'
902 932
903 933 result.append((item.name, item_mode, item_sha, item_type))
904 934 return result
905 return _tree_items(context_uid, repo_id, tree_id)
935 return _tree_items(repo_id, tree_id)
936
937 @reraise_safe_exceptions
938 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
939
940 flags = [
941 '-U%s' % context, '--full-index', '--binary', '-p',
942 '-M', '--abbrev=40']
943
944 if opt_ignorews:
945 flags.append('-w')
946
947 if commit_id_1 == self.EMPTY_COMMIT:
948 cmd = ['show'] + flags + [commit_id_2]
949 else:
950 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
951
952 if file_filter:
953 cmd.extend(['--', file_filter])
954
955 diff, __ = self.run_git_command(wire, cmd)
956 # If we used 'show' command, strip first few lines (until actual diff
957 # starts)
958 if commit_id_1 == self.EMPTY_COMMIT:
959 lines = diff.splitlines()
960 x = 0
961 for line in lines:
962 if line.startswith('diff'):
963 break
964 x += 1
965 # Append new line just like 'diff' command do
966 diff = '\n'.join(lines[x:]) + '\n'
967 return diff
968
969 @reraise_safe_exceptions
970 def node_history(self, wire, commit_id, path, limit):
971 cache_on, context_uid, repo_id = self._cache_on(wire)
972 @self.region.conditional_cache_on_arguments(condition=cache_on)
973 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
974 # optimize for n==1, rev-list is much faster for that use-case
975 if limit == 1:
976 cmd = ['rev-list', '-1', commit_id, '--', path]
977 else:
978 cmd = ['log']
979 if limit:
980 cmd.extend(['-n', str(safe_int(limit, 0))])
981 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
982
983 output, __ = self.run_git_command(wire, cmd)
984 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
985
986 return [x for x in commit_ids]
987 return _node_history(context_uid, repo_id, commit_id, path, limit)
988
989 @reraise_safe_exceptions
990 def node_annotate(self, wire, commit_id, path):
991
992 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
993 # -l ==> outputs long shas (and we need all 40 characters)
994 # --root ==> doesn't put '^' character for boundaries
995 # -r commit_id ==> blames for the given commit
996 output, __ = self.run_git_command(wire, cmd)
997
998 result = []
999 for i, blame_line in enumerate(output.split('\n')[:-1]):
1000 line_no = i + 1
1001 commit_id, line = re.split(r' ', blame_line, 1)
1002 result.append((line_no, commit_id, line))
1003 return result
906 1004
907 1005 @reraise_safe_exceptions
908 1006 def update_server_info(self, wire):
909 1007 repo = self._factory.repo(wire)
910 1008 update_server_info(repo)
911 1009
912 1010 @reraise_safe_exceptions
913 1011 def get_all_commit_ids(self, wire):
914 1012
915 1013 cache_on, context_uid, repo_id = self._cache_on(wire)
916 1014 @self.region.conditional_cache_on_arguments(condition=cache_on)
917 1015 def _get_all_commit_ids(_context_uid, _repo_id):
918 1016
919 1017 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
920 1018 try:
921 1019 output, __ = self.run_git_command(wire, cmd)
922 1020 return output.splitlines()
923 1021 except Exception:
924 1022 # Can be raised for empty repositories
925 1023 return []
926 1024 return _get_all_commit_ids(context_uid, repo_id)
927 1025
928 1026 @reraise_safe_exceptions
929 1027 def run_git_command(self, wire, cmd, **opts):
930 1028 path = wire.get('path', None)
931 1029
932 1030 if path and os.path.isdir(path):
933 1031 opts['cwd'] = path
934 1032
935 1033 if '_bare' in opts:
936 1034 _copts = []
937 1035 del opts['_bare']
938 1036 else:
939 1037 _copts = ['-c', 'core.quotepath=false', ]
940 1038 safe_call = False
941 1039 if '_safe' in opts:
942 1040 # no exc on failure
943 1041 del opts['_safe']
944 1042 safe_call = True
945 1043
946 1044 if '_copts' in opts:
947 1045 _copts.extend(opts['_copts'] or [])
948 1046 del opts['_copts']
949 1047
950 1048 gitenv = os.environ.copy()
951 1049 gitenv.update(opts.pop('extra_env', {}))
952 1050 # need to clean fix GIT_DIR !
953 1051 if 'GIT_DIR' in gitenv:
954 1052 del gitenv['GIT_DIR']
955 1053 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
956 1054 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
957 1055
958 1056 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
959 1057 _opts = {'env': gitenv, 'shell': False}
960 1058
961 1059 try:
962 1060 _opts.update(opts)
963 1061 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
964 1062
965 1063 return ''.join(p), ''.join(p.error)
966 1064 except (EnvironmentError, OSError) as err:
967 1065 cmd = ' '.join(cmd) # human friendly CMD
968 1066 tb_err = ("Couldn't run git command (%s).\n"
969 1067 "Original error was:%s\n"
970 1068 "Call options:%s\n"
971 1069 % (cmd, err, _opts))
972 1070 log.exception(tb_err)
973 1071 if safe_call:
974 1072 return '', err
975 1073 else:
976 1074 raise exceptions.VcsException()(tb_err)
977 1075
978 1076 @reraise_safe_exceptions
979 1077 def install_hooks(self, wire, force=False):
980 1078 from vcsserver.hook_utils import install_git_hooks
981 1079 bare = self.bare(wire)
982 1080 path = wire['path']
983 1081 return install_git_hooks(path, bare, force_create=force)
984 1082
985 1083 @reraise_safe_exceptions
986 1084 def get_hooks_info(self, wire):
987 1085 from vcsserver.hook_utils import (
988 1086 get_git_pre_hook_version, get_git_post_hook_version)
989 1087 bare = self.bare(wire)
990 1088 path = wire['path']
991 1089 return {
992 1090 'pre_version': get_git_pre_hook_version(path, bare),
993 1091 'post_version': get_git_post_hook_version(path, bare),
994 1092 }
@@ -1,958 +1,946 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30
31 31 import vcsserver
32 32 from vcsserver import exceptions
33 33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 34 from vcsserver.hgcompat import (
35 35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 39 RepoLookupError, InterventionRequired, RequirementError)
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 def make_ui_from_config(repo_config):
45 45
46 46 class LoggingUI(ui.ui):
47 47 def status(self, *msg, **opts):
48 48 log.info(' '.join(msg).rstrip('\n'))
49 49 super(LoggingUI, self).status(*msg, **opts)
50 50
51 51 def warn(self, *msg, **opts):
52 52 log.warn(' '.join(msg).rstrip('\n'))
53 53 super(LoggingUI, self).warn(*msg, **opts)
54 54
55 55 def error(self, *msg, **opts):
56 56 log.error(' '.join(msg).rstrip('\n'))
57 57 super(LoggingUI, self).error(*msg, **opts)
58 58
59 59 def note(self, *msg, **opts):
60 60 log.info(' '.join(msg).rstrip('\n'))
61 61 super(LoggingUI, self).note(*msg, **opts)
62 62
63 63 def debug(self, *msg, **opts):
64 64 log.debug(' '.join(msg).rstrip('\n'))
65 65 super(LoggingUI, self).debug(*msg, **opts)
66 66
67 67 baseui = LoggingUI()
68 68
69 69 # clean the baseui object
70 70 baseui._ocfg = hgconfig.config()
71 71 baseui._ucfg = hgconfig.config()
72 72 baseui._tcfg = hgconfig.config()
73 73
74 74 for section, option, value in repo_config:
75 75 baseui.setconfig(section, option, value)
76 76
77 77 # make our hgweb quiet so it doesn't print output
78 78 baseui.setconfig('ui', 'quiet', 'true')
79 79
80 80 baseui.setconfig('ui', 'paginate', 'never')
81 81 # for better Error reporting of Mercurial
82 82 baseui.setconfig('ui', 'message-output', 'stderr')
83 83
84 84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 85 # signal in a non-main thread, thus generating a ValueError.
86 86 baseui.setconfig('worker', 'numcpus', 1)
87 87
88 88 # If there is no config for the largefiles extension, we explicitly disable
89 89 # it here. This overrides settings from repositories hgrc file. Recent
90 90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 91 # repo.
92 92 if not baseui.hasconfig('extensions', 'largefiles'):
93 93 log.debug('Explicitly disable largefiles extension for repo.')
94 94 baseui.setconfig('extensions', 'largefiles', '!')
95 95
96 96 return baseui
97 97
98 98
99 99 def reraise_safe_exceptions(func):
100 100 """Decorator for converting mercurial exceptions to something neutral."""
101 101
102 102 def wrapper(*args, **kwargs):
103 103 try:
104 104 return func(*args, **kwargs)
105 105 except (Abort, InterventionRequired) as e:
106 106 raise_from_original(exceptions.AbortException(e))
107 107 except RepoLookupError as e:
108 108 raise_from_original(exceptions.LookupException(e))
109 109 except RequirementError as e:
110 110 raise_from_original(exceptions.RequirementException(e))
111 111 except RepoError as e:
112 112 raise_from_original(exceptions.VcsException(e))
113 113 except LookupError as e:
114 114 raise_from_original(exceptions.LookupException(e))
115 115 except Exception as e:
116 116 if not hasattr(e, '_vcs_kind'):
117 117 log.exception("Unhandled exception in hg remote call")
118 118 raise_from_original(exceptions.UnhandledException(e))
119 119
120 120 raise
121 121 return wrapper
122 122
123 123
124 124 class MercurialFactory(RepoFactory):
125 125 repo_type = 'hg'
126 126
127 127 def _create_config(self, config, hooks=True):
128 128 if not hooks:
129 129 hooks_to_clean = frozenset((
130 130 'changegroup.repo_size', 'preoutgoing.pre_pull',
131 131 'outgoing.pull_logger', 'prechangegroup.pre_push'))
132 132 new_config = []
133 133 for section, option, value in config:
134 134 if section == 'hooks' and option in hooks_to_clean:
135 135 continue
136 136 new_config.append((section, option, value))
137 137 config = new_config
138 138
139 139 baseui = make_ui_from_config(config)
140 140 return baseui
141 141
142 142 def _create_repo(self, wire, create):
143 143 baseui = self._create_config(wire["config"])
144 144 return instance(baseui, wire["path"], create)
145 145
146 146 def repo(self, wire, create=False):
147 147 """
148 148 Get a repository instance for the given path.
149 149 """
150 150 return self._create_repo(wire, create)
151 151
152 152
153 153 class HgRemote(object):
154 154
155 155 def __init__(self, factory):
156 156 self._factory = factory
157 157 self._bulk_methods = {
158 158 "affected_files": self.ctx_files,
159 159 "author": self.ctx_user,
160 160 "branch": self.ctx_branch,
161 161 "children": self.ctx_children,
162 162 "date": self.ctx_date,
163 163 "message": self.ctx_description,
164 164 "parents": self.ctx_parents,
165 165 "status": self.ctx_status,
166 166 "obsolete": self.ctx_obsolete,
167 167 "phase": self.ctx_phase,
168 168 "hidden": self.ctx_hidden,
169 169 "_file_paths": self.ctx_list,
170 170 }
171 171 self.region = self._factory._cache_region
172 172
173 173 def _get_ctx(self, repo, ref):
174 174 return get_ctx(repo, ref)
175 175
176 176 def _cache_on(self, wire):
177 177 context = wire.get('context', '')
178 178 context_uid = '{}'.format(context)
179 179 repo_id = wire.get('repo_id', '')
180 180 cache = wire.get('cache', True)
181 181 cache_on = context and cache
182 182 return cache_on, context_uid, repo_id
183 183
184 184 @reraise_safe_exceptions
185 185 def discover_hg_version(self):
186 186 from mercurial import util
187 187 return util.version()
188 188
189 189 @reraise_safe_exceptions
190 190 def is_empty(self, wire):
191 191 repo = self._factory.repo(wire)
192 192
193 193 try:
194 194 return len(repo) == 0
195 195 except Exception:
196 196 log.exception("failed to read object_store")
197 197 return False
198 198
199 199 @reraise_safe_exceptions
200 200 def archive_repo(self, archive_path, mtime, file_info, kind):
201 201 if kind == "tgz":
202 202 archiver = archival.tarit(archive_path, mtime, "gz")
203 203 elif kind == "tbz2":
204 204 archiver = archival.tarit(archive_path, mtime, "bz2")
205 205 elif kind == 'zip':
206 206 archiver = archival.zipit(archive_path, mtime)
207 207 else:
208 208 raise exceptions.ArchiveException()(
209 209 'Remote does not support: "%s".' % kind)
210 210
211 211 for f_path, f_mode, f_is_link, f_content in file_info:
212 212 archiver.addfile(f_path, f_mode, f_is_link, f_content)
213 213 archiver.done()
214 214
215 215 @reraise_safe_exceptions
216 216 def bookmarks(self, wire):
217 217 cache_on, context_uid, repo_id = self._cache_on(wire)
218 218 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 219 def _bookmarks(_context_uid, _repo_id):
220 220 repo = self._factory.repo(wire)
221 221 return dict(repo._bookmarks)
222 222
223 223 return _bookmarks(context_uid, repo_id)
224 224
225 225 @reraise_safe_exceptions
226 226 def branches(self, wire, normal, closed):
227 227 cache_on, context_uid, repo_id = self._cache_on(wire)
228 228 @self.region.conditional_cache_on_arguments(condition=cache_on)
229 229 def _branches(_context_uid, _repo_id, _normal, _closed):
230 230 repo = self._factory.repo(wire)
231 231 iter_branches = repo.branchmap().iterbranches()
232 232 bt = {}
233 233 for branch_name, _heads, tip, is_closed in iter_branches:
234 234 if normal and not is_closed:
235 235 bt[branch_name] = tip
236 236 if closed and is_closed:
237 237 bt[branch_name] = tip
238 238
239 239 return bt
240 240
241 241 return _branches(context_uid, repo_id, normal, closed)
242 242
243 243 @reraise_safe_exceptions
244 def bulk_request(self, wire, rev, pre_load):
244 def bulk_request(self, wire, commit_id, pre_load):
245 245 cache_on, context_uid, repo_id = self._cache_on(wire)
246 246 @self.region.conditional_cache_on_arguments(condition=cache_on)
247 def _bulk_request(_context_uid, _repo_id, _rev, _pre_load):
247 def _bulk_request(_repo_id, _commit_id, _pre_load):
248 248 result = {}
249 249 for attr in pre_load:
250 250 try:
251 251 method = self._bulk_methods[attr]
252 result[attr] = method(wire, rev)
252 result[attr] = method(wire, commit_id)
253 253 except KeyError as e:
254 254 raise exceptions.VcsException(e)(
255 255 'Unknown bulk attribute: "%s"' % attr)
256 256 return result
257 257
258 return _bulk_request(context_uid, repo_id, rev, sorted(pre_load))
259
260 @reraise_safe_exceptions
261 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
262 baseui = self._factory._create_config(wire["config"], hooks=hooks)
263 clone(baseui, source, dest, noupdate=not update_after_clone)
258 return _bulk_request(repo_id, commit_id, sorted(pre_load))
264 259
265 260 @reraise_safe_exceptions
266 def commitctx(self, wire, message, parents, commit_time, commit_timezone,
267 user, files, extra, removed, updated):
268
261 def ctx_branch(self, wire, commit_id):
262 cache_on, context_uid, repo_id = self._cache_on(wire)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
264 def _ctx_branch(_repo_id, _commit_id):
269 265 repo = self._factory.repo(wire)
270 baseui = self._factory._create_config(wire['config'])
271 publishing = baseui.configbool('phases', 'publish')
272 if publishing:
273 new_commit = 'public'
274 else:
275 new_commit = 'draft'
276
277 def _filectxfn(_repo, ctx, path):
278 """
279 Marks given path as added/changed/removed in a given _repo. This is
280 for internal mercurial commit function.
281 """
282
283 # check if this path is removed
284 if path in removed:
285 # returning None is a way to mark node for removal
286 return None
287
288 # check if this path is added
289 for node in updated:
290 if node['path'] == path:
291 return memfilectx(
292 _repo,
293 changectx=ctx,
294 path=node['path'],
295 data=node['content'],
296 islink=False,
297 isexec=bool(node['mode'] & stat.S_IXUSR),
298 copysource=False)
299
300 raise exceptions.AbortException()(
301 "Given path haven't been marked as added, "
302 "changed or removed (%s)" % path)
303
304 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
305
306 commit_ctx = memctx(
307 repo=repo,
308 parents=parents,
309 text=message,
310 files=files,
311 filectxfn=_filectxfn,
312 user=user,
313 date=(commit_time, commit_timezone),
314 extra=extra)
315
316 n = repo.commitctx(commit_ctx)
317 new_id = hex(n)
318
319 return new_id
266 ctx = self._get_ctx(repo, commit_id)
267 return ctx.branch()
268 return _ctx_branch(repo_id, commit_id)
320 269
321 270 @reraise_safe_exceptions
322 def ctx_branch(self, wire, revision):
323
271 def ctx_date(self, wire, commit_id):
324 272 cache_on, context_uid, repo_id = self._cache_on(wire)
325 273 @self.region.conditional_cache_on_arguments(condition=cache_on)
326 def _ctx_branch(_context_uid, _repo_id, _revision):
274 def _ctx_date(_repo_id, _commit_id):
327 275 repo = self._factory.repo(wire)
328 ctx = self._get_ctx(repo, revision)
329 return ctx.branch()
330 return _ctx_branch(context_uid, repo_id, revision)
331
332 @reraise_safe_exceptions
333 def ctx_date(self, wire, revision):
334 repo = self._factory.repo(wire)
335 ctx = self._get_ctx(repo, revision)
276 ctx = self._get_ctx(repo, commit_id)
336 277 return ctx.date()
278 return _ctx_date(repo_id, commit_id)
337 279
338 280 @reraise_safe_exceptions
339 281 def ctx_description(self, wire, revision):
340 282 repo = self._factory.repo(wire)
341 283 ctx = self._get_ctx(repo, revision)
342 284 return ctx.description()
343 285
344 286 @reraise_safe_exceptions
345 def ctx_files(self, wire, revision):
346
287 def ctx_files(self, wire, commit_id):
347 288 cache_on, context_uid, repo_id = self._cache_on(wire)
348 289 @self.region.conditional_cache_on_arguments(condition=cache_on)
349 def _ctx_files(_context_uid, _repo_id, _revision):
290 def _ctx_files(_repo_id, _commit_id):
350 291 repo = self._factory.repo(wire)
351 ctx = self._get_ctx(repo, revision)
292 ctx = self._get_ctx(repo, commit_id)
352 293 return ctx.files()
353 294
354 return _ctx_files(context_uid, repo_id, revision)
295 return _ctx_files(repo_id, commit_id)
355 296
356 297 @reraise_safe_exceptions
357 298 def ctx_list(self, path, revision):
358 299 repo = self._factory.repo(path)
359 300 ctx = self._get_ctx(repo, revision)
360 301 return list(ctx)
361 302
362 303 @reraise_safe_exceptions
363 def ctx_parents(self, wire, revision):
304 def ctx_parents(self, wire, commit_id):
364 305 cache_on, context_uid, repo_id = self._cache_on(wire)
365 306 @self.region.conditional_cache_on_arguments(condition=cache_on)
366 def _ctx_parents(_context_uid, _repo_id, _revision):
307 def _ctx_parents(_repo_id, _commit_id):
367 308 repo = self._factory.repo(wire)
368 ctx = self._get_ctx(repo, revision)
309 ctx = self._get_ctx(repo, commit_id)
369 310 return [parent.rev() for parent in ctx.parents()
370 311 if not (parent.hidden() or parent.obsolete())]
371 312
372 return _ctx_parents(context_uid, repo_id, revision)
313 return _ctx_parents(repo_id, commit_id)
373 314
374 315 @reraise_safe_exceptions
375 def ctx_children(self, wire, revision):
316 def ctx_children(self, wire, commit_id):
376 317 cache_on, context_uid, repo_id = self._cache_on(wire)
377 318 @self.region.conditional_cache_on_arguments(condition=cache_on)
378 def _ctx_children(_context_uid, _repo_id, _revision):
319 def _ctx_children(_repo_id, _commit_id):
379 320 repo = self._factory.repo(wire)
380 ctx = self._get_ctx(repo, revision)
321 ctx = self._get_ctx(repo, commit_id)
381 322 return [child.rev() for child in ctx.children()
382 323 if not (child.hidden() or child.obsolete())]
383 324
384 return _ctx_children(context_uid, repo_id, revision)
325 return _ctx_children(repo_id, commit_id)
385 326
386 327 @reraise_safe_exceptions
387 def ctx_phase(self, wire, revision):
328 def ctx_phase(self, wire, commit_id):
388 329 cache_on, context_uid, repo_id = self._cache_on(wire)
389 330 @self.region.conditional_cache_on_arguments(condition=cache_on)
390 def _ctx_phase(_context_uid, _repo_path, _revision):
331 def _ctx_phase(_context_uid, _repo_id, _commit_id):
391 332 repo = self._factory.repo(wire)
392 ctx = self._get_ctx(repo, revision)
333 ctx = self._get_ctx(repo, commit_id)
393 334 # public=0, draft=1, secret=3
394 335 return ctx.phase()
395 return _ctx_phase(context_uid, repo_id, revision)
336 return _ctx_phase(context_uid, repo_id, commit_id)
396 337
397 338 @reraise_safe_exceptions
398 def ctx_obsolete(self, wire, revision):
339 def ctx_obsolete(self, wire, commit_id):
399 340 cache_on, context_uid, repo_id = self._cache_on(wire)
400 341 @self.region.conditional_cache_on_arguments(condition=cache_on)
401 def _ctx_obsolete(_context_uid, _repo_path, _revision):
342 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
402 343 repo = self._factory.repo(wire)
403 ctx = self._get_ctx(repo, revision)
344 ctx = self._get_ctx(repo, commit_id)
404 345 return ctx.obsolete()
405 return _ctx_obsolete(context_uid, repo_id, revision)
346 return _ctx_obsolete(context_uid, repo_id, commit_id)
406 347
407 348 @reraise_safe_exceptions
408 def ctx_hidden(self, wire, revision):
349 def ctx_hidden(self, wire, commit_id):
409 350 cache_on, context_uid, repo_id = self._cache_on(wire)
410 351 @self.region.conditional_cache_on_arguments(condition=cache_on)
411 def _ctx_hidden(_context_uid, _repo_path, _revision):
352 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
412 353 repo = self._factory.repo(wire)
413 ctx = self._get_ctx(repo, revision)
354 ctx = self._get_ctx(repo, commit_id)
414 355 return ctx.hidden()
415 return _ctx_hidden(context_uid, repo_id, revision)
356 return _ctx_hidden(context_uid, repo_id, commit_id)
416 357
417 358 @reraise_safe_exceptions
418 359 def ctx_substate(self, wire, revision):
419 360 repo = self._factory.repo(wire)
420 361 ctx = self._get_ctx(repo, revision)
421 362 return ctx.substate
422 363
423 364 @reraise_safe_exceptions
424 365 def ctx_status(self, wire, revision):
425 366 repo = self._factory.repo(wire)
426 367 ctx = self._get_ctx(repo, revision)
427 368 status = repo[ctx.p1().node()].status(other=ctx.node())
428 369 # object of status (odd, custom named tuple in mercurial) is not
429 370 # correctly serializable, we make it a list, as the underling
430 371 # API expects this to be a list
431 372 return list(status)
432 373
433 374 @reraise_safe_exceptions
434 375 def ctx_user(self, wire, revision):
435 376 repo = self._factory.repo(wire)
436 377 ctx = self._get_ctx(repo, revision)
437 378 return ctx.user()
438 379
439 380 @reraise_safe_exceptions
440 381 def check_url(self, url, config):
441 382 _proto = None
442 383 if '+' in url[:url.find('://')]:
443 384 _proto = url[0:url.find('+')]
444 385 url = url[url.find('+') + 1:]
445 386 handlers = []
446 387 url_obj = url_parser(url)
447 388 test_uri, authinfo = url_obj.authinfo()
448 389 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
449 390 url_obj.query = obfuscate_qs(url_obj.query)
450 391
451 392 cleaned_uri = str(url_obj)
452 393 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
453 394
454 395 if authinfo:
455 396 # create a password manager
456 397 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
457 398 passmgr.add_password(*authinfo)
458 399
459 400 handlers.extend((httpbasicauthhandler(passmgr),
460 401 httpdigestauthhandler(passmgr)))
461 402
462 403 o = urllib2.build_opener(*handlers)
463 404 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
464 405 ('Accept', 'application/mercurial-0.1')]
465 406
466 407 q = {"cmd": 'between'}
467 408 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
468 409 qs = '?%s' % urllib.urlencode(q)
469 410 cu = "%s%s" % (test_uri, qs)
470 411 req = urllib2.Request(cu, None, {})
471 412
472 413 try:
473 414 log.debug("Trying to open URL %s", cleaned_uri)
474 415 resp = o.open(req)
475 416 if resp.code != 200:
476 417 raise exceptions.URLError()('Return Code is not 200')
477 418 except Exception as e:
478 419 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
479 420 # means it cannot be cloned
480 421 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
481 422
482 423 # now check if it's a proper hg repo, but don't do it for svn
483 424 try:
484 425 if _proto == 'svn':
485 426 pass
486 427 else:
487 428 # check for pure hg repos
488 429 log.debug(
489 430 "Verifying if URL is a Mercurial repository: %s",
490 431 cleaned_uri)
491 432 ui = make_ui_from_config(config)
492 433 peer_checker = makepeer(ui, url)
493 434 peer_checker.lookup('tip')
494 435 except Exception as e:
495 436 log.warning("URL is not a valid Mercurial repository: %s",
496 437 cleaned_uri)
497 438 raise exceptions.URLError(e)(
498 439 "url [%s] does not look like an hg repo org_exc: %s"
499 440 % (cleaned_uri, e))
500 441
501 442 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
502 443 return True
503 444
504 445 @reraise_safe_exceptions
505 def diff(self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews, context):
446 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
506 447 repo = self._factory.repo(wire)
507 448
508 449 if file_filter:
509 450 match_filter = match(file_filter[0], '', [file_filter[1]])
510 451 else:
511 452 match_filter = file_filter
512 453 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
513 454
514 455 try:
515 456 return "".join(patch.diff(
516 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
457 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
517 458 except RepoLookupError as e:
518 459 raise exceptions.LookupException(e)()
519 460
520 461 @reraise_safe_exceptions
521 462 def node_history(self, wire, revision, path, limit):
522
523 463 cache_on, context_uid, repo_id = self._cache_on(wire)
524 464 @self.region.conditional_cache_on_arguments(condition=cache_on)
525 def _node_history(_context_uid, _repo_path, _revision, _path, _limit):
465 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
526 466 repo = self._factory.repo(wire)
527 467
528 468 ctx = self._get_ctx(repo, revision)
529 469 fctx = ctx.filectx(path)
530 470
531 471 def history_iter():
532 472 limit_rev = fctx.rev()
533 473 for obj in reversed(list(fctx.filelog())):
534 474 obj = fctx.filectx(obj)
535 475 ctx = obj.changectx()
536 476 if ctx.hidden() or ctx.obsolete():
537 477 continue
538 478
539 479 if limit_rev >= obj.rev():
540 480 yield obj
541 481
542 482 history = []
543 483 for cnt, obj in enumerate(history_iter()):
544 484 if limit and cnt >= limit:
545 485 break
546 486 history.append(hex(obj.node()))
547 487
548 488 return [x for x in history]
549 489 return _node_history(context_uid, repo_id, revision, path, limit)
550 490
551 491 @reraise_safe_exceptions
552 492 def node_history_untill(self, wire, revision, path, limit):
553
554 493 cache_on, context_uid, repo_id = self._cache_on(wire)
555 494 @self.region.conditional_cache_on_arguments(condition=cache_on)
556 def _meth(_context_uid, _repo_path):
495 def _node_history_until(_context_uid, _repo_id):
557 496 repo = self._factory.repo(wire)
558 497 ctx = self._get_ctx(repo, revision)
559 498 fctx = ctx.filectx(path)
560 499
561 500 file_log = list(fctx.filelog())
562 501 if limit:
563 502 # Limit to the last n items
564 503 file_log = file_log[-limit:]
565 504
566 505 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
567 return _meth(context_uid, repo_id, revision, path, limit)
506 return _node_history_until(context_uid, repo_id, revision, path, limit)
568 507
569 508 @reraise_safe_exceptions
570 509 def fctx_annotate(self, wire, revision, path):
571 510 repo = self._factory.repo(wire)
572 511 ctx = self._get_ctx(repo, revision)
573 512 fctx = ctx.filectx(path)
574 513
575 514 result = []
576 515 for i, annotate_obj in enumerate(fctx.annotate(), 1):
577 516 ln_no = i
578 517 sha = hex(annotate_obj.fctx.node())
579 518 content = annotate_obj.text
580 519 result.append((ln_no, sha, content))
581 520 return result
582 521
583 522 @reraise_safe_exceptions
584 523 def fctx_node_data(self, wire, revision, path):
585 524 repo = self._factory.repo(wire)
586 525 ctx = self._get_ctx(repo, revision)
587 526 fctx = ctx.filectx(path)
588 527 return fctx.data()
589 528
590 529 @reraise_safe_exceptions
591 def fctx_flags(self, wire, revision, path):
530 def fctx_flags(self, wire, commit_id, path):
592 531 cache_on, context_uid, repo_id = self._cache_on(wire)
593 532 @self.region.conditional_cache_on_arguments(condition=cache_on)
594 def _fctx_flags(_context_uid, _repo_path, _revision, _path):
533 def _fctx_flags(_repo_id, _commit_id, _path):
595 534 repo = self._factory.repo(wire)
596 ctx = self._get_ctx(repo, revision)
535 ctx = self._get_ctx(repo, commit_id)
597 536 fctx = ctx.filectx(path)
598 537 return fctx.flags()
599 538
600 return _fctx_flags(context_uid, repo_id, revision, path)
539 return _fctx_flags(repo_id, commit_id, path)
601 540
602 541 @reraise_safe_exceptions
603 def fctx_size(self, wire, revision, path):
604
542 def fctx_size(self, wire, commit_id, path):
605 543 cache_on, context_uid, repo_id = self._cache_on(wire)
606 544 @self.region.conditional_cache_on_arguments(condition=cache_on)
607 def _fctx_size(_context_uid, _repo_path, _revision, _path):
545 def _fctx_size(_repo_id, _revision, _path):
608 546 repo = self._factory.repo(wire)
609 ctx = self._get_ctx(repo, revision)
547 ctx = self._get_ctx(repo, commit_id)
610 548 fctx = ctx.filectx(path)
611 549 return fctx.size()
612 return _fctx_size(context_uid, repo_id, revision, path)
550 return _fctx_size(repo_id, commit_id, path)
613 551
614 552 @reraise_safe_exceptions
615 553 def get_all_commit_ids(self, wire, name):
616 554 cache_on, context_uid, repo_id = self._cache_on(wire)
617 555 @self.region.conditional_cache_on_arguments(condition=cache_on)
618 556 def _get_all_commit_ids(_context_uid, _repo_id, _name):
619 557 repo = self._factory.repo(wire)
620 558 repo = repo.filtered(name)
621 559 revs = map(lambda x: hex(x[7]), repo.changelog.index)
622 560 return revs
623 561 return _get_all_commit_ids(context_uid, repo_id, name)
624 562
625 563 @reraise_safe_exceptions
626 564 def get_config_value(self, wire, section, name, untrusted=False):
627 565 repo = self._factory.repo(wire)
628 566 return repo.ui.config(section, name, untrusted=untrusted)
629 567
630 568 @reraise_safe_exceptions
631 def get_config_bool(self, wire, section, name, untrusted=False):
632 repo = self._factory.repo(wire)
633 return repo.ui.configbool(section, name, untrusted=untrusted)
634
635 @reraise_safe_exceptions
636 def get_config_list(self, wire, section, name, untrusted=False):
637 repo = self._factory.repo(wire)
638 return repo.ui.configlist(section, name, untrusted=untrusted)
639
640 @reraise_safe_exceptions
641 569 def is_large_file(self, wire, path):
642 570 cache_on, context_uid, repo_id = self._cache_on(wire)
643 571 @self.region.conditional_cache_on_arguments(condition=cache_on)
644 572 def _is_large_file(_context_uid, _repo_id, _path):
645 573 return largefiles.lfutil.isstandin(path)
646 574
647 575 return _is_large_file(context_uid, repo_id, path)
648 576
649 577 @reraise_safe_exceptions
650 578 def in_largefiles_store(self, wire, sha):
651 579 repo = self._factory.repo(wire)
652 580 return largefiles.lfutil.instore(repo, sha)
653 581
654 582 @reraise_safe_exceptions
655 583 def in_user_cache(self, wire, sha):
656 584 repo = self._factory.repo(wire)
657 585 return largefiles.lfutil.inusercache(repo.ui, sha)
658 586
659 587 @reraise_safe_exceptions
660 588 def store_path(self, wire, sha):
661 589 repo = self._factory.repo(wire)
662 590 return largefiles.lfutil.storepath(repo, sha)
663 591
664 592 @reraise_safe_exceptions
665 593 def link(self, wire, sha, path):
666 594 repo = self._factory.repo(wire)
667 595 largefiles.lfutil.link(
668 596 largefiles.lfutil.usercachepath(repo.ui, sha), path)
669 597
670 598 @reraise_safe_exceptions
671 599 def localrepository(self, wire, create=False):
672 600 self._factory.repo(wire, create=create)
673 601
674 602 @reraise_safe_exceptions
675 603 def lookup(self, wire, revision, both):
676 604 cache_on, context_uid, repo_id = self._cache_on(wire)
677 605 @self.region.conditional_cache_on_arguments(condition=cache_on)
678 606 def _lookup(_context_uid, _repo_id, _revision, _both):
679 607
680 608 repo = self._factory.repo(wire)
681 609 rev = _revision
682 610 if isinstance(rev, int):
683 611 # NOTE(marcink):
684 612 # since Mercurial doesn't support negative indexes properly
685 613 # we need to shift accordingly by one to get proper index, e.g
686 614 # repo[-1] => repo[-2]
687 615 # repo[0] => repo[-1]
688 616 if rev <= 0:
689 617 rev = rev + -1
690 618 try:
691 619 ctx = self._get_ctx(repo, rev)
692 620 except (TypeError, RepoLookupError) as e:
693 621 e._org_exc_tb = traceback.format_exc()
694 622 raise exceptions.LookupException(e)(rev)
695 623 except LookupError as e:
696 624 e._org_exc_tb = traceback.format_exc()
697 625 raise exceptions.LookupException(e)(e.name)
698 626
699 627 if not both:
700 628 return ctx.hex()
701 629
702 630 ctx = repo[ctx.hex()]
703 631 return ctx.hex(), ctx.rev()
704 632
705 633 return _lookup(context_uid, repo_id, revision, both)
706 634
707 635 @reraise_safe_exceptions
708 def pull(self, wire, url, commit_ids=None):
709 repo = self._factory.repo(wire)
710 # Disable any prompts for this repo
711 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
712
713 remote = peer(repo, {}, url)
714 # Disable any prompts for this remote
715 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
716
717 if commit_ids:
718 commit_ids = [bin(commit_id) for commit_id in commit_ids]
719
720 return exchange.pull(
721 repo, remote, heads=commit_ids, force=None).cgresult
722
723 @reraise_safe_exceptions
724 636 def sync_push(self, wire, url):
725 637 if not self.check_url(url, wire['config']):
726 638 return
727 639
728 640 repo = self._factory.repo(wire)
729 641
730 642 # Disable any prompts for this repo
731 643 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
732 644
733 645 bookmarks = dict(repo._bookmarks).keys()
734 646 remote = peer(repo, {}, url)
735 647 # Disable any prompts for this remote
736 648 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
737 649
738 650 return exchange.push(
739 651 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
740 652
741 653 @reraise_safe_exceptions
742 654 def revision(self, wire, rev):
743 655 repo = self._factory.repo(wire)
744 656 ctx = self._get_ctx(repo, rev)
745 657 return ctx.rev()
746 658
747 659 @reraise_safe_exceptions
748 660 def rev_range(self, wire, commit_filter):
749 661 cache_on, context_uid, repo_id = self._cache_on(wire)
750 662 @self.region.conditional_cache_on_arguments(condition=cache_on)
751 663 def _rev_range(_context_uid, _repo_id, _filter):
752 664 repo = self._factory.repo(wire)
753 665 revisions = [rev for rev in revrange(repo, commit_filter)]
754 666 return revisions
755 667
756 668 return _rev_range(context_uid, repo_id, sorted(commit_filter))
757 669
758 670 @reraise_safe_exceptions
759 671 def rev_range_hash(self, wire, node):
760 672 repo = self._factory.repo(wire)
761 673
762 674 def get_revs(repo, rev_opt):
763 675 if rev_opt:
764 676 revs = revrange(repo, rev_opt)
765 677 if len(revs) == 0:
766 678 return (nullrev, nullrev)
767 679 return max(revs), min(revs)
768 680 else:
769 681 return len(repo) - 1, 0
770 682
771 683 stop, start = get_revs(repo, [node + ':'])
772 684 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
773 685 return revs
774 686
775 687 @reraise_safe_exceptions
776 688 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
777 689 other_path = kwargs.pop('other_path', None)
778 690
779 691 # case when we want to compare two independent repositories
780 692 if other_path and other_path != wire["path"]:
781 693 baseui = self._factory._create_config(wire["config"])
782 694 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
783 695 else:
784 696 repo = self._factory.repo(wire)
785 697 return list(repo.revs(rev_spec, *args))
786 698
787 699 @reraise_safe_exceptions
788 def strip(self, wire, revision, update, backup):
789 repo = self._factory.repo(wire)
790 ctx = self._get_ctx(repo, revision)
791 hgext_strip(
792 repo.baseui, repo, ctx.node(), update=update, backup=backup)
793
794 @reraise_safe_exceptions
795 700 def verify(self, wire,):
796 701 repo = self._factory.repo(wire)
797 702 baseui = self._factory._create_config(wire['config'])
798 703 baseui.setconfig('ui', 'quiet', 'false')
799 704 output = io.BytesIO()
800 705
801 706 def write(data, **unused_kwargs):
802 707 output.write(data)
803 708 baseui.write = write
804 709
805 710 repo.ui = baseui
806 711 verify.verify(repo)
807 712 return output.getvalue()
808 713
809 714 @reraise_safe_exceptions
810 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
811 repo = self._factory.repo(wire)
812 ctx = self._get_ctx(repo, revision)
813 node = ctx.node()
814
815 date = (tag_time, tag_timezone)
816 try:
817 hg_tag.tag(repo, name, node, message, local, user, date)
818 except Abort as e:
819 log.exception("Tag operation aborted")
820 # Exception can contain unicode which we convert
821 raise exceptions.AbortException(e)(repr(e))
822
823 @reraise_safe_exceptions
824 715 def tags(self, wire):
825 716 cache_on, context_uid, repo_id = self._cache_on(wire)
826 717 @self.region.conditional_cache_on_arguments(condition=cache_on)
827 718 def _tags(_context_uid, _repo_id):
828 719 repo = self._factory.repo(wire)
829 720 return repo.tags()
830 721
831 722 return _tags(context_uid, repo_id)
832 723
833 724 @reraise_safe_exceptions
834 725 def update(self, wire, node=None, clean=False):
835 726 repo = self._factory.repo(wire)
836 727 baseui = self._factory._create_config(wire['config'])
837 728 commands.update(baseui, repo, node=node, clean=clean)
838 729
839 730 @reraise_safe_exceptions
840 731 def identify(self, wire):
841 732 repo = self._factory.repo(wire)
842 733 baseui = self._factory._create_config(wire['config'])
843 734 output = io.BytesIO()
844 735 baseui.write = output.write
845 736 # This is required to get a full node id
846 737 baseui.debugflag = True
847 738 commands.identify(baseui, repo, id=True)
848 739
849 740 return output.getvalue()
850 741
851 742 @reraise_safe_exceptions
852 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
853 repo = self._factory.repo(wire)
854 baseui = self._factory._create_config(wire['config'], hooks=hooks)
855
856 # Mercurial internally has a lot of logic that checks ONLY if
857 # option is defined, we just pass those if they are defined then
858 opts = {}
859 if bookmark:
860 opts['bookmark'] = bookmark
861 if branch:
862 opts['branch'] = branch
863 if revision:
864 opts['rev'] = revision
865
866 commands.pull(baseui, repo, source, **opts)
867
868 @reraise_safe_exceptions
869 743 def heads(self, wire, branch=None):
870 744 repo = self._factory.repo(wire)
871 745 baseui = self._factory._create_config(wire['config'])
872 746 output = io.BytesIO()
873 747
874 748 def write(data, **unused_kwargs):
875 749 output.write(data)
876 750
877 751 baseui.write = write
878 752 if branch:
879 753 args = [branch]
880 754 else:
881 755 args = []
882 756 commands.heads(baseui, repo, template='{node} ', *args)
883 757
884 758 return output.getvalue()
885 759
886 760 @reraise_safe_exceptions
887 761 def ancestor(self, wire, revision1, revision2):
888 762 repo = self._factory.repo(wire)
889 763 changelog = repo.changelog
890 764 lookup = repo.lookup
891 765 a = changelog.ancestor(lookup(revision1), lookup(revision2))
892 766 return hex(a)
893 767
894 768 @reraise_safe_exceptions
769 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
770 baseui = self._factory._create_config(wire["config"], hooks=hooks)
771 clone(baseui, source, dest, noupdate=not update_after_clone)
772
773 @reraise_safe_exceptions
774 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
775
776 repo = self._factory.repo(wire)
777 baseui = self._factory._create_config(wire['config'])
778 publishing = baseui.configbool('phases', 'publish')
779 if publishing:
780 new_commit = 'public'
781 else:
782 new_commit = 'draft'
783
784 def _filectxfn(_repo, ctx, path):
785 """
786 Marks given path as added/changed/removed in a given _repo. This is
787 for internal mercurial commit function.
788 """
789
790 # check if this path is removed
791 if path in removed:
792 # returning None is a way to mark node for removal
793 return None
794
795 # check if this path is added
796 for node in updated:
797 if node['path'] == path:
798 return memfilectx(
799 _repo,
800 changectx=ctx,
801 path=node['path'],
802 data=node['content'],
803 islink=False,
804 isexec=bool(node['mode'] & stat.S_IXUSR),
805 copysource=False)
806
807 raise exceptions.AbortException()(
808 "Given path haven't been marked as added, "
809 "changed or removed (%s)" % path)
810
811 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
812
813 commit_ctx = memctx(
814 repo=repo,
815 parents=parents,
816 text=message,
817 files=files,
818 filectxfn=_filectxfn,
819 user=user,
820 date=(commit_time, commit_timezone),
821 extra=extra)
822
823 n = repo.commitctx(commit_ctx)
824 new_id = hex(n)
825
826 return new_id
827
828 @reraise_safe_exceptions
829 def pull(self, wire, url, commit_ids=None):
830 repo = self._factory.repo(wire)
831 # Disable any prompts for this repo
832 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
833
834 remote = peer(repo, {}, url)
835 # Disable any prompts for this remote
836 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
837
838 if commit_ids:
839 commit_ids = [bin(commit_id) for commit_id in commit_ids]
840
841 return exchange.pull(
842 repo, remote, heads=commit_ids, force=None).cgresult
843
844 @reraise_safe_exceptions
845 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
846 repo = self._factory.repo(wire)
847 baseui = self._factory._create_config(wire['config'], hooks=hooks)
848
849 # Mercurial internally has a lot of logic that checks ONLY if
850 # option is defined, we just pass those if they are defined then
851 opts = {}
852 if bookmark:
853 opts['bookmark'] = bookmark
854 if branch:
855 opts['branch'] = branch
856 if revision:
857 opts['rev'] = revision
858
859 commands.pull(baseui, repo, source, **opts)
860
861 @reraise_safe_exceptions
895 862 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
896 863 repo = self._factory.repo(wire)
897 864 baseui = self._factory._create_config(wire['config'], hooks=hooks)
898 865 commands.push(baseui, repo, dest=dest_path, rev=revisions,
899 866 new_branch=push_branches)
900 867
901 868 @reraise_safe_exceptions
869 def strip(self, wire, revision, update, backup):
870 repo = self._factory.repo(wire)
871 ctx = self._get_ctx(repo, revision)
872 hgext_strip(
873 repo.baseui, repo, ctx.node(), update=update, backup=backup)
874
875 @reraise_safe_exceptions
902 876 def merge(self, wire, revision):
903 877 repo = self._factory.repo(wire)
904 878 baseui = self._factory._create_config(wire['config'])
905 879 repo.ui.setconfig('ui', 'merge', 'internal:dump')
906 880
907 881 # In case sub repositories are used, mercurial prompts the user in
908 882 # case of merge conflicts or different sub repository sources. By
909 883 # setting the interactive flag to `False` mercurial doesn't prompt the
910 884 # user but instead uses a default value.
911 885 repo.ui.setconfig('ui', 'interactive', False)
912 886 commands.merge(baseui, repo, rev=revision)
913 887
914 888 @reraise_safe_exceptions
915 889 def merge_state(self, wire):
916 890 repo = self._factory.repo(wire)
917 891 repo.ui.setconfig('ui', 'merge', 'internal:dump')
918 892
919 893 # In case sub repositories are used, mercurial prompts the user in
920 894 # case of merge conflicts or different sub repository sources. By
921 895 # setting the interactive flag to `False` mercurial doesn't prompt the
922 896 # user but instead uses a default value.
923 897 repo.ui.setconfig('ui', 'interactive', False)
924 898 ms = hg_merge.mergestate(repo)
925 899 return [x for x in ms.unresolved()]
926 900
927 901 @reraise_safe_exceptions
928 902 def commit(self, wire, message, username, close_branch=False):
929 903 repo = self._factory.repo(wire)
930 904 baseui = self._factory._create_config(wire['config'])
931 905 repo.ui.setconfig('ui', 'username', username)
932 906 commands.commit(baseui, repo, message=message, close_branch=close_branch)
933 907
934 908 @reraise_safe_exceptions
935 909 def rebase(self, wire, source=None, dest=None, abort=False):
936 910 repo = self._factory.repo(wire)
937 911 baseui = self._factory._create_config(wire['config'])
938 912 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939 913 rebase.rebase(
940 914 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
941 915
942 916 @reraise_safe_exceptions
917 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
918 repo = self._factory.repo(wire)
919 ctx = self._get_ctx(repo, revision)
920 node = ctx.node()
921
922 date = (tag_time, tag_timezone)
923 try:
924 hg_tag.tag(repo, name, node, message, local, user, date)
925 except Abort as e:
926 log.exception("Tag operation aborted")
927 # Exception can contain unicode which we convert
928 raise exceptions.AbortException(e)(repr(e))
929
930 @reraise_safe_exceptions
943 931 def bookmark(self, wire, bookmark, revision=None):
944 932 repo = self._factory.repo(wire)
945 933 baseui = self._factory._create_config(wire['config'])
946 934 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
947 935
948 936 @reraise_safe_exceptions
949 937 def install_hooks(self, wire, force=False):
950 938 # we don't need any special hooks for Mercurial
951 939 pass
952 940
953 941 @reraise_safe_exceptions
954 942 def get_hooks_info(self, wire):
955 943 return {
956 944 'pre_version': vcsserver.__version__,
957 945 'post_version': vcsserver.__version__,
958 946 }
@@ -1,793 +1,796 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 from urllib2 import URLError
23 23 import urlparse
24 24 import logging
25 25 import posixpath as vcspath
26 26 import StringIO
27 27 import urllib
28 28 import traceback
29 29
30 30 import svn.client
31 31 import svn.core
32 32 import svn.delta
33 33 import svn.diff
34 34 import svn.fs
35 35 import svn.repos
36 36
37 37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
43 43 # Set of svn compatible version flags.
44 44 # Compare with subversion/svnadmin/svnadmin.c
45 45 svn_compatible_versions = {
46 46 'pre-1.4-compatible',
47 47 'pre-1.5-compatible',
48 48 'pre-1.6-compatible',
49 49 'pre-1.8-compatible',
50 50 'pre-1.9-compatible'
51 51 }
52 52
53 53 svn_compatible_versions_map = {
54 54 'pre-1.4-compatible': '1.3',
55 55 'pre-1.5-compatible': '1.4',
56 56 'pre-1.6-compatible': '1.5',
57 57 'pre-1.8-compatible': '1.7',
58 58 'pre-1.9-compatible': '1.8',
59 59 }
60 60
61 61
62 62 def reraise_safe_exceptions(func):
63 63 """Decorator for converting svn exceptions to something neutral."""
64 64 def wrapper(*args, **kwargs):
65 65 try:
66 66 return func(*args, **kwargs)
67 67 except Exception as e:
68 68 if not hasattr(e, '_vcs_kind'):
69 69 log.exception("Unhandled exception in svn remote call")
70 70 raise_from_original(exceptions.UnhandledException(e))
71 71 raise
72 72 return wrapper
73 73
74 74
75 75 class SubversionFactory(RepoFactory):
76 76 repo_type = 'svn'
77 77
78 78 def _create_repo(self, wire, create, compatible_version):
79 79 path = svn.core.svn_path_canonicalize(wire['path'])
80 80 if create:
81 81 fs_config = {'compatible-version': '1.9'}
82 82 if compatible_version:
83 83 if compatible_version not in svn_compatible_versions:
84 84 raise Exception('Unknown SVN compatible version "{}"'
85 85 .format(compatible_version))
86 86 fs_config['compatible-version'] = \
87 87 svn_compatible_versions_map[compatible_version]
88 88
89 89 log.debug('Create SVN repo with config "%s"', fs_config)
90 90 repo = svn.repos.create(path, "", "", None, fs_config)
91 91 else:
92 92 repo = svn.repos.open(path)
93 93
94 94 log.debug('Got SVN object: %s', repo)
95 95 return repo
96 96
97 97 def repo(self, wire, create=False, compatible_version=None):
98 98 """
99 99 Get a repository instance for the given path.
100 100 """
101 101 return self._create_repo(wire, create, compatible_version)
102 102
103 103
104 104 NODE_TYPE_MAPPING = {
105 105 svn.core.svn_node_file: 'file',
106 106 svn.core.svn_node_dir: 'dir',
107 107 }
108 108
109 109
110 110 class SvnRemote(object):
111 111
112 112 def __init__(self, factory, hg_factory=None):
113 113 self._factory = factory
114 114 # TODO: Remove once we do not use internal Mercurial objects anymore
115 115 # for subversion
116 116 self._hg_factory = hg_factory
117 117 self.region = self._factory._cache_region
118 118
119 119 def _cache_on(self, wire):
120 120 context = wire.get('context', '')
121 121 context_uid = '{}'.format(context)
122 122 repo_id = wire.get('repo_id', '')
123 123 cache = wire.get('cache', True)
124 124 cache_on = context and cache
125 125 return cache_on, context_uid, repo_id
126 126
127 127 @reraise_safe_exceptions
128 128 def discover_svn_version(self):
129 129 try:
130 130 import svn.core
131 131 svn_ver = svn.core.SVN_VERSION
132 132 except ImportError:
133 133 svn_ver = None
134 134 return svn_ver
135 135
136 136 @reraise_safe_exceptions
137 137 def is_empty(self, wire):
138 138
139 139 try:
140 140 return self.lookup(wire, -1) == 0
141 141 except Exception:
142 142 log.exception("failed to read object_store")
143 143 return False
144 144
145 145 def check_url(self, url, config_items):
146 146 # this can throw exception if not installed, but we detect this
147 147 from hgsubversion import svnrepo
148 148
149 149 baseui = self._hg_factory._create_config(config_items)
150 150 # the uuid function gets a valid UUID only from a proper repo, else
151 151 # it throws an exception
152 152 try:
153 153 svnrepo.svnremoterepo(baseui, url).svn.uuid
154 154 except Exception:
155 155 tb = traceback.format_exc()
156 156 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
157 157 raise URLError(
158 158 '"%s" is not a valid Subversion source url.' % (url, ))
159 159 return True
160 160
161 161 def is_path_valid_repository(self, wire, path):
162 162
163 163 # NOTE(marcink): short circuit the check for SVN repo
164 164 # the repos.open might be expensive to check, but we have one cheap
165 165 # pre condition that we can use, to check for 'format' file
166 166
167 167 if not os.path.isfile(os.path.join(path, 'format')):
168 168 return False
169 169
170 170 try:
171 171 svn.repos.open(path)
172 172 except svn.core.SubversionException:
173 173 tb = traceback.format_exc()
174 174 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
175 175 return False
176 176 return True
177 177
178 178 @reraise_safe_exceptions
179 179 def verify(self, wire,):
180 180 repo_path = wire['path']
181 181 if not self.is_path_valid_repository(wire, repo_path):
182 182 raise Exception(
183 183 "Path %s is not a valid Subversion repository." % repo_path)
184 184
185 185 cmd = ['svnadmin', 'info', repo_path]
186 186 stdout, stderr = subprocessio.run_command(cmd)
187 187 return stdout
188 188
189 189 def lookup(self, wire, revision):
190 190 if revision not in [-1, None, 'HEAD']:
191 191 raise NotImplementedError
192 192 repo = self._factory.repo(wire)
193 193 fs_ptr = svn.repos.fs(repo)
194 194 head = svn.fs.youngest_rev(fs_ptr)
195 195 return head
196 196
197 197 def lookup_interval(self, wire, start_ts, end_ts):
198 198 repo = self._factory.repo(wire)
199 199 fsobj = svn.repos.fs(repo)
200 200 start_rev = None
201 201 end_rev = None
202 202 if start_ts:
203 203 start_ts_svn = apr_time_t(start_ts)
204 204 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
205 205 else:
206 206 start_rev = 1
207 207 if end_ts:
208 208 end_ts_svn = apr_time_t(end_ts)
209 209 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
210 210 else:
211 211 end_rev = svn.fs.youngest_rev(fsobj)
212 212 return start_rev, end_rev
213 213
214 214 def revision_properties(self, wire, revision):
215 215
216 216 cache_on, context_uid, repo_id = self._cache_on(wire)
217 217 @self.region.conditional_cache_on_arguments(condition=cache_on)
218 def _revision_properties(_context_uid, _repo_id, _revision):
218 def _revision_properties(_repo_id, _revision):
219 219 repo = self._factory.repo(wire)
220 220 fs_ptr = svn.repos.fs(repo)
221 221 return svn.fs.revision_proplist(fs_ptr, revision)
222 return _revision_properties(context_uid, repo_id, revision)
222 return _revision_properties(repo_id, revision)
223 223
224 224 def revision_changes(self, wire, revision):
225 225
226 226 repo = self._factory.repo(wire)
227 227 fsobj = svn.repos.fs(repo)
228 228 rev_root = svn.fs.revision_root(fsobj, revision)
229 229
230 230 editor = svn.repos.ChangeCollector(fsobj, rev_root)
231 231 editor_ptr, editor_baton = svn.delta.make_editor(editor)
232 232 base_dir = ""
233 233 send_deltas = False
234 234 svn.repos.replay2(
235 235 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
236 236 editor_ptr, editor_baton, None)
237 237
238 238 added = []
239 239 changed = []
240 240 removed = []
241 241
242 242 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
243 243 for path, change in editor.changes.iteritems():
244 244 # TODO: Decide what to do with directory nodes. Subversion can add
245 245 # empty directories.
246 246
247 247 if change.item_kind == svn.core.svn_node_dir:
248 248 continue
249 249 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
250 250 added.append(path)
251 251 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
252 252 svn.repos.CHANGE_ACTION_REPLACE]:
253 253 changed.append(path)
254 254 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
255 255 removed.append(path)
256 256 else:
257 257 raise NotImplementedError(
258 258 "Action %s not supported on path %s" % (
259 259 change.action, path))
260 260
261 261 changes = {
262 262 'added': added,
263 263 'changed': changed,
264 264 'removed': removed,
265 265 }
266 266 return changes
267 267
268 268 @reraise_safe_exceptions
269 269 def node_history(self, wire, path, revision, limit):
270 270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 272 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
273 273 cross_copies = False
274 274 repo = self._factory.repo(wire)
275 275 fsobj = svn.repos.fs(repo)
276 276 rev_root = svn.fs.revision_root(fsobj, revision)
277 277
278 278 history_revisions = []
279 279 history = svn.fs.node_history(rev_root, path)
280 280 history = svn.fs.history_prev(history, cross_copies)
281 281 while history:
282 282 __, node_revision = svn.fs.history_location(history)
283 283 history_revisions.append(node_revision)
284 284 if limit and len(history_revisions) >= limit:
285 285 break
286 286 history = svn.fs.history_prev(history, cross_copies)
287 287 return history_revisions
288 288 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
289 289
290 290 def node_properties(self, wire, path, revision):
291 cache_on, context_uid, repo_id = self._cache_on(wire)
292 @self.region.conditional_cache_on_arguments(condition=cache_on)
293 def _node_properties(_repo_id, _path, _revision):
291 294 repo = self._factory.repo(wire)
292 295 fsobj = svn.repos.fs(repo)
293 296 rev_root = svn.fs.revision_root(fsobj, revision)
294 297 return svn.fs.node_proplist(rev_root, path)
298 return _node_properties(repo_id, path, revision)
295 299
296 300 def file_annotate(self, wire, path, revision):
297 301 abs_path = 'file://' + urllib.pathname2url(
298 302 vcspath.join(wire['path'], path))
299 303 file_uri = svn.core.svn_path_canonicalize(abs_path)
300 304
301 305 start_rev = svn_opt_revision_value_t(0)
302 306 peg_rev = svn_opt_revision_value_t(revision)
303 307 end_rev = peg_rev
304 308
305 309 annotations = []
306 310
307 311 def receiver(line_no, revision, author, date, line, pool):
308 312 annotations.append((line_no, revision, line))
309 313
310 314 # TODO: Cannot use blame5, missing typemap function in the swig code
311 315 try:
312 316 svn.client.blame2(
313 317 file_uri, peg_rev, start_rev, end_rev,
314 318 receiver, svn.client.create_context())
315 319 except svn.core.SubversionException as exc:
316 320 log.exception("Error during blame operation.")
317 321 raise Exception(
318 322 "Blame not supported or file does not exist at path %s. "
319 323 "Error %s." % (path, exc))
320 324
321 325 return annotations
322 326
323 327 def get_node_type(self, wire, path, revision=None):
324 328
325 329 cache_on, context_uid, repo_id = self._cache_on(wire)
326 330 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 def _get_node_type(_context_uid, _repo_id, _path, _revision):
331 def _get_node_type(_repo_id, _path, _revision):
328 332 repo = self._factory.repo(wire)
329 333 fs_ptr = svn.repos.fs(repo)
330 334 if _revision is None:
331 335 _revision = svn.fs.youngest_rev(fs_ptr)
332 336 root = svn.fs.revision_root(fs_ptr, _revision)
333 337 node = svn.fs.check_path(root, path)
334 338 return NODE_TYPE_MAPPING.get(node, None)
335 return _get_node_type(context_uid, repo_id, path, revision)
339 return _get_node_type(repo_id, path, revision)
336 340
337 341 def get_nodes(self, wire, path, revision=None):
338 342
339 343 cache_on, context_uid, repo_id = self._cache_on(wire)
340 344 @self.region.conditional_cache_on_arguments(condition=cache_on)
341 def _get_nodes(_context_uid, _repo_id, _path, _revision):
345 def _get_nodes(_repo_id, _path, _revision):
342 346 repo = self._factory.repo(wire)
343 347 fsobj = svn.repos.fs(repo)
344 348 if _revision is None:
345 349 _revision = svn.fs.youngest_rev(fsobj)
346 350 root = svn.fs.revision_root(fsobj, _revision)
347 351 entries = svn.fs.dir_entries(root, path)
348 352 result = []
349 353 for entry_path, entry_info in entries.iteritems():
350 354 result.append(
351 355 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
352 356 return result
353 return _get_nodes(context_uid, repo_id, path, revision)
357 return _get_nodes(repo_id, path, revision)
354 358
355 359 def get_file_content(self, wire, path, rev=None):
356 360 repo = self._factory.repo(wire)
357 361 fsobj = svn.repos.fs(repo)
358 362 if rev is None:
359 363 rev = svn.fs.youngest_revision(fsobj)
360 364 root = svn.fs.revision_root(fsobj, rev)
361 365 content = svn.core.Stream(svn.fs.file_contents(root, path))
362 366 return content.read()
363 367
364 368 def get_file_size(self, wire, path, revision=None):
365 369
366 370 cache_on, context_uid, repo_id = self._cache_on(wire)
367 371 @self.region.conditional_cache_on_arguments(condition=cache_on)
368 def _get_file_size(_context_uid, _repo_id, _path, _revision):
372 def _get_file_size(_repo_id, _path, _revision):
369 373 repo = self._factory.repo(wire)
370 374 fsobj = svn.repos.fs(repo)
371 375 if _revision is None:
372 376 _revision = svn.fs.youngest_revision(fsobj)
373 377 root = svn.fs.revision_root(fsobj, _revision)
374 378 size = svn.fs.file_length(root, path)
375 379 return size
376 return _get_file_size(context_uid, repo_id, path, revision)
380 return _get_file_size(repo_id, path, revision)
377 381
378 382 def create_repository(self, wire, compatible_version=None):
379 383 log.info('Creating Subversion repository in path "%s"', wire['path'])
380 384 self._factory.repo(wire, create=True,
381 385 compatible_version=compatible_version)
382 386
383 387 def get_url_and_credentials(self, src_url):
384 388 obj = urlparse.urlparse(src_url)
385 389 username = obj.username or None
386 390 password = obj.password or None
387 391 return username, password, src_url
388 392
389 393 def import_remote_repository(self, wire, src_url):
390 394 repo_path = wire['path']
391 395 if not self.is_path_valid_repository(wire, repo_path):
392 396 raise Exception(
393 397 "Path %s is not a valid Subversion repository." % repo_path)
394 398
395 399 username, password, src_url = self.get_url_and_credentials(src_url)
396 400 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
397 401 '--trust-server-cert-failures=unknown-ca']
398 402 if username and password:
399 403 rdump_cmd += ['--username', username, '--password', password]
400 404 rdump_cmd += [src_url]
401 405
402 406 rdump = subprocess.Popen(
403 407 rdump_cmd,
404 408 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
405 409 load = subprocess.Popen(
406 410 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
407 411
408 412 # TODO: johbo: This can be a very long operation, might be better
409 413 # to track some kind of status and provide an api to check if the
410 414 # import is done.
411 415 rdump.wait()
412 416 load.wait()
413 417
414 418 log.debug('Return process ended with code: %s', rdump.returncode)
415 419 if rdump.returncode != 0:
416 420 errors = rdump.stderr.read()
417 421 log.error('svnrdump dump failed: statuscode %s: message: %s',
418 422 rdump.returncode, errors)
419 423 reason = 'UNKNOWN'
420 424 if 'svnrdump: E230001:' in errors:
421 425 reason = 'INVALID_CERTIFICATE'
422 426
423 427 if reason == 'UNKNOWN':
424 428 reason = 'UNKNOWN:{}'.format(errors)
425 429 raise Exception(
426 430 'Failed to dump the remote repository from %s. Reason:%s' % (
427 431 src_url, reason))
428 432 if load.returncode != 0:
429 433 raise Exception(
430 434 'Failed to load the dump of remote repository from %s.' %
431 435 (src_url, ))
432 436
    def commit(self, wire, message, author, timestamp, updated, removed):
        """
        Create a new revision applying the given node changes.

        :param wire: connection descriptor used to locate the repository.
        :param message: commit message (must be a byte string for the
            svn bindings).
        :param author: commit author (byte string, same reason).
        :param timestamp: optional timestamp; when set, the svn:date revision
            property is overwritten after the commit.
        :param updated: list of node dicts to add/update via TxnNodeProcessor.
        :param removed: list of node dicts to delete via TxnNodeProcessor.
        :return: the new revision number.
        """
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        # open a transaction based on the current youngest revision
        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # replace the auto-generated svn:date with the caller's timestamp
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id
458 462
459 463 def diff(self, wire, rev1, rev2, path1=None, path2=None,
460 464 ignore_whitespace=False, context=3):
461 465
462 466 wire.update(cache=False)
463 467 repo = self._factory.repo(wire)
464 468 diff_creator = SvnDiffer(
465 469 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
466 470 try:
467 471 return diff_creator.generate_diff()
468 472 except svn.core.SubversionException as e:
469 473 log.exception(
470 474 "Error during diff operation operation. "
471 475 "Path might not exist %s, %s" % (path1, path2))
472 476 return ""
473 477
474 478 @reraise_safe_exceptions
475 479 def is_large_file(self, wire, path):
476 480 return False
477 481
478 482 @reraise_safe_exceptions
479 483 def run_svn_command(self, wire, cmd, **opts):
480 484 path = wire.get('path', None)
481 485
482 486 if path and os.path.isdir(path):
483 487 opts['cwd'] = path
484 488
485 489 safe_call = False
486 490 if '_safe' in opts:
487 491 safe_call = True
488 492
489 493 svnenv = os.environ.copy()
490 494 svnenv.update(opts.pop('extra_env', {}))
491 495
492 496 _opts = {'env': svnenv, 'shell': False}
493 497
494 498 try:
495 499 _opts.update(opts)
496 500 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
497 501
498 502 return ''.join(p), ''.join(p.error)
499 503 except (EnvironmentError, OSError) as err:
500 504 cmd = ' '.join(cmd) # human friendly CMD
501 505 tb_err = ("Couldn't run svn command (%s).\n"
502 506 "Original error was:%s\n"
503 507 "Call options:%s\n"
504 508 % (cmd, err, _opts))
505 509 log.exception(tb_err)
506 510 if safe_call:
507 511 return '', err
508 512 else:
509 513 raise exceptions.VcsException()(tb_err)
510 514
511 515 @reraise_safe_exceptions
512 516 def install_hooks(self, wire, force=False):
513 517 from vcsserver.hook_utils import install_svn_hooks
514 518 repo_path = wire['path']
515 519 binary_dir = settings.BINARY_DIR
516 520 executable = None
517 521 if binary_dir:
518 522 executable = os.path.join(binary_dir, 'python')
519 523 return install_svn_hooks(
520 524 repo_path, executable=executable, force_create=force)
521 525
522 526 @reraise_safe_exceptions
523 527 def get_hooks_info(self, wire):
524 528 from vcsserver.hook_utils import (
525 529 get_svn_pre_hook_version, get_svn_post_hook_version)
526 530 repo_path = wire['path']
527 531 return {
528 532 'pre_version': get_svn_pre_hook_version(repo_path),
529 533 'post_version': get_svn_post_hook_version(repo_path),
530 534 }
531 535
532 536
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # flipped per-node in _generate_node_diff while a binary file is being
    # processed; suppresses text diff generation for that node
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        """
        :param repo: opened svn repository object.
        :param src_rev: source (old) revision number.
        :param src_path: path in the source revision; falls back to tgt_path.
        :param tgt_rev: target (new) revision number.
        :param tgt_path: path in the target revision ('' means repo root).
        :param ignore_whitespace: ignore blank-line/space changes in the diff.
        :param context: number of context lines for the unified diff.
        """
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # diffing a file against a directory (or vice versa) is unsupported;
        # svn_node_none means the node is absent on that side, which is fine
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the complete git-style diff text between the two roots."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # drive a delta between the two roots through DiffChangeEditor to
        # collect the list of changed paths, then diff each one
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # derive the change type from which side the node is missing on
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        # Write one node's git-style header plus unified diff into `buf`.

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # prefer the target side's svn:mime-type; fall back to the source
        # side when the node does not exist in the target revision
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        # Return the node's content split into lines (keeping line endings);
        # empty for binary content and for non-file/non-symlink nodes.
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
692 696
693 697
694
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions

    Each callback appends a ``(path, node_kind, action)`` tuple to
    ``self.changes``; the method signatures are dictated by the svn delta
    editor protocol and must not be altered.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        # node kind is unknown for deletions, hence None
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
713 716
714 717
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for svn delta drivers that grants access to every path."""
    return True
717 720
718 721
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # `node` is a dict with at least 'path'; updates additionally read
        # 'content' and the optional 'properties' mapping
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node if missing, then write its content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # collect missing ancestor directories bottom-up, then create them
        # top-down so each make_dir has an existing parent
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        # stream the new content onto the node via a text delta
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
775 778
776 779
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds) into the APR timestamp type
    apr_time_t, which is expressed in microseconds.
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
782 785
783 786
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    Returns a ``svn_opt_revision_t`` of kind ``svn_opt_revision_number``
    wrapping the numeric revision `num`.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    # an explicit numeric revision, as opposed to HEAD/BASE/date kinds
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,108 +1,108 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
class TestDiff(object):
    """Checks that HgRemote.diff translates mercurial errors safely."""

    def test_raising_safe_exception_when_lookup_failed(self):
        # a LookupError raised inside mercurial must surface as a plain
        # Exception tagged with the 'lookup' vcs kind
        remote = hg.HgRemote(Mock())
        with patch('mercurial.patch.diff') as diff_mock:
            diff_mock.side_effect = LookupError(
                'deadbeef', 'index', 'message')
            with pytest.raises(Exception) as exc_info:
                remote.diff(
                    wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
                    file_filter=None, opt_git=True, opt_ignorews=True,
                    context=3)
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == 'lookup'
44 44
45 45
class TestReraiseSafeExceptions(object):
    """Checks for the hg.reraise_safe_exceptions decorator contract."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        # every public HgRemote method must be wrapped by the decorator
        hg_remote = hg.HgRemote(Mock())
        decorator = hg.reraise_safe_exceptions(None)
        public_methods = (
            (name, member)
            for name, member in inspect.getmembers(
                hg_remote, predicate=inspect.ismethod)
            if not name.startswith('_'))
        for method_name, method in public_methods:
            assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (hgcompat.Abort(), 'abort'),
        (hgcompat.InterventionRequired(), 'abort'),
        (hgcompat.RepoLookupError(), 'lookup'),
        (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
        (hgcompat.RepoError(), 'error'),
        (hgcompat.RequirementError(), 'requirement'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        @hg.reraise_safe_exceptions
        def raising_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            raising_method()
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type

    def test_keeps_original_traceback(self):
        # the re-raised exception must preserve the original traceback tail
        @hg.reraise_safe_exceptions
        def raising_method():
            try:
                raise hgcompat.Abort()
            except:
                self.original_traceback = traceback.format_tb(
                    sys.exc_info()[2])
                raise

        try:
            raising_method()
        except Exception:
            new_traceback = traceback.format_tb(sys.exc_info()[2])

        tail = new_traceback[-len(self.original_traceback):]
        assert tail == self.original_traceback

    def test_maps_unknow_exceptions_to_unhandled(self):
        @hg.reraise_safe_exceptions
        def stub_method():
            raise ValueError('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'unhandled'

    def test_does_not_map_known_exceptions(self):
        @hg.reraise_safe_exceptions
        def stub_method():
            raise exceptions.LookupException()('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'lookup'
General Comments 0
You need to be logged in to leave comments. Login now