git/hg: skip double caching with bulk request calls....
super-admin
r1075:8fc1778b python3
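Summary of the change: `bulk_request()` in both remote backends is already cached as a whole via the region's `conditional_cache_on_arguments` decorator, but each per-attribute method it dispatches to (`date`, `author`, `message`, and so on) carries the same decorator, so a single bulk call also wrote one extra cache entry per attribute. The added line flips the `cache` flag on the wire before dispatching, so the per-attribute methods' own `_cache_on(wire)` check (presumably provided by vcsserver's RemoteBase) disables their caching and only the aggregated bulk result is stored. A condensed sketch of the resulting pattern, illustrative only and with the real method's error handling omitted:

    # Condensed sketch of GitRemote.bulk_request after the change (illustrative,
    # not the verbatim method body; see the diff below for the real code).
    def bulk_request(self, wire, rev, pre_load):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _rev, _pre_load):
            result = {}
            for attr in pre_load:
                method = self._bulk_methods[attr]
                # the added line: per-attribute methods now see cache=False on the
                # wire and compute directly instead of writing their own cache entries
                wire.update({'cache': False})
                result[attr] = method(wire, rev)
            return result

        # only this aggregated dict is stored in the cache region
        return _bulk_request(repo_id, rev, sorted(pre_load))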
@@ -1,1343 +1,1344 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib.request, urllib.parse, urllib.error
26 26 import urllib.request, urllib.error, urllib.parse
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from pygit2 import index as LibGit2Index
33 33 from dulwich import index, objects
34 34 from dulwich.client import HttpGitClient, LocalGitClient
35 35 from dulwich.errors import (
36 36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 37 MissingCommitError, ObjectMissing, HangupException,
38 38 UnexpectedCommandError)
39 39 from dulwich.repo import Repo as DulwichRepo
40 40 from dulwich.server import update_server_info
41 41
42 42 from vcsserver import exceptions, settings, subprocessio
43 43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
44 44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 45 from vcsserver.hgcompat import (
46 46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 47 from vcsserver.git_lfs.lib import LFSOidStore
48 48 from vcsserver.vcs_base import RemoteBase
49 49
50 50 DIR_STAT = stat.S_IFDIR
51 51 FILE_MODE = stat.S_IFMT
52 52 GIT_LINK = objects.S_IFGITLINK
53 53 PEELED_REF_MARKER = b'^{}'
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def reraise_safe_exceptions(func):
60 60 """Converts Dulwich exceptions to something neutral."""
61 61
62 62 @wraps(func)
63 63 def wrapper(*args, **kwargs):
64 64 try:
65 65 return func(*args, **kwargs)
66 66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 67 exc = exceptions.LookupException(org_exc=e)
68 68 raise exc(safe_str(e))
69 69 except (HangupException, UnexpectedCommandError) as e:
70 70 exc = exceptions.VcsException(org_exc=e)
71 71 raise exc(safe_str(e))
72 72 except Exception as e:
73 73 # NOTE(marcink): because of how dulwich handles some exceptions
74 74 # (KeyError on empty repos), we cannot track this and catch all
75 75 # exceptions here; these are exceptions raised by other handlers
76 76 #if not hasattr(e, '_vcs_kind'):
77 77 #log.exception("Unhandled exception in git remote call")
78 78 #raise_from_original(exceptions.UnhandledException)
79 79 raise
80 80 return wrapper
81 81
82 82
83 83 class Repo(DulwichRepo):
84 84 """
85 85 A wrapper for dulwich Repo class.
86 86
87 87 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
88 88 "Too many open files" error. We need to close all opened file descriptors
89 89 once the repo object is destroyed.
90 90 """
91 91 def __del__(self):
92 92 if hasattr(self, 'object_store'):
93 93 self.close()
94 94
95 95
96 96 class Repository(LibGit2Repo):
97 97
98 98 def __enter__(self):
99 99 return self
100 100
101 101 def __exit__(self, exc_type, exc_val, exc_tb):
102 102 self.free()
103 103
104 104
105 105 class GitFactory(RepoFactory):
106 106 repo_type = 'git'
107 107
108 108 def _create_repo(self, wire, create, use_libgit2=False):
109 109 if use_libgit2:
110 110 return Repository(wire['path'])
111 111 else:
112 112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
113 113 return Repo(repo_path)
114 114
115 115 def repo(self, wire, create=False, use_libgit2=False):
116 116 """
117 117 Get a repository instance for the given path.
118 118 """
119 119 return self._create_repo(wire, create, use_libgit2)
120 120
121 121 def repo_libgit2(self, wire):
122 122 return self.repo(wire, use_libgit2=True)
123 123
124 124
125 125 class GitRemote(RemoteBase):
126 126
127 127 def __init__(self, factory):
128 128 self._factory = factory
129 129 self._bulk_methods = {
130 130 "date": self.date,
131 131 "author": self.author,
132 132 "branch": self.branch,
133 133 "message": self.message,
134 134 "parents": self.parents,
135 135 "_commit": self.revision,
136 136 }
137 137
138 138 def _wire_to_config(self, wire):
139 139 if 'config' in wire:
140 140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
141 141 return {}
142 142
143 143 def _remote_conf(self, config):
144 144 params = [
145 145 '-c', 'core.askpass=""',
146 146 ]
147 147 ssl_cert_dir = config.get('vcs_ssl_dir')
148 148 if ssl_cert_dir:
149 149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
150 150 return params
151 151
152 152 @reraise_safe_exceptions
153 153 def discover_git_version(self):
154 154 stdout, _ = self.run_git_command(
155 155 {}, ['--version'], _bare=True, _safe=True)
156 156 prefix = b'git version'
157 157 if stdout.startswith(prefix):
158 158 stdout = stdout[len(prefix):]
159 159 return safe_str(stdout.strip())
160 160
161 161 @reraise_safe_exceptions
162 162 def is_empty(self, wire):
163 163 repo_init = self._factory.repo_libgit2(wire)
164 164 with repo_init as repo:
165 165
166 166 try:
167 167 has_head = repo.head.name
168 168 if has_head:
169 169 return False
170 170
171 171 # NOTE(marcink): check again using more expensive method
172 172 return repo.is_empty
173 173 except Exception:
174 174 pass
175 175
176 176 return True
177 177
178 178 @reraise_safe_exceptions
179 179 def assert_correct_path(self, wire):
180 180 cache_on, context_uid, repo_id = self._cache_on(wire)
181 181 region = self._region(wire)
182 182
183 183 @region.conditional_cache_on_arguments(condition=cache_on)
184 184 def _assert_correct_path(_context_uid, _repo_id):
185 185 try:
186 186 repo_init = self._factory.repo_libgit2(wire)
187 187 with repo_init as repo:
188 188 pass
189 189 except pygit2.GitError:
190 190 path = wire.get('path')
191 191 tb = traceback.format_exc()
192 192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
193 193 return False
194 194
195 195 return True
196 196 return _assert_correct_path(context_uid, repo_id)
197 197
198 198 @reraise_safe_exceptions
199 199 def bare(self, wire):
200 200 repo_init = self._factory.repo_libgit2(wire)
201 201 with repo_init as repo:
202 202 return repo.is_bare
203 203
204 204 @reraise_safe_exceptions
205 205 def blob_as_pretty_string(self, wire, sha):
206 206 repo_init = self._factory.repo_libgit2(wire)
207 207 with repo_init as repo:
208 208 blob_obj = repo[sha]
209 209 blob = blob_obj.data
210 210 return blob
211 211
212 212 @reraise_safe_exceptions
213 213 def blob_raw_length(self, wire, sha):
214 214 cache_on, context_uid, repo_id = self._cache_on(wire)
215 215 region = self._region(wire)
216 216
217 217 @region.conditional_cache_on_arguments(condition=cache_on)
218 218 def _blob_raw_length(_repo_id, _sha):
219 219
220 220 repo_init = self._factory.repo_libgit2(wire)
221 221 with repo_init as repo:
222 222 blob = repo[sha]
223 223 return blob.size
224 224
225 225 return _blob_raw_length(repo_id, sha)
226 226
227 227 def _parse_lfs_pointer(self, raw_content):
228 228 spec_string = b'version https://git-lfs.github.com/spec'
229 229 if raw_content and raw_content.startswith(spec_string):
230 230
231 231 pattern = re.compile(rb"""
232 232 (?:\n)?
233 233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 235 ^size[ ](?P<oid_size>[0-9]+)\n
236 236 (?:\n)?
237 237 """, re.VERBOSE | re.MULTILINE)
238 238 match = pattern.match(raw_content)
239 239 if match:
240 240 return match.groupdict()
241 241
242 242 return {}
243 243
244 244 @reraise_safe_exceptions
245 245 def is_large_file(self, wire, commit_id):
246 246 cache_on, context_uid, repo_id = self._cache_on(wire)
247 247 region = self._region(wire)
248 248
249 249 @region.conditional_cache_on_arguments(condition=cache_on)
250 250 def _is_large_file(_repo_id, _sha):
251 251 repo_init = self._factory.repo_libgit2(wire)
252 252 with repo_init as repo:
253 253 blob = repo[commit_id]
254 254 if blob.is_binary:
255 255 return {}
256 256
257 257 return self._parse_lfs_pointer(blob.data)
258 258
259 259 return _is_large_file(repo_id, commit_id)
260 260
261 261 @reraise_safe_exceptions
262 262 def is_binary(self, wire, tree_id):
263 263 cache_on, context_uid, repo_id = self._cache_on(wire)
264 264 region = self._region(wire)
265 265
266 266 @region.conditional_cache_on_arguments(condition=cache_on)
267 267 def _is_binary(_repo_id, _tree_id):
268 268 repo_init = self._factory.repo_libgit2(wire)
269 269 with repo_init as repo:
270 270 blob_obj = repo[tree_id]
271 271 return blob_obj.is_binary
272 272
273 273 return _is_binary(repo_id, tree_id)
274 274
275 275 @reraise_safe_exceptions
276 276 def md5_hash(self, wire, tree_id):
277 277 cache_on, context_uid, repo_id = self._cache_on(wire)
278 278 region = self._region(wire)
279 279
280 280 @region.conditional_cache_on_arguments(condition=cache_on)
281 281 def _md5_hash(_repo_id, _tree_id):
282 282 return ''
283 283
284 284 return _md5_hash(repo_id, tree_id)
285 285
286 286 @reraise_safe_exceptions
287 287 def in_largefiles_store(self, wire, oid):
288 288 conf = self._wire_to_config(wire)
289 289 repo_init = self._factory.repo_libgit2(wire)
290 290 with repo_init as repo:
291 291 repo_name = repo.path
292 292
293 293 store_location = conf.get('vcs_git_lfs_store_location')
294 294 if store_location:
295 295
296 296 store = LFSOidStore(
297 297 oid=oid, repo=repo_name, store_location=store_location)
298 298 return store.has_oid()
299 299
300 300 return False
301 301
302 302 @reraise_safe_exceptions
303 303 def store_path(self, wire, oid):
304 304 conf = self._wire_to_config(wire)
305 305 repo_init = self._factory.repo_libgit2(wire)
306 306 with repo_init as repo:
307 307 repo_name = repo.path
308 308
309 309 store_location = conf.get('vcs_git_lfs_store_location')
310 310 if store_location:
311 311 store = LFSOidStore(
312 312 oid=oid, repo=repo_name, store_location=store_location)
313 313 return store.oid_path
314 314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
315 315
316 316 @reraise_safe_exceptions
317 317 def bulk_request(self, wire, rev, pre_load):
318 318 cache_on, context_uid, repo_id = self._cache_on(wire)
319 319 region = self._region(wire)
320 320
321 321 @region.conditional_cache_on_arguments(condition=cache_on)
322 322 def _bulk_request(_repo_id, _rev, _pre_load):
323 323 result = {}
324 324 for attr in pre_load:
325 325 try:
326 326 method = self._bulk_methods[attr]
327 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
327 328 args = [wire, rev]
328 329 result[attr] = method(*args)
329 330 except KeyError as e:
330 331 raise exceptions.VcsException(e)(
331 332 "Unknown bulk attribute: %s" % attr)
332 333 return result
333 334
334 335 return _bulk_request(repo_id, rev, sorted(pre_load))
335 336
336 337 def _build_opener(self, url):
337 338 handlers = []
338 339 url_obj = url_parser(url)
339 340 _, authinfo = url_obj.authinfo()
340 341
341 342 if authinfo:
342 343 # create a password manager
343 344 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
344 345 passmgr.add_password(*authinfo)
345 346
346 347 handlers.extend((httpbasicauthhandler(passmgr),
347 348 httpdigestauthhandler(passmgr)))
348 349
349 350 return urllib.request.build_opener(*handlers)
350 351
351 352 def _type_id_to_name(self, type_id: int):
352 353 return {
353 354 1: 'commit',
354 355 2: 'tree',
355 356 3: 'blob',
356 357 4: 'tag'
357 358 }[type_id]
358 359
359 360 @reraise_safe_exceptions
360 361 def check_url(self, url, config):
361 362 url_obj = url_parser(url)
362 363 test_uri, _ = url_obj.authinfo()
363 364 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
364 365 url_obj.query = obfuscate_qs(url_obj.query)
365 366 cleaned_uri = str(url_obj)
366 367 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
367 368
368 369 if not test_uri.endswith('info/refs'):
369 370 test_uri = test_uri.rstrip('/') + '/info/refs'
370 371
371 372 o = self._build_opener(url)
372 373 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
373 374
374 375 q = {"service": 'git-upload-pack'}
375 376 qs = '?%s' % urllib.parse.urlencode(q)
376 377 cu = "%s%s" % (test_uri, qs)
377 378 req = urllib.request.Request(cu, None, {})
378 379
379 380 try:
380 381 log.debug("Trying to open URL %s", cleaned_uri)
381 382 resp = o.open(req)
382 383 if resp.code != 200:
383 384 raise exceptions.URLError()('Return Code is not 200')
384 385 except Exception as e:
385 386 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
386 387 # means it cannot be cloned
387 388 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
388 389
389 390 # now detect if it's proper git repo
390 391 gitdata = resp.read()
391 392 if 'service=git-upload-pack' in gitdata:
392 393 pass
393 394 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
394 395 # old style git can return some other format !
395 396 pass
396 397 else:
397 398 raise exceptions.URLError()(
398 399 "url [%s] does not look like an git" % (cleaned_uri,))
399 400
400 401 return True
401 402
402 403 @reraise_safe_exceptions
403 404 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
404 405 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
405 406 remote_refs = self.pull(wire, url, apply_refs=False)
406 407 repo = self._factory.repo(wire)
407 408 if isinstance(valid_refs, list):
408 409 valid_refs = tuple(valid_refs)
409 410
410 411 for k in remote_refs:
411 412 # only parse heads/tags and skip so called deferred tags
412 413 if k.startswith(valid_refs) and not k.endswith(deferred):
413 414 repo[k] = remote_refs[k]
414 415
415 416 if update_after_clone:
416 417 # we want to checkout HEAD
417 418 repo["HEAD"] = remote_refs["HEAD"]
418 419 index.build_index_from_tree(repo.path, repo.index_path(),
419 420 repo.object_store, repo["HEAD"].tree)
420 421
421 422 @reraise_safe_exceptions
422 423 def branch(self, wire, commit_id):
423 424 cache_on, context_uid, repo_id = self._cache_on(wire)
424 425 region = self._region(wire)
425 426 @region.conditional_cache_on_arguments(condition=cache_on)
426 427 def _branch(_context_uid, _repo_id, _commit_id):
427 428 regex = re.compile('^refs/heads')
428 429
429 430 def filter_with(ref):
430 431 return regex.match(ref[0]) and ref[1] == _commit_id
431 432
432 433 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
433 434 return [x[0].split('refs/heads/')[-1] for x in branches]
434 435
435 436 return _branch(context_uid, repo_id, commit_id)
436 437
437 438 @reraise_safe_exceptions
438 439 def commit_branches(self, wire, commit_id):
439 440 cache_on, context_uid, repo_id = self._cache_on(wire)
440 441 region = self._region(wire)
441 442 @region.conditional_cache_on_arguments(condition=cache_on)
442 443 def _commit_branches(_context_uid, _repo_id, _commit_id):
443 444 repo_init = self._factory.repo_libgit2(wire)
444 445 with repo_init as repo:
445 446 branches = [x for x in repo.branches.with_commit(_commit_id)]
446 447 return branches
447 448
448 449 return _commit_branches(context_uid, repo_id, commit_id)
449 450
450 451 @reraise_safe_exceptions
451 452 def add_object(self, wire, content):
452 453 repo_init = self._factory.repo_libgit2(wire)
453 454 with repo_init as repo:
454 455 blob = objects.Blob()
455 456 blob.set_raw_string(content)
456 457 repo.object_store.add_object(blob)
457 458 return blob.id
458 459
459 460 # TODO: this is quite complex, check if that can be simplified
460 461 @reraise_safe_exceptions
461 462 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
462 463 # Defines the root tree
463 464 class _Root(object):
464 465 def __repr__(self):
465 466 return 'ROOT TREE'
466 467 ROOT = _Root()
467 468
468 469 repo = self._factory.repo(wire)
469 470 object_store = repo.object_store
470 471
471 472 # Create tree and populates it with blobs
472 473
473 474 if commit_tree and repo[commit_tree]:
474 475 git_commit = repo[commit_data['parents'][0]]
475 476 commit_tree = repo[git_commit.tree] # root tree
476 477 else:
477 478 commit_tree = objects.Tree()
478 479
479 480 for node in updated:
480 481 # Compute subdirs if needed
481 482 dirpath, nodename = vcspath.split(node['path'])
482 483 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
483 484 parent = commit_tree
484 485 ancestors = [('', parent)]
485 486
486 487 # Tries to dig for the deepest existing tree
487 488 while dirnames:
488 489 curdir = dirnames.pop(0)
489 490 try:
490 491 dir_id = parent[curdir][1]
491 492 except KeyError:
492 493 # put curdir back into dirnames and stop
493 494 dirnames.insert(0, curdir)
494 495 break
495 496 else:
496 497 # If found, updates parent
497 498 parent = repo[dir_id]
498 499 ancestors.append((curdir, parent))
499 500 # Now parent is deepest existing tree and we need to create
500 501 # subtrees for dirnames (in reverse order)
501 502 # [this only applies for nodes from added]
502 503 new_trees = []
503 504
504 505 blob = objects.Blob.from_string(node['content'])
505 506
506 507 if dirnames:
507 508 # If there are trees which should be created we need to build
508 509 # them now (in reverse order)
509 510 reversed_dirnames = list(reversed(dirnames))
510 511 curtree = objects.Tree()
511 512 curtree[node['node_path']] = node['mode'], blob.id
512 513 new_trees.append(curtree)
513 514 for dirname in reversed_dirnames[:-1]:
514 515 newtree = objects.Tree()
515 516 newtree[dirname] = (DIR_STAT, curtree.id)
516 517 new_trees.append(newtree)
517 518 curtree = newtree
518 519 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
519 520 else:
520 521 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
521 522
522 523 new_trees.append(parent)
523 524 # Update ancestors
524 525 reversed_ancestors = reversed(
525 526 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
526 527 for parent, tree, path in reversed_ancestors:
527 528 parent[path] = (DIR_STAT, tree.id)
528 529 object_store.add_object(tree)
529 530
530 531 object_store.add_object(blob)
531 532 for tree in new_trees:
532 533 object_store.add_object(tree)
533 534
534 535 for node_path in removed:
535 536 paths = node_path.split('/')
536 537 tree = commit_tree # start with top-level
537 538 trees = [{'tree': tree, 'path': ROOT}]
538 539 # Traverse deep into the forest...
539 540 # resolve final tree by iterating the path.
540 541 # e.g a/b/c.txt will get
541 542 # - root as tree then
542 543 # - 'a' as tree,
543 544 # - 'b' as tree,
544 545 # - stop at c as blob.
545 546 for path in paths:
546 547 try:
547 548 obj = repo[tree[path][1]]
548 549 if isinstance(obj, objects.Tree):
549 550 trees.append({'tree': obj, 'path': path})
550 551 tree = obj
551 552 except KeyError:
552 553 break
553 554 #PROBLEM:
554 555 """
555 556 We're not editing the same reference tree object
556 557 """
557 558 # Cut down the blob and all rotten trees on the way back...
558 559 for path, tree_data in reversed(list(zip(paths, trees))):
559 560 tree = tree_data['tree']
560 561 tree.__delitem__(path)
561 562 # This operation edits the tree, we need to mark new commit back
562 563
563 564 if len(tree) > 0:
564 565 # This tree still has elements - don't remove it or any
565 566 # of its parents
566 567 break
567 568
568 569 object_store.add_object(commit_tree)
569 570
570 571 # Create commit
571 572 commit = objects.Commit()
572 573 commit.tree = commit_tree.id
573 574 bytes_keys = [
574 575 'author',
575 576 'committer',
576 577 'message',
577 578 'encoding'
578 579 ]
579 580
580 581 for k, v in commit_data.items():
581 582 if k in bytes_keys:
582 583 v = safe_bytes(v)
583 584 setattr(commit, k, v)
584 585
585 586 object_store.add_object(commit)
586 587
587 588 self.create_branch(wire, branch, safe_str(commit.id))
588 589
589 590 # dulwich set-ref
590 591 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
591 592
592 593 return commit.id
593 594
594 595 @reraise_safe_exceptions
595 596 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
596 597 if url != 'default' and '://' not in url:
597 598 client = LocalGitClient(url)
598 599 else:
599 600 url_obj = url_parser(url)
600 601 o = self._build_opener(url)
601 602 url, _ = url_obj.authinfo()
602 603 client = HttpGitClient(base_url=url, opener=o)
603 604 repo = self._factory.repo(wire)
604 605
605 606 determine_wants = repo.object_store.determine_wants_all
606 607 if refs:
607 608 def determine_wants_requested(references):
608 609 return [references[r] for r in references if r in refs]
609 610 determine_wants = determine_wants_requested
610 611
611 612 try:
612 613 remote_refs = client.fetch(
613 614 path=url, target=repo, determine_wants=determine_wants)
614 615 except NotGitRepository as e:
615 616 log.warning(
616 617 'Trying to fetch from "%s" failed, not a Git repository.', url)
617 618 # Exception can contain unicode which we convert
618 619 raise exceptions.AbortException(e)(repr(e))
619 620
620 621 # mikhail: client.fetch() returns all the remote refs, but fetches only
621 622 # refs filtered by `determine_wants` function. We need to filter result
622 623 # as well
623 624 if refs:
624 625 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
625 626
626 627 if apply_refs:
627 628 # TODO: johbo: Needs proper test coverage with a git repository
628 629 # that contains a tag object, so that we would end up with
629 630 # a peeled ref at this point.
630 631 for k in remote_refs:
631 632 if k.endswith(PEELED_REF_MARKER):
632 633 log.debug("Skipping peeled reference %s", k)
633 634 continue
634 635 repo[k] = remote_refs[k]
635 636
636 637 if refs and not update_after:
637 638 # mikhail: explicitly set the head to the last ref.
638 639 repo["HEAD"] = remote_refs[refs[-1]]
639 640
640 641 if update_after:
641 642 # we want to checkout HEAD
642 643 repo["HEAD"] = remote_refs["HEAD"]
643 644 index.build_index_from_tree(repo.path, repo.index_path(),
644 645 repo.object_store, repo["HEAD"].tree)
645 646 return remote_refs
646 647
647 648 @reraise_safe_exceptions
648 649 def sync_fetch(self, wire, url, refs=None, all_refs=False):
649 650 repo = self._factory.repo(wire)
650 651 if refs and not isinstance(refs, (list, tuple)):
651 652 refs = [refs]
652 653
653 654 config = self._wire_to_config(wire)
654 655 # get all remote refs we'll use to fetch later
655 656 cmd = ['ls-remote']
656 657 if not all_refs:
657 658 cmd += ['--heads', '--tags']
658 659 cmd += [url]
659 660 output, __ = self.run_git_command(
660 661 wire, cmd, fail_on_stderr=False,
661 662 _copts=self._remote_conf(config),
662 663 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 664
664 665 remote_refs = collections.OrderedDict()
665 666 fetch_refs = []
666 667
667 668 for ref_line in output.splitlines():
668 669 sha, ref = ref_line.split(b'\t')
669 670 sha = sha.strip()
670 671 if ref in remote_refs:
671 672 # duplicate, skip
672 673 continue
673 674 if ref.endswith(PEELED_REF_MARKER):
674 675 log.debug("Skipping peeled reference %s", ref)
675 676 continue
676 677 # don't sync HEAD
677 678 if ref in [b'HEAD']:
678 679 continue
679 680
680 681 remote_refs[ref] = sha
681 682
682 683 if refs and sha in refs:
683 684 # we filter fetch using our specified refs
684 685 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
685 686 elif not refs:
686 687 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
687 688 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
688 689
689 690 if fetch_refs:
690 691 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
691 692 fetch_refs_chunks = list(chunk)
692 693 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
693 694 self.run_git_command(
694 695 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
695 696 fail_on_stderr=False,
696 697 _copts=self._remote_conf(config),
697 698 extra_env={'GIT_TERMINAL_PROMPT': '0'})
698 699
699 700 return remote_refs
700 701
701 702 @reraise_safe_exceptions
702 703 def sync_push(self, wire, url, refs=None):
703 704 if not self.check_url(url, wire):
704 705 return
705 706 config = self._wire_to_config(wire)
706 707 self._factory.repo(wire)
707 708 self.run_git_command(
708 709 wire, ['push', url, '--mirror'], fail_on_stderr=False,
709 710 _copts=self._remote_conf(config),
710 711 extra_env={'GIT_TERMINAL_PROMPT': '0'})
711 712
712 713 @reraise_safe_exceptions
713 714 def get_remote_refs(self, wire, url):
714 715 repo = Repo(url)
715 716 return repo.get_refs()
716 717
717 718 @reraise_safe_exceptions
718 719 def get_description(self, wire):
719 720 repo = self._factory.repo(wire)
720 721 return repo.get_description()
721 722
722 723 @reraise_safe_exceptions
723 724 def get_missing_revs(self, wire, rev1, rev2, path2):
724 725 repo = self._factory.repo(wire)
725 726 LocalGitClient(thin_packs=False).fetch(path2, repo)
726 727
727 728 wire_remote = wire.copy()
728 729 wire_remote['path'] = path2
729 730 repo_remote = self._factory.repo(wire_remote)
730 731 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
731 732
732 733 revs = [
733 734 x.commit.id
734 735 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
735 736 return revs
736 737
737 738 @reraise_safe_exceptions
738 739 def get_object(self, wire, sha, maybe_unreachable=False):
739 740 cache_on, context_uid, repo_id = self._cache_on(wire)
740 741 region = self._region(wire)
741 742
742 743 @region.conditional_cache_on_arguments(condition=cache_on)
743 744 def _get_object(_context_uid, _repo_id, _sha):
744 745 repo_init = self._factory.repo_libgit2(wire)
745 746 with repo_init as repo:
746 747
747 748 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
748 749 try:
749 750 commit = repo.revparse_single(sha)
750 751 except KeyError:
751 752 # NOTE(marcink): KeyError doesn't give us any meaningful information
752 753 # here, we instead give something more explicit
753 754 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
754 755 raise exceptions.LookupException(e)(missing_commit_err)
755 756 except ValueError as e:
756 757 raise exceptions.LookupException(e)(missing_commit_err)
757 758
758 759 is_tag = False
759 760 if isinstance(commit, pygit2.Tag):
760 761 commit = repo.get(commit.target)
761 762 is_tag = True
762 763
763 764 check_dangling = True
764 765 if is_tag:
765 766 check_dangling = False
766 767
767 768 if check_dangling and maybe_unreachable:
768 769 check_dangling = False
769 770
770 771 # if we used a reference and it parsed, it means we don't have a dangling commit
771 772 if sha != commit.hex:
772 773 check_dangling = False
773 774
774 775 if check_dangling:
775 776 # check for dangling commit
776 777 for branch in repo.branches.with_commit(commit.hex):
777 778 if branch:
778 779 break
779 780 else:
780 781 # NOTE(marcink): Empty error doesn't give us any meaningful information
781 782 # here, we instead give something more explicit
782 783 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
783 784 raise exceptions.LookupException(e)(missing_commit_err)
784 785
785 786 commit_id = commit.hex
786 787 type_id = commit.type
787 788
788 789 return {
789 790 'id': commit_id,
790 791 'type': self._type_id_to_name(type_id),
791 792 'commit_id': commit_id,
792 793 'idx': 0
793 794 }
794 795
795 796 return _get_object(context_uid, repo_id, sha)
796 797
797 798 @reraise_safe_exceptions
798 799 def get_refs(self, wire):
799 800 cache_on, context_uid, repo_id = self._cache_on(wire)
800 801 region = self._region(wire)
801 802
802 803 @region.conditional_cache_on_arguments(condition=cache_on)
803 804 def _get_refs(_context_uid, _repo_id):
804 805
805 806 repo_init = self._factory.repo_libgit2(wire)
806 807 with repo_init as repo:
807 808 regex = re.compile('^refs/(heads|tags)/')
808 809 return {x.name: x.target.hex for x in
809 810 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
810 811
811 812 return _get_refs(context_uid, repo_id)
812 813
813 814 @reraise_safe_exceptions
814 815 def get_branch_pointers(self, wire):
815 816 cache_on, context_uid, repo_id = self._cache_on(wire)
816 817 region = self._region(wire)
817 818
818 819 @region.conditional_cache_on_arguments(condition=cache_on)
819 820 def _get_branch_pointers(_context_uid, _repo_id):
820 821
821 822 repo_init = self._factory.repo_libgit2(wire)
822 823 regex = re.compile('^refs/heads')
823 824 with repo_init as repo:
824 825 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
825 826 return {x.target.hex: x.shorthand for x in branches}
826 827
827 828 return _get_branch_pointers(context_uid, repo_id)
828 829
829 830 @reraise_safe_exceptions
830 831 def head(self, wire, show_exc=True):
831 832 cache_on, context_uid, repo_id = self._cache_on(wire)
832 833 region = self._region(wire)
833 834
834 835 @region.conditional_cache_on_arguments(condition=cache_on)
835 836 def _head(_context_uid, _repo_id, _show_exc):
836 837 repo_init = self._factory.repo_libgit2(wire)
837 838 with repo_init as repo:
838 839 try:
839 840 return repo.head.peel().hex
840 841 except Exception:
841 842 if show_exc:
842 843 raise
843 844 return _head(context_uid, repo_id, show_exc)
844 845
845 846 @reraise_safe_exceptions
846 847 def init(self, wire):
847 848 repo_path = safe_str(wire['path'])
848 849 self.repo = Repo.init(repo_path)
849 850
850 851 @reraise_safe_exceptions
851 852 def init_bare(self, wire):
852 853 repo_path = safe_str(wire['path'])
853 854 self.repo = Repo.init_bare(repo_path)
854 855
855 856 @reraise_safe_exceptions
856 857 def revision(self, wire, rev):
857 858
858 859 cache_on, context_uid, repo_id = self._cache_on(wire)
859 860 region = self._region(wire)
860 861
861 862 @region.conditional_cache_on_arguments(condition=cache_on)
862 863 def _revision(_context_uid, _repo_id, _rev):
863 864 repo_init = self._factory.repo_libgit2(wire)
864 865 with repo_init as repo:
865 866 commit = repo[rev]
866 867 obj_data = {
867 868 'id': commit.id.hex,
868 869 }
869 870 # tree objects themselves don't have a tree_id attribute
870 871 if hasattr(commit, 'tree_id'):
871 872 obj_data['tree'] = commit.tree_id.hex
872 873
873 874 return obj_data
874 875 return _revision(context_uid, repo_id, rev)
875 876
876 877 @reraise_safe_exceptions
877 878 def date(self, wire, commit_id):
878 879 cache_on, context_uid, repo_id = self._cache_on(wire)
879 880 region = self._region(wire)
880 881
881 882 @region.conditional_cache_on_arguments(condition=cache_on)
882 883 def _date(_repo_id, _commit_id):
883 884 repo_init = self._factory.repo_libgit2(wire)
884 885 with repo_init as repo:
885 886 commit = repo[commit_id]
886 887
887 888 if hasattr(commit, 'commit_time'):
888 889 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
889 890 else:
890 891 commit = commit.get_object()
891 892 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
892 893
893 894 # TODO(marcink): check dulwich difference of offset vs timezone
894 895 return [commit_time, commit_time_offset]
895 896 return _date(repo_id, commit_id)
896 897
897 898 @reraise_safe_exceptions
898 899 def author(self, wire, commit_id):
899 900 cache_on, context_uid, repo_id = self._cache_on(wire)
900 901 region = self._region(wire)
901 902
902 903 @region.conditional_cache_on_arguments(condition=cache_on)
903 904 def _author(_repo_id, _commit_id):
904 905 repo_init = self._factory.repo_libgit2(wire)
905 906 with repo_init as repo:
906 907 commit = repo[commit_id]
907 908
908 909 if hasattr(commit, 'author'):
909 910 author = commit.author
910 911 else:
911 912 author = commit.get_object().author
912 913
913 914 if author.email:
914 915 return "{} <{}>".format(author.name, author.email)
915 916
916 917 try:
917 918 return "{}".format(author.name)
918 919 except Exception:
919 920 return "{}".format(safe_str(author.raw_name))
920 921
921 922 return _author(repo_id, commit_id)
922 923
923 924 @reraise_safe_exceptions
924 925 def message(self, wire, commit_id):
925 926 cache_on, context_uid, repo_id = self._cache_on(wire)
926 927 region = self._region(wire)
927 928 @region.conditional_cache_on_arguments(condition=cache_on)
928 929 def _message(_repo_id, _commit_id):
929 930 repo_init = self._factory.repo_libgit2(wire)
930 931 with repo_init as repo:
931 932 commit = repo[commit_id]
932 933 return commit.message
933 934 return _message(repo_id, commit_id)
934 935
935 936 @reraise_safe_exceptions
936 937 def parents(self, wire, commit_id):
937 938 cache_on, context_uid, repo_id = self._cache_on(wire)
938 939 region = self._region(wire)
939 940
940 941 @region.conditional_cache_on_arguments(condition=cache_on)
941 942 def _parents(_repo_id, _commit_id):
942 943 repo_init = self._factory.repo_libgit2(wire)
943 944 with repo_init as repo:
944 945 commit = repo[commit_id]
945 946 if hasattr(commit, 'parent_ids'):
946 947 parent_ids = commit.parent_ids
947 948 else:
948 949 parent_ids = commit.get_object().parent_ids
949 950
950 951 return [x.hex for x in parent_ids]
951 952 return _parents(repo_id, commit_id)
952 953
953 954 @reraise_safe_exceptions
954 955 def children(self, wire, commit_id):
955 956 cache_on, context_uid, repo_id = self._cache_on(wire)
956 957 region = self._region(wire)
957 958
958 959 head = self.head(wire)
959 960
960 961 @region.conditional_cache_on_arguments(condition=cache_on)
961 962 def _children(_repo_id, _commit_id):
962 963
963 964 output, __ = self.run_git_command(
964 965 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
965 966
966 967 child_ids = []
967 968 pat = re.compile(r'^{}'.format(commit_id))
968 969 for line in output.splitlines():
969 970 line = safe_str(line)
970 971 if pat.match(line):
971 972 found_ids = line.split(' ')[1:]
972 973 child_ids.extend(found_ids)
973 974 break
974 975
975 976 return child_ids
976 977 return _children(repo_id, commit_id)
977 978
978 979 @reraise_safe_exceptions
979 980 def set_refs(self, wire, key, value):
980 981 repo_init = self._factory.repo_libgit2(wire)
981 982 with repo_init as repo:
982 983 repo.references.create(key, value, force=True)
983 984
984 985 @reraise_safe_exceptions
985 986 def create_branch(self, wire, branch_name, commit_id, force=False):
986 987 repo_init = self._factory.repo_libgit2(wire)
987 988 with repo_init as repo:
988 989 commit = repo[commit_id]
989 990
990 991 if force:
991 992 repo.branches.local.create(branch_name, commit, force=force)
992 993 elif not repo.branches.get(branch_name):
993 994 # create only if that branch doesn't already exist
994 995 repo.branches.local.create(branch_name, commit, force=force)
995 996
996 997 @reraise_safe_exceptions
997 998 def remove_ref(self, wire, key):
998 999 repo_init = self._factory.repo_libgit2(wire)
999 1000 with repo_init as repo:
1000 1001 repo.references.delete(key)
1001 1002
1002 1003 @reraise_safe_exceptions
1003 1004 def tag_remove(self, wire, tag_name):
1004 1005 repo_init = self._factory.repo_libgit2(wire)
1005 1006 with repo_init as repo:
1006 1007 key = 'refs/tags/{}'.format(tag_name)
1007 1008 repo.references.delete(key)
1008 1009
1009 1010 @reraise_safe_exceptions
1010 1011 def tree_changes(self, wire, source_id, target_id):
1011 1012 # TODO(marcink): remove this seems it's only used by tests
1012 1013 repo = self._factory.repo(wire)
1013 1014 source = repo[source_id].tree if source_id else None
1014 1015 target = repo[target_id].tree
1015 1016 result = repo.object_store.tree_changes(source, target)
1016 1017 return list(result)
1017 1018
1018 1019 @reraise_safe_exceptions
1019 1020 def tree_and_type_for_path(self, wire, commit_id, path):
1020 1021
1021 1022 cache_on, context_uid, repo_id = self._cache_on(wire)
1022 1023 region = self._region(wire)
1023 1024
1024 1025 @region.conditional_cache_on_arguments(condition=cache_on)
1025 1026 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1026 1027 repo_init = self._factory.repo_libgit2(wire)
1027 1028
1028 1029 with repo_init as repo:
1029 1030 commit = repo[commit_id]
1030 1031 try:
1031 1032 tree = commit.tree[path]
1032 1033 except KeyError:
1033 1034 return None, None, None
1034 1035
1035 1036 return tree.id.hex, tree.type_str, tree.filemode
1036 1037 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1037 1038
1038 1039 @reraise_safe_exceptions
1039 1040 def tree_items(self, wire, tree_id):
1040 1041 cache_on, context_uid, repo_id = self._cache_on(wire)
1041 1042 region = self._region(wire)
1042 1043
1043 1044 @region.conditional_cache_on_arguments(condition=cache_on)
1044 1045 def _tree_items(_repo_id, _tree_id):
1045 1046
1046 1047 repo_init = self._factory.repo_libgit2(wire)
1047 1048 with repo_init as repo:
1048 1049 try:
1049 1050 tree = repo[tree_id]
1050 1051 except KeyError:
1051 1052 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1052 1053
1053 1054 result = []
1054 1055 for item in tree:
1055 1056 item_sha = item.hex
1056 1057 item_mode = item.filemode
1057 1058 item_type = item.type_str
1058 1059
1059 1060 if item_type == 'commit':
1060 1061 # NOTE(marcink): submodules we translate to 'link' for backward compat
1061 1062 item_type = 'link'
1062 1063
1063 1064 result.append((item.name, item_mode, item_sha, item_type))
1064 1065 return result
1065 1066 return _tree_items(repo_id, tree_id)
1066 1067
1067 1068 @reraise_safe_exceptions
1068 1069 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1069 1070 """
1070 1071 Old version that uses subprocess to call diff
1071 1072 """
1072 1073
1073 1074 flags = [
1074 1075 '-U%s' % context, '--patch',
1075 1076 '--binary',
1076 1077 '--find-renames',
1077 1078 '--no-indent-heuristic',
1078 1079 # '--indent-heuristic',
1079 1080 #'--full-index',
1080 1081 #'--abbrev=40'
1081 1082 ]
1082 1083
1083 1084 if opt_ignorews:
1084 1085 flags.append('--ignore-all-space')
1085 1086
1086 1087 if commit_id_1 == self.EMPTY_COMMIT:
1087 1088 cmd = ['show'] + flags + [commit_id_2]
1088 1089 else:
1089 1090 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1090 1091
1091 1092 if file_filter:
1092 1093 cmd.extend(['--', file_filter])
1093 1094
1094 1095 diff, __ = self.run_git_command(wire, cmd)
1095 1096 # If we used 'show' command, strip first few lines (until actual diff
1096 1097 # starts)
1097 1098 if commit_id_1 == self.EMPTY_COMMIT:
1098 1099 lines = diff.splitlines()
1099 1100 x = 0
1100 1101 for line in lines:
1101 1102 if line.startswith(b'diff'):
1102 1103 break
1103 1104 x += 1
1104 1105 # Append a newline just like the 'diff' command does
1105 1106 diff = '\n'.join(lines[x:]) + '\n'
1106 1107 return diff
1107 1108
1108 1109 @reraise_safe_exceptions
1109 1110 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1110 1111 repo_init = self._factory.repo_libgit2(wire)
1111 1112 with repo_init as repo:
1112 1113 swap = True
1113 1114 flags = 0
1114 1115 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1115 1116
1116 1117 if opt_ignorews:
1117 1118 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1118 1119
1119 1120 if commit_id_1 == self.EMPTY_COMMIT:
1120 1121 comm1 = repo[commit_id_2]
1121 1122 diff_obj = comm1.tree.diff_to_tree(
1122 1123 flags=flags, context_lines=context, swap=swap)
1123 1124
1124 1125 else:
1125 1126 comm1 = repo[commit_id_2]
1126 1127 comm2 = repo[commit_id_1]
1127 1128 diff_obj = comm1.tree.diff_to_tree(
1128 1129 comm2.tree, flags=flags, context_lines=context, swap=swap)
1129 1130 similar_flags = 0
1130 1131 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1131 1132 diff_obj.find_similar(flags=similar_flags)
1132 1133
1133 1134 if file_filter:
1134 1135 for p in diff_obj:
1135 1136 if p.delta.old_file.path == file_filter:
1136 1137 return p.patch or ''
1137 1138 # no matching path == no diff
1138 1139 return ''
1139 1140 return diff_obj.patch or ''
1140 1141
1141 1142 @reraise_safe_exceptions
1142 1143 def node_history(self, wire, commit_id, path, limit):
1143 1144 cache_on, context_uid, repo_id = self._cache_on(wire)
1144 1145 region = self._region(wire)
1145 1146
1146 1147 @region.conditional_cache_on_arguments(condition=cache_on)
1147 1148 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1148 1149 # optimize for n==1, rev-list is much faster for that use-case
1149 1150 if limit == 1:
1150 1151 cmd = ['rev-list', '-1', commit_id, '--', path]
1151 1152 else:
1152 1153 cmd = ['log']
1153 1154 if limit:
1154 1155 cmd.extend(['-n', str(safe_int(limit, 0))])
1155 1156 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1156 1157
1157 1158 output, __ = self.run_git_command(wire, cmd)
1158 1159 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1159 1160
1160 1161 return [x for x in commit_ids]
1161 1162 return _node_history(context_uid, repo_id, commit_id, path, limit)
1162 1163
1163 1164 @reraise_safe_exceptions
1164 1165 def node_annotate_legacy(self, wire, commit_id, path):
1165 1166 # note: replaced by pygit2 implementation
1166 1167 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1167 1168 # -l ==> outputs long shas (and we need all 40 characters)
1168 1169 # --root ==> doesn't put '^' character for boundaries
1169 1170 # -r commit_id ==> blames for the given commit
1170 1171 output, __ = self.run_git_command(wire, cmd)
1171 1172
1172 1173 result = []
1173 1174 for i, blame_line in enumerate(output.splitlines()[:-1]):
1174 1175 line_no = i + 1
1175 1176 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1176 1177 result.append((line_no, blame_commit_id, line))
1177 1178
1178 1179 return result
1179 1180
1180 1181 @reraise_safe_exceptions
1181 1182 def node_annotate(self, wire, commit_id, path):
1182 1183
1183 1184 result_libgit = []
1184 1185 repo_init = self._factory.repo_libgit2(wire)
1185 1186 with repo_init as repo:
1186 1187 commit = repo[commit_id]
1187 1188 blame_obj = repo.blame(path, newest_commit=commit_id)
1188 1189 for i, line in enumerate(commit.tree[path].data.splitlines()):
1189 1190 line_no = i + 1
1190 1191 hunk = blame_obj.for_line(line_no)
1191 1192 blame_commit_id = hunk.final_commit_id.hex
1192 1193
1193 1194 result_libgit.append((line_no, blame_commit_id, line))
1194 1195
1195 1196 return result_libgit
1196 1197
1197 1198 @reraise_safe_exceptions
1198 1199 def update_server_info(self, wire):
1199 1200 repo = self._factory.repo(wire)
1200 1201 update_server_info(repo)
1201 1202
1202 1203 @reraise_safe_exceptions
1203 1204 def get_all_commit_ids(self, wire):
1204 1205
1205 1206 cache_on, context_uid, repo_id = self._cache_on(wire)
1206 1207 region = self._region(wire)
1207 1208
1208 1209 @region.conditional_cache_on_arguments(condition=cache_on)
1209 1210 def _get_all_commit_ids(_context_uid, _repo_id):
1210 1211
1211 1212 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1212 1213 try:
1213 1214 output, __ = self.run_git_command(wire, cmd)
1214 1215 return output.splitlines()
1215 1216 except Exception:
1216 1217 # Can be raised for empty repositories
1217 1218 return []
1218 1219
1219 1220 @region.conditional_cache_on_arguments(condition=cache_on)
1220 1221 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1221 1222 repo_init = self._factory.repo_libgit2(wire)
1222 1223 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1223 1224 results = []
1224 1225 with repo_init as repo:
1225 1226 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1226 1227 results.append(commit.id.hex)
1227 1228
1228 1229 return _get_all_commit_ids(context_uid, repo_id)
1229 1230
1230 1231 @reraise_safe_exceptions
1231 1232 def run_git_command(self, wire, cmd, **opts):
1232 1233 path = wire.get('path', None)
1233 1234
1234 1235 if path and os.path.isdir(path):
1235 1236 opts['cwd'] = path
1236 1237
1237 1238 if '_bare' in opts:
1238 1239 _copts = []
1239 1240 del opts['_bare']
1240 1241 else:
1241 1242 _copts = ['-c', 'core.quotepath=false', ]
1242 1243 safe_call = False
1243 1244 if '_safe' in opts:
1244 1245 # no exc on failure
1245 1246 del opts['_safe']
1246 1247 safe_call = True
1247 1248
1248 1249 if '_copts' in opts:
1249 1250 _copts.extend(opts['_copts'] or [])
1250 1251 del opts['_copts']
1251 1252
1252 1253 gitenv = os.environ.copy()
1253 1254 gitenv.update(opts.pop('extra_env', {}))
1254 1255 # need to clean/fix GIT_DIR!
1255 1256 if 'GIT_DIR' in gitenv:
1256 1257 del gitenv['GIT_DIR']
1257 1258 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1258 1259 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1259 1260
1260 1261 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1261 1262 _opts = {'env': gitenv, 'shell': False}
1262 1263
1263 1264 proc = None
1264 1265 try:
1265 1266 _opts.update(opts)
1266 1267 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1267 1268
1268 1269 return b''.join(proc), b''.join(proc.stderr)
1269 1270 except OSError as err:
1270 1271 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1271 1272 tb_err = ("Couldn't run git command (%s).\n"
1272 1273 "Original error was:%s\n"
1273 1274 "Call options:%s\n"
1274 1275 % (cmd, err, _opts))
1275 1276 log.exception(tb_err)
1276 1277 if safe_call:
1277 1278 return '', err
1278 1279 else:
1279 1280 raise exceptions.VcsException()(tb_err)
1280 1281 finally:
1281 1282 if proc:
1282 1283 proc.close()
1283 1284
1284 1285 @reraise_safe_exceptions
1285 1286 def install_hooks(self, wire, force=False):
1286 1287 from vcsserver.hook_utils import install_git_hooks
1287 1288 bare = self.bare(wire)
1288 1289 path = wire['path']
1289 1290 return install_git_hooks(path, bare, force_create=force)
1290 1291
1291 1292 @reraise_safe_exceptions
1292 1293 def get_hooks_info(self, wire):
1293 1294 from vcsserver.hook_utils import (
1294 1295 get_git_pre_hook_version, get_git_post_hook_version)
1295 1296 bare = self.bare(wire)
1296 1297 path = wire['path']
1297 1298 return {
1298 1299 'pre_version': get_git_pre_hook_version(path, bare),
1299 1300 'post_version': get_git_post_hook_version(path, bare),
1300 1301 }
1301 1302
1302 1303 @reraise_safe_exceptions
1303 1304 def set_head_ref(self, wire, head_name):
1304 1305 log.debug('Setting refs/head to `%s`', head_name)
1305 1306 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1306 1307 output, __ = self.run_git_command(wire, cmd)
1307 1308 return [head_name] + output.splitlines()
1308 1309
1309 1310 @reraise_safe_exceptions
1310 1311 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1311 1312 archive_dir_name, commit_id):
1312 1313
1313 1314 def file_walker(_commit_id, path):
1314 1315 repo_init = self._factory.repo_libgit2(wire)
1315 1316
1316 1317 with repo_init as repo:
1317 1318 commit = repo[commit_id]
1318 1319
1319 1320 if path in ['', '/']:
1320 1321 tree = commit.tree
1321 1322 else:
1322 1323 tree = commit.tree[path.rstrip('/')]
1323 1324 tree_id = tree.id.hex
1324 1325 try:
1325 1326 tree = repo[tree_id]
1326 1327 except KeyError:
1327 1328 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1328 1329
1329 1330 index = LibGit2Index.Index()
1330 1331 index.read_tree(tree)
1331 1332 file_iter = index
1332 1333
1333 1334 for fn in file_iter:
1334 1335 file_path = fn.path
1335 1336 mode = fn.mode
1336 1337 is_link = stat.S_ISLNK(mode)
1337 1338 if mode == pygit2.GIT_FILEMODE_COMMIT:
1338 1339 log.debug('Skipping path %s as a commit node', file_path)
1339 1340 continue
1340 1341 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1341 1342
1342 1343 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1343 1344 archive_dir_name, commit_id)
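The second hunk below applies the same one-line change to the Mercurial backend: `HgRemote.bulk_request()` sets `cache: False` on the wire before dispatching to the per-commit `ctx_*` methods. Caller-side usage is unchanged; a hypothetical invocation (the `git_remote`, `wire`, and `commit_id` objects are assumed to be set up elsewhere) would look like:

    # hypothetical usage sketch: each attribute is computed once per bulk call and
    # only the aggregated dict is cached by the outer _bulk_request wrapper
    attrs = git_remote.bulk_request(wire, commit_id, pre_load=['author', 'date', 'message'])
    author, date, message = attrs['author'], attrs['date'], attrs['message']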
@@ -1,1086 +1,1087 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib.request, urllib.parse, urllib.error
22 22 import urllib.request, urllib.error, urllib.parse
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase, purge
26 26
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30 from mercurial import repair
31 31
32 32 import vcsserver
33 33 from vcsserver import exceptions
34 34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 35 from vcsserver.hgcompat import (
36 36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 40 RepoLookupError, InterventionRequired, RequirementError,
41 41 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 42 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 43 from vcsserver.vcs_base import RemoteBase
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 def make_ui_from_config(repo_config):
49 49
50 50 class LoggingUI(ui.ui):
51 51
52 52 def status(self, *msg, **opts):
53 53 str_msg = map(safe_str, msg)
54 54 log.info(' '.join(str_msg).rstrip('\n'))
55 55 #super(LoggingUI, self).status(*msg, **opts)
56 56
57 57 def warn(self, *msg, **opts):
58 58 str_msg = map(safe_str, msg)
59 59 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
60 60 #super(LoggingUI, self).warn(*msg, **opts)
61 61
62 62 def error(self, *msg, **opts):
63 63 str_msg = map(safe_str, msg)
64 64 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
65 65 #super(LoggingUI, self).error(*msg, **opts)
66 66
67 67 def note(self, *msg, **opts):
68 68 str_msg = map(safe_str, msg)
69 69 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
70 70 #super(LoggingUI, self).note(*msg, **opts)
71 71
72 72 def debug(self, *msg, **opts):
73 73 str_msg = map(safe_str, msg)
74 74 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
75 75 #super(LoggingUI, self).debug(*msg, **opts)
76 76
77 77 baseui = LoggingUI()
78 78
79 79 # clean the baseui object
80 80 baseui._ocfg = hgconfig.config()
81 81 baseui._ucfg = hgconfig.config()
82 82 baseui._tcfg = hgconfig.config()
83 83
84 84 for section, option, value in repo_config:
85 85 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
86 86
87 87 # make our hgweb quiet so it doesn't print output
88 88 baseui.setconfig(b'ui', b'quiet', b'true')
89 89
90 90 baseui.setconfig(b'ui', b'paginate', b'never')
91 91 # for better Error reporting of Mercurial
92 92 baseui.setconfig(b'ui', b'message-output', b'stderr')
93 93
94 94 # force mercurial to only use 1 thread, otherwise it may try to set a
95 95 # signal in a non-main thread, thus generating a ValueError.
96 96 baseui.setconfig(b'worker', b'numcpus', 1)
97 97
98 98 # If there is no config for the largefiles extension, we explicitly disable
99 99 # it here. This overrides settings from repositories hgrc file. Recent
100 100 # mercurial versions enable largefiles in hgrc on clone from largefile
101 101 # repo.
102 102 if not baseui.hasconfig(b'extensions', b'largefiles'):
103 103 log.debug('Explicitly disable largefiles extension for repo.')
104 104 baseui.setconfig(b'extensions', b'largefiles', b'!')
105 105
106 106 return baseui
107 107
108 108
109 109 def reraise_safe_exceptions(func):
110 110 """Decorator for converting mercurial exceptions to something neutral."""
111 111
112 112 def wrapper(*args, **kwargs):
113 113 try:
114 114 return func(*args, **kwargs)
115 115 except (Abort, InterventionRequired) as e:
116 116 raise_from_original(exceptions.AbortException(e), e)
117 117 except RepoLookupError as e:
118 118 raise_from_original(exceptions.LookupException(e), e)
119 119 except RequirementError as e:
120 120 raise_from_original(exceptions.RequirementException(e), e)
121 121 except RepoError as e:
122 122 raise_from_original(exceptions.VcsException(e), e)
123 123 except LookupError as e:
124 124 raise_from_original(exceptions.LookupException(e), e)
125 125 except Exception as e:
126 126 if not hasattr(e, '_vcs_kind'):
127 127 log.exception("Unhandled exception in hg remote call")
128 128 raise_from_original(exceptions.UnhandledException(e), e)
129 129
130 130 raise
131 131 return wrapper
132 132
133 133
134 134 class MercurialFactory(RepoFactory):
135 135 repo_type = 'hg'
136 136
137 137 def _create_config(self, config, hooks=True):
138 138 if not hooks:
139 139 hooks_to_clean = frozenset((
140 140 'changegroup.repo_size', 'preoutgoing.pre_pull',
141 141 'outgoing.pull_logger', 'prechangegroup.pre_push'))
142 142 new_config = []
143 143 for section, option, value in config:
144 144 if section == 'hooks' and option in hooks_to_clean:
145 145 continue
146 146 new_config.append((section, option, value))
147 147 config = new_config
148 148
149 149 baseui = make_ui_from_config(config)
150 150 return baseui
151 151
152 152 def _create_repo(self, wire, create):
153 153 baseui = self._create_config(wire["config"])
154 154 return instance(baseui, ascii_bytes(wire["path"]), create)
155 155
156 156 def repo(self, wire, create=False):
157 157 """
158 158 Get a repository instance for the given path.
159 159 """
160 160 return self._create_repo(wire, create)
161 161
162 162
163 163 def patch_ui_message_output(baseui):
164 164 baseui.setconfig(b'ui', b'quiet', b'false')
165 165 output = io.BytesIO()
166 166
167 167 def write(data, **unused_kwargs):
168 168 output.write(data)
169 169
170 170 baseui.status = write
171 171 baseui.write = write
172 172 baseui.warn = write
173 173 baseui.debug = write
174 174
175 175 return baseui, output
176 176
177 177
178 178 class HgRemote(RemoteBase):
179 179
180 180 def __init__(self, factory):
181 181 self._factory = factory
182 182 self._bulk_methods = {
183 183 "affected_files": self.ctx_files,
184 184 "author": self.ctx_user,
185 185 "branch": self.ctx_branch,
186 186 "children": self.ctx_children,
187 187 "date": self.ctx_date,
188 188 "message": self.ctx_description,
189 189 "parents": self.ctx_parents,
190 190 "status": self.ctx_status,
191 191 "obsolete": self.ctx_obsolete,
192 192 "phase": self.ctx_phase,
193 193 "hidden": self.ctx_hidden,
194 194 "_file_paths": self.ctx_list,
195 195 }
196 196
197 197 def _get_ctx(self, repo, ref):
198 198 return get_ctx(repo, ref)
199 199
200 200 @reraise_safe_exceptions
201 201 def discover_hg_version(self):
202 202 from mercurial import util
203 203 return safe_str(util.version())
204 204
205 205 @reraise_safe_exceptions
206 206 def is_empty(self, wire):
207 207 repo = self._factory.repo(wire)
208 208
209 209 try:
210 210 return len(repo) == 0
211 211 except Exception:
212 212 log.exception("failed to read object_store")
213 213 return False
214 214
215 215 @reraise_safe_exceptions
216 216 def bookmarks(self, wire):
217 217 cache_on, context_uid, repo_id = self._cache_on(wire)
218 218 region = self._region(wire)
219 219
220 220 @region.conditional_cache_on_arguments(condition=cache_on)
221 221 def _bookmarks(_context_uid, _repo_id):
222 222 repo = self._factory.repo(wire)
223 223 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
224 224
225 225 return _bookmarks(context_uid, repo_id)
226 226
227 227 @reraise_safe_exceptions
228 228 def branches(self, wire, normal, closed):
229 229 cache_on, context_uid, repo_id = self._cache_on(wire)
230 230 region = self._region(wire)
231 231
232 232 @region.conditional_cache_on_arguments(condition=cache_on)
233 233 def _branches(_context_uid, _repo_id, _normal, _closed):
234 234 repo = self._factory.repo(wire)
235 235 iter_branches = repo.branchmap().iterbranches()
236 236 bt = {}
237 237 for branch_name, _heads, tip_node, is_closed in iter_branches:
238 238 if normal and not is_closed:
239 239 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
240 240 if closed and is_closed:
241 241 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
242 242
243 243 return bt
244 244
245 245 return _branches(context_uid, repo_id, normal, closed)
246 246
247 247 @reraise_safe_exceptions
248 248 def bulk_request(self, wire, commit_id, pre_load):
249 249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 250 region = self._region(wire)
251 251
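# NOTE: the region cache below stores the complete bulk result keyed by
# (repo_id, commit_id, pre_load), so the per-attribute ctx_* caches would
# only duplicate the same data; that is why the wire cache gets switched
# off for the inner method calls. A cached round-trip could e.g. look like:
#   bulk_request(wire, commit_id, pre_load=["author", "branch", "message"])
#   # -> {"author": ..., "branch": ..., "message": ...}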
252 252 @region.conditional_cache_on_arguments(condition=cache_on)
253 253 def _bulk_request(_repo_id, _commit_id, _pre_load):
254 254 result = {}
255 255 for attr in pre_load:
256 256 try:
257 257 method = self._bulk_methods[attr]
258 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
258 259 result[attr] = method(wire, commit_id)
259 260 except KeyError as e:
260 261 raise exceptions.VcsException(e)(
261 262 'Unknown bulk attribute: "%s"' % attr)
262 263 return result
263 264
264 265 return _bulk_request(repo_id, commit_id, sorted(pre_load))
265 266
266 267 @reraise_safe_exceptions
267 268 def ctx_branch(self, wire, commit_id):
268 269 cache_on, context_uid, repo_id = self._cache_on(wire)
269 270 region = self._region(wire)
270 271
271 272 @region.conditional_cache_on_arguments(condition=cache_on)
272 273 def _ctx_branch(_repo_id, _commit_id):
273 274 repo = self._factory.repo(wire)
274 275 ctx = self._get_ctx(repo, commit_id)
275 276 return ctx.branch()
276 277 return _ctx_branch(repo_id, commit_id)
277 278
278 279 @reraise_safe_exceptions
279 280 def ctx_date(self, wire, commit_id):
280 281 cache_on, context_uid, repo_id = self._cache_on(wire)
281 282 region = self._region(wire)
282 283
283 284 @region.conditional_cache_on_arguments(condition=cache_on)
284 285 def _ctx_date(_repo_id, _commit_id):
285 286 repo = self._factory.repo(wire)
286 287 ctx = self._get_ctx(repo, commit_id)
287 288 return ctx.date()
288 289 return _ctx_date(repo_id, commit_id)
289 290
290 291 @reraise_safe_exceptions
291 292 def ctx_description(self, wire, revision):
292 293 repo = self._factory.repo(wire)
293 294 ctx = self._get_ctx(repo, revision)
294 295 return ctx.description()
295 296
296 297 @reraise_safe_exceptions
297 298 def ctx_files(self, wire, commit_id):
298 299 cache_on, context_uid, repo_id = self._cache_on(wire)
299 300 region = self._region(wire)
300 301
301 302 @region.conditional_cache_on_arguments(condition=cache_on)
302 303 def _ctx_files(_repo_id, _commit_id):
303 304 repo = self._factory.repo(wire)
304 305 ctx = self._get_ctx(repo, commit_id)
305 306 return ctx.files()
306 307
307 308 return _ctx_files(repo_id, commit_id)
308 309
309 310 @reraise_safe_exceptions
310 311 def ctx_list(self, wire, revision):
311 312 repo = self._factory.repo(wire)
312 313 ctx = self._get_ctx(repo, revision)
313 314 return list(ctx)
314 315
315 316 @reraise_safe_exceptions
316 317 def ctx_parents(self, wire, commit_id):
317 318 cache_on, context_uid, repo_id = self._cache_on(wire)
318 319 region = self._region(wire)
319 320
320 321 @region.conditional_cache_on_arguments(condition=cache_on)
321 322 def _ctx_parents(_repo_id, _commit_id):
322 323 repo = self._factory.repo(wire)
323 324 ctx = self._get_ctx(repo, commit_id)
324 325 return [parent.hex() for parent in ctx.parents()
325 326 if not (parent.hidden() or parent.obsolete())]
326 327
327 328 return _ctx_parents(repo_id, commit_id)
328 329
329 330 @reraise_safe_exceptions
330 331 def ctx_children(self, wire, commit_id):
331 332 cache_on, context_uid, repo_id = self._cache_on(wire)
332 333 region = self._region(wire)
333 334
334 335 @region.conditional_cache_on_arguments(condition=cache_on)
335 336 def _ctx_children(_repo_id, _commit_id):
336 337 repo = self._factory.repo(wire)
337 338 ctx = self._get_ctx(repo, commit_id)
338 339 return [child.hex() for child in ctx.children()
339 340 if not (child.hidden() or child.obsolete())]
340 341
341 342 return _ctx_children(repo_id, commit_id)
342 343
343 344 @reraise_safe_exceptions
344 345 def ctx_phase(self, wire, commit_id):
345 346 cache_on, context_uid, repo_id = self._cache_on(wire)
346 347 region = self._region(wire)
347 348
348 349 @region.conditional_cache_on_arguments(condition=cache_on)
349 350 def _ctx_phase(_context_uid, _repo_id, _commit_id):
350 351 repo = self._factory.repo(wire)
351 352 ctx = self._get_ctx(repo, commit_id)
352 353 # public=0, draft=1, secret=2
353 354 return ctx.phase()
354 355 return _ctx_phase(context_uid, repo_id, commit_id)
355 356
356 357 @reraise_safe_exceptions
357 358 def ctx_obsolete(self, wire, commit_id):
358 359 cache_on, context_uid, repo_id = self._cache_on(wire)
359 360 region = self._region(wire)
360 361
361 362 @region.conditional_cache_on_arguments(condition=cache_on)
362 363 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
363 364 repo = self._factory.repo(wire)
364 365 ctx = self._get_ctx(repo, commit_id)
365 366 return ctx.obsolete()
366 367 return _ctx_obsolete(context_uid, repo_id, commit_id)
367 368
368 369 @reraise_safe_exceptions
369 370 def ctx_hidden(self, wire, commit_id):
370 371 cache_on, context_uid, repo_id = self._cache_on(wire)
371 372 region = self._region(wire)
372 373
373 374 @region.conditional_cache_on_arguments(condition=cache_on)
374 375 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
375 376 repo = self._factory.repo(wire)
376 377 ctx = self._get_ctx(repo, commit_id)
377 378 return ctx.hidden()
378 379 return _ctx_hidden(context_uid, repo_id, commit_id)
379 380
380 381 @reraise_safe_exceptions
381 382 def ctx_substate(self, wire, revision):
382 383 repo = self._factory.repo(wire)
383 384 ctx = self._get_ctx(repo, revision)
384 385 return ctx.substate
385 386
386 387 @reraise_safe_exceptions
387 388 def ctx_status(self, wire, revision):
388 389 repo = self._factory.repo(wire)
389 390 ctx = self._get_ctx(repo, revision)
390 391 status = repo[ctx.p1().node()].status(other=ctx.node())
391 392 # the status object (an odd, custom named tuple in mercurial) is not
392 393 # correctly serializable; we convert it to a list, as the underlying
393 394 # API expects a list
394 395 return list(status)
395 396
396 397 @reraise_safe_exceptions
397 398 def ctx_user(self, wire, revision):
398 399 repo = self._factory.repo(wire)
399 400 ctx = self._get_ctx(repo, revision)
400 401 return ctx.user()
401 402
402 403 @reraise_safe_exceptions
403 404 def check_url(self, url, config):
404 405 _proto = None
405 406 if '+' in url[:url.find('://')]:
406 407 _proto = url[0:url.find('+')]
407 408 url = url[url.find('+') + 1:]
408 409 handlers = []
409 410 url_obj = url_parser(url)
410 411 test_uri, authinfo = url_obj.authinfo()
411 412 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
412 413 url_obj.query = obfuscate_qs(url_obj.query)
413 414
414 415 cleaned_uri = str(url_obj)
415 416 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
416 417
417 418 if authinfo:
418 419 # create a password manager
419 420 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
420 421 passmgr.add_password(*authinfo)
421 422
422 423 handlers.extend((httpbasicauthhandler(passmgr),
423 424 httpdigestauthhandler(passmgr)))
424 425
425 426 o = urllib.request.build_opener(*handlers)
426 427 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
427 428 ('Accept', 'application/mercurial-0.1')]
428 429
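# probe the hg wire protocol with a harmless 'between' command using the
# null revision pair; the check below only requires an HTTP 200 response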
429 430 q = {"cmd": 'between'}
430 431 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
431 432 qs = '?%s' % urllib.parse.urlencode(q)
432 433 cu = "%s%s" % (test_uri, qs)
433 434 req = urllib.request.Request(cu, None, {})
434 435
435 436 try:
436 437 log.debug("Trying to open URL %s", cleaned_uri)
437 438 resp = o.open(req)
438 439 if resp.code != 200:
439 440 raise exceptions.URLError()('Return Code is not 200')
440 441 except Exception as e:
441 442 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
442 443 # means it cannot be cloned
443 444 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
444 445
445 446 # now check if it's a proper hg repo, but don't do it for svn
446 447 try:
447 448 if _proto == 'svn':
448 449 pass
449 450 else:
450 451 # check for pure hg repos
451 452 log.debug(
452 453 "Verifying if URL is a Mercurial repository: %s",
453 454 cleaned_uri)
454 455 ui = make_ui_from_config(config)
455 456 peer_checker = makepeer(ui, url)
456 457 peer_checker.lookup('tip')
457 458 except Exception as e:
458 459 log.warning("URL is not a valid Mercurial repository: %s",
459 460 cleaned_uri)
460 461 raise exceptions.URLError(e)(
461 462 "url [%s] does not look like an hg repo org_exc: %s"
462 463 % (cleaned_uri, e))
463 464
464 465 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
465 466 return True
466 467
467 468 @reraise_safe_exceptions
468 469 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
469 470 repo = self._factory.repo(wire)
470 471
471 472 if file_filter:
472 473 match_filter = match(file_filter[0], '', [file_filter[1]])
473 474 else:
474 475 match_filter = file_filter
475 476 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
476 477
477 478 try:
478 479 diff_iter = patch.diff(
479 480 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
480 481 return b"".join(diff_iter)
481 482 except RepoLookupError as e:
482 483 raise exceptions.LookupException(e)()
483 484
484 485 @reraise_safe_exceptions
485 486 def node_history(self, wire, revision, path, limit):
486 487 cache_on, context_uid, repo_id = self._cache_on(wire)
487 488 region = self._region(wire)
488 489
489 490 @region.conditional_cache_on_arguments(condition=cache_on)
490 491 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
491 492 repo = self._factory.repo(wire)
492 493
493 494 ctx = self._get_ctx(repo, revision)
494 495 fctx = ctx.filectx(safe_bytes(path))
495 496
496 497 def history_iter():
497 498 limit_rev = fctx.rev()
498 499 for obj in reversed(list(fctx.filelog())):
499 500 obj = fctx.filectx(obj)
500 501 ctx = obj.changectx()
501 502 if ctx.hidden() or ctx.obsolete():
502 503 continue
503 504
504 505 if limit_rev >= obj.rev():
505 506 yield obj
506 507
507 508 history = []
508 509 for cnt, obj in enumerate(history_iter()):
509 510 if limit and cnt >= limit:
510 511 break
511 512 history.append(hex(obj.node()))
512 513
513 514 return [x for x in history]
514 515 return _node_history(context_uid, repo_id, revision, path, limit)
515 516
516 517 @reraise_safe_exceptions
517 518 def node_history_untill(self, wire, revision, path, limit):
518 519 cache_on, context_uid, repo_id = self._cache_on(wire)
519 520 region = self._region(wire)
520 521
521 522 @region.conditional_cache_on_arguments(condition=cache_on)
521 522 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
523 524 repo = self._factory.repo(wire)
524 525 ctx = self._get_ctx(repo, revision)
525 526 fctx = ctx.filectx(safe_bytes(path))
526 527
527 528 file_log = list(fctx.filelog())
528 529 if limit:
529 530 # Limit to the last n items
530 531 file_log = file_log[-limit:]
531 532
532 533 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
533 534 return _node_history_until(context_uid, repo_id, revision, path, limit)
534 535
535 536 @reraise_safe_exceptions
536 537 def fctx_annotate(self, wire, revision, path):
537 538 repo = self._factory.repo(wire)
538 539 ctx = self._get_ctx(repo, revision)
539 540 fctx = ctx.filectx(safe_bytes(path))
540 541
541 542 result = []
542 543 for i, annotate_obj in enumerate(fctx.annotate(), 1):
543 544 ln_no = i
544 545 sha = hex(annotate_obj.fctx.node())
545 546 content = annotate_obj.text
546 547 result.append((ln_no, sha, content))
547 548 return result
548 549
549 550 @reraise_safe_exceptions
550 551 def fctx_node_data(self, wire, revision, path):
551 552 repo = self._factory.repo(wire)
552 553 ctx = self._get_ctx(repo, revision)
553 554 fctx = ctx.filectx(safe_bytes(path))
554 555 return fctx.data()
555 556
556 557 @reraise_safe_exceptions
557 558 def fctx_flags(self, wire, commit_id, path):
558 559 cache_on, context_uid, repo_id = self._cache_on(wire)
559 560 region = self._region(wire)
560 561
561 562 @region.conditional_cache_on_arguments(condition=cache_on)
562 563 def _fctx_flags(_repo_id, _commit_id, _path):
563 564 repo = self._factory.repo(wire)
564 565 ctx = self._get_ctx(repo, commit_id)
565 566 fctx = ctx.filectx(safe_bytes(path))
566 567 return fctx.flags()
567 568
568 569 return _fctx_flags(repo_id, commit_id, path)
569 570
570 571 @reraise_safe_exceptions
571 572 def fctx_size(self, wire, commit_id, path):
572 573 cache_on, context_uid, repo_id = self._cache_on(wire)
573 574 region = self._region(wire)
574 575
575 576 @region.conditional_cache_on_arguments(condition=cache_on)
576 577 def _fctx_size(_repo_id, _revision, _path):
577 578 repo = self._factory.repo(wire)
578 579 ctx = self._get_ctx(repo, commit_id)
579 580 fctx = ctx.filectx(safe_bytes(path))
580 581 return fctx.size()
581 582 return _fctx_size(repo_id, commit_id, path)
582 583
583 584 @reraise_safe_exceptions
584 585 def get_all_commit_ids(self, wire, name):
585 586 cache_on, context_uid, repo_id = self._cache_on(wire)
586 587 region = self._region(wire)
587 588
588 589 @region.conditional_cache_on_arguments(condition=cache_on)
589 590 def _get_all_commit_ids(_context_uid, _repo_id, _name):
590 591 repo = self._factory.repo(wire)
591 592 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
592 593 return revs
593 594 return _get_all_commit_ids(context_uid, repo_id, name)
594 595
595 596 @reraise_safe_exceptions
596 597 def get_config_value(self, wire, section, name, untrusted=False):
597 598 repo = self._factory.repo(wire)
598 599 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
599 600
600 601 @reraise_safe_exceptions
601 602 def is_large_file(self, wire, commit_id, path):
602 603 cache_on, context_uid, repo_id = self._cache_on(wire)
603 604 region = self._region(wire)
604 605
605 606 @region.conditional_cache_on_arguments(condition=cache_on)
606 607 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
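# only the path matters here: lfutil.isstandin() just checks whether the
# given path points at a largefiles standin (a path under .hglf/)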
607 608 return largefiles.lfutil.isstandin(safe_bytes(path))
608 609
609 610 return _is_large_file(context_uid, repo_id, commit_id, path)
610 611
611 612 @reraise_safe_exceptions
612 613 def is_binary(self, wire, revision, path):
613 614 cache_on, context_uid, repo_id = self._cache_on(wire)
614 615 region = self._region(wire)
615 616
616 617 @region.conditional_cache_on_arguments(condition=cache_on)
617 618 def _is_binary(_repo_id, _sha, _path):
618 619 repo = self._factory.repo(wire)
619 620 ctx = self._get_ctx(repo, revision)
620 621 fctx = ctx.filectx(safe_bytes(path))
621 622 return fctx.isbinary()
622 623
623 624 return _is_binary(repo_id, revision, path)
624 625
625 626 @reraise_safe_exceptions
626 627 def md5_hash(self, wire, revision, path):
627 628 cache_on, context_uid, repo_id = self._cache_on(wire)
628 629 region = self._region(wire)
629 630
630 631 @region.conditional_cache_on_arguments(condition=cache_on)
631 632 def _md5_hash(_repo_id, _sha, _path):
632 633 repo = self._factory.repo(wire)
633 634 ctx = self._get_ctx(repo, revision)
634 635 fctx = ctx.filectx(safe_bytes(path))
635 636 return hashlib.md5(fctx.data()).hexdigest()
636 637
637 638 return _md5_hash(repo_id, revision, path)
638 639
639 640 @reraise_safe_exceptions
640 641 def in_largefiles_store(self, wire, sha):
641 642 repo = self._factory.repo(wire)
642 643 return largefiles.lfutil.instore(repo, sha)
643 644
644 645 @reraise_safe_exceptions
645 646 def in_user_cache(self, wire, sha):
646 647 repo = self._factory.repo(wire)
647 648 return largefiles.lfutil.inusercache(repo.ui, sha)
648 649
649 650 @reraise_safe_exceptions
650 651 def store_path(self, wire, sha):
651 652 repo = self._factory.repo(wire)
652 653 return largefiles.lfutil.storepath(repo, sha)
653 654
654 655 @reraise_safe_exceptions
655 656 def link(self, wire, sha, path):
656 657 repo = self._factory.repo(wire)
657 658 largefiles.lfutil.link(
658 659 largefiles.lfutil.usercachepath(repo.ui, sha), path)
659 660
660 661 @reraise_safe_exceptions
661 662 def localrepository(self, wire, create=False):
662 663 self._factory.repo(wire, create=create)
663 664
664 665 @reraise_safe_exceptions
665 666 def lookup(self, wire, revision, both):
666 667 cache_on, context_uid, repo_id = self._cache_on(wire)
667 668 region = self._region(wire)
668 669
669 670 @region.conditional_cache_on_arguments(condition=cache_on)
670 671 def _lookup(_context_uid, _repo_id, _revision, _both):
671 672
672 673 repo = self._factory.repo(wire)
673 674 rev = _revision
674 675 if isinstance(rev, int):
675 676 # NOTE(marcink):
676 677 # since Mercurial doesn't support negative indexes properly
677 678 # we need to shift accordingly by one to get proper index, e.g
678 679 # repo[-1] => repo[-2]
679 680 # repo[0] => repo[-1]
680 681 if rev <= 0:
681 682 rev = rev + -1
682 683 try:
683 684 ctx = self._get_ctx(repo, rev)
684 685 except (TypeError, RepoLookupError) as e:
685 686 e._org_exc_tb = traceback.format_exc()
686 687 raise exceptions.LookupException(e)(rev)
687 688 except LookupError as e:
688 689 e._org_exc_tb = traceback.format_exc()
689 690 raise exceptions.LookupException(e)(e.name)
690 691
691 692 if not both:
692 693 return ctx.hex()
693 694
694 695 ctx = repo[ctx.hex()]
695 696 return ctx.hex(), ctx.rev()
696 697
697 698 return _lookup(context_uid, repo_id, revision, both)
698 699
699 700 @reraise_safe_exceptions
700 701 def sync_push(self, wire, url):
701 702 if not self.check_url(url, wire['config']):
702 703 return
703 704
704 705 repo = self._factory.repo(wire)
705 706
706 707 # Disable any prompts for this repo
707 708 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
708 709
709 710 bookmarks = list(dict(repo._bookmarks).keys())
710 711 remote = peer(repo, {}, safe_bytes(url))
711 712 # Disable any prompts for this remote
712 713 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
713 714
714 715 return exchange.push(
715 716 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
716 717
717 718 @reraise_safe_exceptions
718 719 def revision(self, wire, rev):
719 720 repo = self._factory.repo(wire)
720 721 ctx = self._get_ctx(repo, rev)
721 722 return ctx.rev()
722 723
723 724 @reraise_safe_exceptions
724 725 def rev_range(self, wire, commit_filter):
725 726 cache_on, context_uid, repo_id = self._cache_on(wire)
726 727 region = self._region(wire)
727 728
728 729 @region.conditional_cache_on_arguments(condition=cache_on)
729 730 def _rev_range(_context_uid, _repo_id, _filter):
730 731 repo = self._factory.repo(wire)
731 732 revisions = [
732 733 ascii_str(repo[rev].hex())
733 734 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
734 735 ]
735 736 return revisions
736 737
737 738 return _rev_range(context_uid, repo_id, sorted(commit_filter))
738 739
739 740 @reraise_safe_exceptions
740 741 def rev_range_hash(self, wire, node):
741 742 repo = self._factory.repo(wire)
742 743
743 744 def get_revs(repo, rev_opt):
744 745 if rev_opt:
745 746 revs = revrange(repo, rev_opt)
746 747 if len(revs) == 0:
747 748 return (nullrev, nullrev)
748 749 return max(revs), min(revs)
749 750 else:
750 751 return len(repo) - 1, 0
751 752
752 753 stop, start = get_revs(repo, [node + ':'])
753 754 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
754 755 return revs
755 756
756 757 @reraise_safe_exceptions
757 758 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
758 759 other_path = kwargs.pop('other_path', None)
759 760
760 761 # case when we want to compare two independent repositories
761 762 if other_path and other_path != wire["path"]:
762 763 baseui = self._factory._create_config(wire["config"])
763 764 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
764 765 else:
765 766 repo = self._factory.repo(wire)
766 767 return list(repo.revs(rev_spec, *args))
767 768
768 769 @reraise_safe_exceptions
769 770 def verify(self, wire,):
770 771 repo = self._factory.repo(wire)
771 772 baseui = self._factory._create_config(wire['config'])
772 773
773 774 baseui, output = patch_ui_message_output(baseui)
774 775
775 776 repo.ui = baseui
776 777 verify.verify(repo)
777 778 return output.getvalue()
778 779
779 780 @reraise_safe_exceptions
780 781 def hg_update_cache(self, wire,):
781 782 repo = self._factory.repo(wire)
782 783 baseui = self._factory._create_config(wire['config'])
783 784 baseui, output = patch_ui_message_output(baseui)
784 785
785 786 repo.ui = baseui
786 787 with repo.wlock(), repo.lock():
787 788 repo.updatecaches(full=True)
788 789
789 790 return output.getvalue()
790 791
791 792 @reraise_safe_exceptions
792 793 def hg_rebuild_fn_cache(self, wire,):
793 794 repo = self._factory.repo(wire)
794 795 baseui = self._factory._create_config(wire['config'])
795 796 baseui, output = patch_ui_message_output(baseui)
796 797
797 798 repo.ui = baseui
798 799
799 800 repair.rebuildfncache(baseui, repo)
800 801
801 802 return output.getvalue()
802 803
803 804 @reraise_safe_exceptions
804 805 def tags(self, wire):
805 806 cache_on, context_uid, repo_id = self._cache_on(wire)
806 807 region = self._region(wire)
807 808
808 809 @region.conditional_cache_on_arguments(condition=cache_on)
809 810 def _tags(_context_uid, _repo_id):
810 811 repo = self._factory.repo(wire)
811 812 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
812 813
813 814 return _tags(context_uid, repo_id)
814 815
815 816 @reraise_safe_exceptions
816 817 def update(self, wire, node=None, clean=False):
817 818 repo = self._factory.repo(wire)
818 819 baseui = self._factory._create_config(wire['config'])
819 820 commands.update(baseui, repo, node=node, clean=clean)
820 821
821 822 @reraise_safe_exceptions
822 823 def identify(self, wire):
823 824 repo = self._factory.repo(wire)
824 825 baseui = self._factory._create_config(wire['config'])
825 826 output = io.BytesIO()
826 827 baseui.write = output.write
827 828 # This is required to get a full node id
828 829 baseui.debugflag = True
829 830 commands.identify(baseui, repo, id=True)
830 831
831 832 return output.getvalue()
832 833
833 834 @reraise_safe_exceptions
834 835 def heads(self, wire, branch=None):
835 836 repo = self._factory.repo(wire)
836 837 baseui = self._factory._create_config(wire['config'])
837 838 output = io.BytesIO()
838 839
839 840 def write(data, **unused_kwargs):
840 841 output.write(data)
841 842
842 843 baseui.write = write
843 844 if branch:
844 845 args = [safe_bytes(branch)]
845 846 else:
846 847 args = []
847 848 commands.heads(baseui, repo, template=b'{node} ', *args)
848 849
849 850 return output.getvalue()
850 851
851 852 @reraise_safe_exceptions
852 853 def ancestor(self, wire, revision1, revision2):
853 854 repo = self._factory.repo(wire)
854 855 changelog = repo.changelog
855 856 lookup = repo.lookup
856 857 a = changelog.ancestor(lookup(revision1), lookup(revision2))
857 858 return hex(a)
858 859
859 860 @reraise_safe_exceptions
860 861 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
861 862 baseui = self._factory._create_config(wire["config"], hooks=hooks)
862 863 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
863 864
864 865 @reraise_safe_exceptions
865 866 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
866 867
867 868 repo = self._factory.repo(wire)
868 869 baseui = self._factory._create_config(wire['config'])
869 870 publishing = baseui.configbool(b'phases', b'publish')
870 871
871 872 def _filectxfn(_repo, ctx, path: bytes):
872 873 """
873 874 Marks given path as added/changed/removed in a given _repo. This is
874 875 for internal mercurial commit function.
875 876 """
876 877
877 878 # check if this path is removed
878 879 if safe_str(path) in removed:
879 880 # returning None is a way to mark node for removal
880 881 return None
881 882
882 883 # check if this path is added or changed
883 884 for node in updated:
884 885 if safe_bytes(node['path']) == path:
885 886 return memfilectx(
886 887 _repo,
887 888 changectx=ctx,
888 889 path=safe_bytes(node['path']),
889 890 data=safe_bytes(node['content']),
890 891 islink=False,
891 892 isexec=bool(node['mode'] & stat.S_IXUSR),
892 893 copysource=False)
893 894 abort_exc = exceptions.AbortException()
894 895 raise abort_exc(f"Given path hasn't been marked as added, changed or removed ({path})")
895 896
896 897 if publishing:
897 898 new_commit_phase = b'public'
898 899 else:
899 900 new_commit_phase = b'draft'
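# make the phase of the new commit follow the repo publishing setting:
# public for publishing repositories, draft otherwise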
900 901 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
901 902 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
902 903 commit_ctx = memctx(
903 904 repo=repo,
904 905 parents=parents,
905 906 text=safe_bytes(message),
906 907 files=[safe_bytes(x) for x in files],
907 908 filectxfn=_filectxfn,
908 909 user=safe_bytes(user),
909 910 date=(commit_time, commit_timezone),
910 911 extra=kwargs)
911 912
912 913 n = repo.commitctx(commit_ctx)
913 914 new_id = hex(n)
914 915
915 916 return new_id
916 917
917 918 @reraise_safe_exceptions
918 919 def pull(self, wire, url, commit_ids=None):
919 920 repo = self._factory.repo(wire)
920 921 # Disable any prompts for this repo
921 922 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
922 923
923 924 remote = peer(repo, {}, safe_bytes(url))
924 925 # Disable any prompts for this remote
925 926 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
926 927
927 928 if commit_ids:
928 929 commit_ids = [bin(commit_id) for commit_id in commit_ids]
929 930
930 931 return exchange.pull(
931 932 repo, remote, heads=commit_ids, force=None).cgresult
932 933
933 934 @reraise_safe_exceptions
934 935 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
935 936 repo = self._factory.repo(wire)
936 937 baseui = self._factory._create_config(wire['config'], hooks=hooks)
937 938
938 939 # Mercurial internally has a lot of logic that checks ONLY whether
939 940 # an option is defined, so we only pass options that are actually set
940 941 opts = {}
941 942 if bookmark:
942 943 opts['bookmark'] = bookmark
943 944 if branch:
944 945 opts['branch'] = branch
945 946 if revision:
946 947 opts['rev'] = revision
947 948
948 949 commands.pull(baseui, repo, source, **opts)
949 950
950 951 @reraise_safe_exceptions
951 952 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
952 953 repo = self._factory.repo(wire)
953 954 baseui = self._factory._create_config(wire['config'], hooks=hooks)
954 955 commands.push(baseui, repo, dest=dest_path, rev=revisions,
955 956 new_branch=push_branches)
956 957
957 958 @reraise_safe_exceptions
958 959 def strip(self, wire, revision, update, backup):
959 960 repo = self._factory.repo(wire)
960 961 ctx = self._get_ctx(repo, revision)
961 962 hgext_strip(
962 963 repo.baseui, repo, ctx.node(), update=update, backup=backup)
963 964
964 965 @reraise_safe_exceptions
965 966 def get_unresolved_files(self, wire):
966 967 repo = self._factory.repo(wire)
967 968
968 969 log.debug('Calculating unresolved files for repo: %s', repo)
969 970 output = io.BytesIO()
970 971
971 972 def write(data, **unused_kwargs):
972 973 output.write(data)
973 974
974 975 baseui = self._factory._create_config(wire['config'])
975 976 baseui.write = write
976 977
977 978 commands.resolve(baseui, repo, list=True)
978 979 unresolved = output.getvalue().splitlines(0)
979 980 return unresolved
980 981
981 982 @reraise_safe_exceptions
982 983 def merge(self, wire, revision):
983 984 repo = self._factory.repo(wire)
984 985 baseui = self._factory._create_config(wire['config'])
985 986 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
986 987
987 988 # In case sub repositories are used, mercurial prompts the user in
988 989 # case of merge conflicts or different sub repository sources. By
989 990 # setting the interactive flag to `False` mercurial doesn't prompt the
990 991 # user but instead uses a default value.
991 992 repo.ui.setconfig(b'ui', b'interactive', False)
992 993 commands.merge(baseui, repo, rev=revision)
993 994
994 995 @reraise_safe_exceptions
995 996 def merge_state(self, wire):
996 997 repo = self._factory.repo(wire)
997 998 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
998 999
999 1000 # In case sub repositories are used, mercurial prompts the user in
1000 1001 # case of merge conflicts or different sub repository sources. By
1001 1002 # setting the interactive flag to `False` mercurial doesn't prompt the
1002 1003 # user but instead uses a default value.
1003 1004 repo.ui.setconfig(b'ui', b'interactive', False)
1004 1005 ms = hg_merge.mergestate(repo)
1005 1006 return [x for x in ms.unresolved()]
1006 1007
1007 1008 @reraise_safe_exceptions
1008 1009 def commit(self, wire, message, username, close_branch=False):
1009 1010 repo = self._factory.repo(wire)
1010 1011 baseui = self._factory._create_config(wire['config'])
1011 1012 repo.ui.setconfig(b'ui', b'username', username)
1012 1013 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1013 1014
1014 1015 @reraise_safe_exceptions
1015 1016 def rebase(self, wire, source=None, dest=None, abort=False):
1016 1017 repo = self._factory.repo(wire)
1017 1018 baseui = self._factory._create_config(wire['config'])
1018 1019 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1019 1020 # In case sub repositories are used, mercurial prompts the user in
1020 1021 # case of merge conflicts or different sub repository sources. By
1021 1022 # setting the interactive flag to `False` mercurial doesn't prompt the
1022 1023 # user but instead uses a default value.
1023 1024 repo.ui.setconfig(b'ui', b'interactive', False)
1024 1025 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1025 1026
1026 1027 @reraise_safe_exceptions
1027 1028 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1028 1029 repo = self._factory.repo(wire)
1029 1030 ctx = self._get_ctx(repo, revision)
1030 1031 node = ctx.node()
1031 1032
1032 1033 date = (tag_time, tag_timezone)
1033 1034 try:
1034 1035 hg_tag.tag(repo, name, node, message, local, user, date)
1035 1036 except Abort as e:
1036 1037 log.exception("Tag operation aborted")
1037 1038 # Exception can contain unicode which we convert
1038 1039 raise exceptions.AbortException(e)(repr(e))
1039 1040
1040 1041 @reraise_safe_exceptions
1041 1042 def bookmark(self, wire, bookmark, revision=None):
1042 1043 repo = self._factory.repo(wire)
1043 1044 baseui = self._factory._create_config(wire['config'])
1044 1045 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1045 1046
1046 1047 @reraise_safe_exceptions
1047 1048 def install_hooks(self, wire, force=False):
1048 1049 # we don't need any special hooks for Mercurial
1049 1050 pass
1050 1051
1051 1052 @reraise_safe_exceptions
1052 1053 def get_hooks_info(self, wire):
1053 1054 return {
1054 1055 'pre_version': vcsserver.__version__,
1055 1056 'post_version': vcsserver.__version__,
1056 1057 }
1057 1058
1058 1059 @reraise_safe_exceptions
1059 1060 def set_head_ref(self, wire, head_name):
1060 1061 pass
1061 1062
1062 1063 @reraise_safe_exceptions
1063 1064 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1064 1065 archive_dir_name, commit_id):
1065 1066
1066 1067 def file_walker(_commit_id, path):
1067 1068 repo = self._factory.repo(wire)
1068 1069 ctx = repo[_commit_id]
1069 1070 is_root = path in ['', '/']
1070 1071 if is_root:
1071 1072 matcher = alwaysmatcher(badfn=None)
1072 1073 else:
1073 1074 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1074 1075 file_iter = ctx.manifest().walk(matcher)
1075 1076
1076 1077 for fn in file_iter:
1077 1078 file_path = fn
1078 1079 flags = ctx.flags(fn)
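# manifest flags: b'x' marks an executable entry, b'l' a symlink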
1079 1080 mode = b'x' in flags and 0o755 or 0o644
1080 1081 is_link = b'l' in flags
1081 1082
1082 1083 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1083 1084
1084 1085 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1085 1086 archive_dir_name, commit_id)
1086 1087