vcsserver: few fixes for python3 support
super-admin -
r1076:69a85da8 python3
@@ -1,1344 +1,1349 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib.request, urllib.parse, urllib.error
26 26 import urllib.request, urllib.error, urllib.parse
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from pygit2 import index as LibGit2Index
33 33 from dulwich import index, objects
34 34 from dulwich.client import HttpGitClient, LocalGitClient
35 35 from dulwich.errors import (
36 36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 37 MissingCommitError, ObjectMissing, HangupException,
38 38 UnexpectedCommandError)
39 39 from dulwich.repo import Repo as DulwichRepo
40 40 from dulwich.server import update_server_info
41 41
42 42 from vcsserver import exceptions, settings, subprocessio
43 43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
44 44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 45 from vcsserver.hgcompat import (
46 46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 47 from vcsserver.git_lfs.lib import LFSOidStore
48 48 from vcsserver.vcs_base import RemoteBase
49 49
50 50 DIR_STAT = stat.S_IFDIR
51 51 FILE_MODE = stat.S_IFMT
52 52 GIT_LINK = objects.S_IFGITLINK
53 53 PEELED_REF_MARKER = b'^{}'
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def reraise_safe_exceptions(func):
60 60 """Converts Dulwich exceptions to something neutral."""
61 61
62 62 @wraps(func)
63 63 def wrapper(*args, **kwargs):
64 64 try:
65 65 return func(*args, **kwargs)
66 66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 67 exc = exceptions.LookupException(org_exc=e)
68 68 raise exc(safe_str(e))
69 69 except (HangupException, UnexpectedCommandError) as e:
70 70 exc = exceptions.VcsException(org_exc=e)
71 71 raise exc(safe_str(e))
72 72 except Exception as e:
73 73 # NOTE(marcink): because of how dulwich handles some exceptions
74 74 # (KeyError on empty repos), we cannot track this and catch all
75 75 # exceptions; these are exceptions raised by other handlers
76 76 #if not hasattr(e, '_vcs_kind'):
77 77 #log.exception("Unhandled exception in git remote call")
78 78 #raise_from_original(exceptions.UnhandledException)
79 79 raise
80 80 return wrapper
81 81
82 82
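The decorator above exists so callers of the remote objects never see dulwich-specific exception types; everything is translated into VCSServer's neutral exceptions. A minimal standalone sketch of the same pattern (the names below are illustrative, not the real vcsserver API):

from functools import wraps

class NeutralLookupError(Exception):
    """Stand-in for exceptions.LookupException used above."""

def translate_lookup_errors(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyError as e:
            # a backend-specific error surfaces as a neutral one
            raise NeutralLookupError(str(e)) from e
    return wrapper

@translate_lookup_errors
def resolve_ref(refs, name):
    return refs[name]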
83 83 class Repo(DulwichRepo):
84 84 """
85 85 A wrapper for dulwich Repo class.
86 86
87 87 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
88 88 "Too many open files" error. We need to close all open file descriptors
89 89 once the repo object is destroyed.
90 90 """
91 91 def __del__(self):
92 92 if hasattr(self, 'object_store'):
93 93 self.close()
94 94
95 95
96 96 class Repository(LibGit2Repo):
97 97
98 98 def __enter__(self):
99 99 return self
100 100
101 101 def __exit__(self, exc_type, exc_val, exc_tb):
102 102 self.free()
103 103
104 104
105 105 class GitFactory(RepoFactory):
106 106 repo_type = 'git'
107 107
108 108 def _create_repo(self, wire, create, use_libgit2=False):
109 109 if use_libgit2:
110 110 return Repository(wire['path'])
111 111 else:
112 112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
113 113 return Repo(repo_path)
114 114
115 115 def repo(self, wire, create=False, use_libgit2=False):
116 116 """
117 117 Get a repository instance for the given path.
118 118 """
119 119 return self._create_repo(wire, create, use_libgit2)
120 120
121 121 def repo_libgit2(self, wire):
122 122 return self.repo(wire, use_libgit2=True)
123 123
124 124
125 125 class GitRemote(RemoteBase):
126 126
127 127 def __init__(self, factory):
128 128 self._factory = factory
129 129 self._bulk_methods = {
130 130 "date": self.date,
131 131 "author": self.author,
132 132 "branch": self.branch,
133 133 "message": self.message,
134 134 "parents": self.parents,
135 135 "_commit": self.revision,
136 136 }
137 137
138 138 def _wire_to_config(self, wire):
139 139 if 'config' in wire:
140 140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
141 141 return {}
142 142
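For reference, `_wire_to_config` flattens the (section, option, value) triples carried on the wire into `section_option` keys that the rest of this class looks up. A small illustration (the sample values are made up):

wire = {'config': [('vcs', 'ssl_dir', '/etc/ssl/certs'),
                   ('vcs_git_lfs', 'store_location', '/var/opt/lfs-store')]}
config = {f'{section}_{option}': value for section, option, value in wire['config']}
# config == {'vcs_ssl_dir': '/etc/ssl/certs',
#            'vcs_git_lfs_store_location': '/var/opt/lfs-store'}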
143 143 def _remote_conf(self, config):
144 144 params = [
145 145 '-c', 'core.askpass=""',
146 146 ]
147 147 ssl_cert_dir = config.get('vcs_ssl_dir')
148 148 if ssl_cert_dir:
149 149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
150 150 return params
151 151
152 152 @reraise_safe_exceptions
153 153 def discover_git_version(self):
154 154 stdout, _ = self.run_git_command(
155 155 {}, ['--version'], _bare=True, _safe=True)
156 156 prefix = b'git version'
157 157 if stdout.startswith(prefix):
158 158 stdout = stdout[len(prefix):]
159 159 return safe_str(stdout.strip())
160 160
161 161 @reraise_safe_exceptions
162 162 def is_empty(self, wire):
163 163 repo_init = self._factory.repo_libgit2(wire)
164 164 with repo_init as repo:
165 165
166 166 try:
167 167 has_head = repo.head.name
168 168 if has_head:
169 169 return False
170 170
171 171 # NOTE(marcink): check again using a more expensive method
172 172 return repo.is_empty
173 173 except Exception:
174 174 pass
175 175
176 176 return True
177 177
178 178 @reraise_safe_exceptions
179 179 def assert_correct_path(self, wire):
180 180 cache_on, context_uid, repo_id = self._cache_on(wire)
181 181 region = self._region(wire)
182 182
183 183 @region.conditional_cache_on_arguments(condition=cache_on)
184 184 def _assert_correct_path(_context_uid, _repo_id):
185 185 try:
186 186 repo_init = self._factory.repo_libgit2(wire)
187 187 with repo_init as repo:
188 188 pass
189 189 except pygit2.GitError:
190 190 path = wire.get('path')
191 191 tb = traceback.format_exc()
192 192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
193 193 return False
194 194
195 195 return True
196 196 return _assert_correct_path(context_uid, repo_id)
197 197
198 198 @reraise_safe_exceptions
199 199 def bare(self, wire):
200 200 repo_init = self._factory.repo_libgit2(wire)
201 201 with repo_init as repo:
202 202 return repo.is_bare
203 203
204 204 @reraise_safe_exceptions
205 205 def blob_as_pretty_string(self, wire, sha):
206 206 repo_init = self._factory.repo_libgit2(wire)
207 207 with repo_init as repo:
208 208 blob_obj = repo[sha]
209 209 blob = blob_obj.data
210 210 return blob
211 211
212 212 @reraise_safe_exceptions
213 213 def blob_raw_length(self, wire, sha):
214 214 cache_on, context_uid, repo_id = self._cache_on(wire)
215 215 region = self._region(wire)
216 216
217 217 @region.conditional_cache_on_arguments(condition=cache_on)
218 218 def _blob_raw_length(_repo_id, _sha):
219 219
220 220 repo_init = self._factory.repo_libgit2(wire)
221 221 with repo_init as repo:
222 222 blob = repo[sha]
223 223 return blob.size
224 224
225 225 return _blob_raw_length(repo_id, sha)
226 226
227 227 def _parse_lfs_pointer(self, raw_content):
228 228 spec_string = b'version https://git-lfs.github.com/spec'
229 229 if raw_content and raw_content.startswith(spec_string):
230 230
231 231 pattern = re.compile(rb"""
232 232 (?:\n)?
233 233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 235 ^size[ ](?P<oid_size>[0-9]+)\n
236 236 (?:\n)?
237 237 """, re.VERBOSE | re.MULTILINE)
238 238 match = pattern.match(raw_content)
239 239 if match:
240 240 return match.groupdict()
241 241
242 242 return {}
243 243
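For context, a Git LFS pointer file is a tiny text blob in the format matched above; fed to `_parse_lfs_pointer`, content like the following (oid and size invented for the example) would yield the `spec_ver`, `oid_hash` and `oid_size` groups as bytes:

pointer = (b"version https://git-lfs.github.com/spec/v1\n"
           b"oid sha256:" + b"ab" * 32 + b"\n"
           b"size 12345\n")
# expected result, roughly:
# {'spec_ver': b'v1', 'oid_hash': b'abab...ab', 'oid_size': b'12345'}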
244 244 @reraise_safe_exceptions
245 245 def is_large_file(self, wire, commit_id):
246 246 cache_on, context_uid, repo_id = self._cache_on(wire)
247 247 region = self._region(wire)
248 248
249 249 @region.conditional_cache_on_arguments(condition=cache_on)
250 250 def _is_large_file(_repo_id, _sha):
251 251 repo_init = self._factory.repo_libgit2(wire)
252 252 with repo_init as repo:
253 253 blob = repo[commit_id]
254 254 if blob.is_binary:
255 255 return {}
256 256
257 257 return self._parse_lfs_pointer(blob.data)
258 258
259 259 return _is_large_file(repo_id, commit_id)
260 260
261 261 @reraise_safe_exceptions
262 262 def is_binary(self, wire, tree_id):
263 263 cache_on, context_uid, repo_id = self._cache_on(wire)
264 264 region = self._region(wire)
265 265
266 266 @region.conditional_cache_on_arguments(condition=cache_on)
267 267 def _is_binary(_repo_id, _tree_id):
268 268 repo_init = self._factory.repo_libgit2(wire)
269 269 with repo_init as repo:
270 270 blob_obj = repo[tree_id]
271 271 return blob_obj.is_binary
272 272
273 273 return _is_binary(repo_id, tree_id)
274 274
275 275 @reraise_safe_exceptions
276 276 def md5_hash(self, wire, tree_id):
277 277 cache_on, context_uid, repo_id = self._cache_on(wire)
278 278 region = self._region(wire)
279 279
280 280 @region.conditional_cache_on_arguments(condition=cache_on)
281 281 def _md5_hash(_repo_id, _tree_id):
282 282 return ''
283 283
284 284 return _md5_hash(repo_id, tree_id)
285 285
286 286 @reraise_safe_exceptions
287 287 def in_largefiles_store(self, wire, oid):
288 288 conf = self._wire_to_config(wire)
289 289 repo_init = self._factory.repo_libgit2(wire)
290 290 with repo_init as repo:
291 291 repo_name = repo.path
292 292
293 293 store_location = conf.get('vcs_git_lfs_store_location')
294 294 if store_location:
295 295
296 296 store = LFSOidStore(
297 297 oid=oid, repo=repo_name, store_location=store_location)
298 298 return store.has_oid()
299 299
300 300 return False
301 301
302 302 @reraise_safe_exceptions
303 303 def store_path(self, wire, oid):
304 304 conf = self._wire_to_config(wire)
305 305 repo_init = self._factory.repo_libgit2(wire)
306 306 with repo_init as repo:
307 307 repo_name = repo.path
308 308
309 309 store_location = conf.get('vcs_git_lfs_store_location')
310 310 if store_location:
311 311 store = LFSOidStore(
312 312 oid=oid, repo=repo_name, store_location=store_location)
313 313 return store.oid_path
314 314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
315 315
316 316 @reraise_safe_exceptions
317 317 def bulk_request(self, wire, rev, pre_load):
318 318 cache_on, context_uid, repo_id = self._cache_on(wire)
319 319 region = self._region(wire)
320 320
321 321 @region.conditional_cache_on_arguments(condition=cache_on)
322 322 def _bulk_request(_repo_id, _rev, _pre_load):
323 323 result = {}
324 324 for attr in pre_load:
325 325 try:
326 326 method = self._bulk_methods[attr]
327 327 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
328 328 args = [wire, rev]
329 329 result[attr] = method(*args)
330 330 except KeyError as e:
331 raise exceptions.VcsException(e)(
332 "Unknown bulk attribute: %s" % attr)
331 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
333 332 return result
334 333
335 334 return _bulk_request(repo_id, rev, sorted(pre_load))
336 335
337 336 def _build_opener(self, url):
338 337 handlers = []
339 338 url_obj = url_parser(url)
340 339 _, authinfo = url_obj.authinfo()
341 340
342 341 if authinfo:
343 342 # create a password manager
344 343 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
345 344 passmgr.add_password(*authinfo)
346 345
347 346 handlers.extend((httpbasicauthhandler(passmgr),
348 347 httpdigestauthhandler(passmgr)))
349 348
350 349 return urllib.request.build_opener(*handlers)
351 350
352 351 def _type_id_to_name(self, type_id: int):
353 352 return {
354 353 1: 'commit',
355 354 2: 'tree',
356 355 3: 'blob',
357 356 4: 'tag'
358 357 }[type_id]
359 358
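The hard-coded mapping above follows libgit2's object-type enum (commit=1, tree=2, blob=3, tag=4). A hedged equivalent spelled with pygit2's own constants, assuming the classic GIT_OBJ_* names are exported by the installed pygit2 version:

import pygit2

TYPE_ID_TO_NAME = {
    pygit2.GIT_OBJ_COMMIT: 'commit',  # 1
    pygit2.GIT_OBJ_TREE: 'tree',      # 2
    pygit2.GIT_OBJ_BLOB: 'blob',      # 3
    pygit2.GIT_OBJ_TAG: 'tag',        # 4
}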
360 359 @reraise_safe_exceptions
361 360 def check_url(self, url, config):
362 url_obj = url_parser(url)
361 url_obj = url_parser(safe_bytes(url))
363 362 test_uri, _ = url_obj.authinfo()
364 363 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
365 364 url_obj.query = obfuscate_qs(url_obj.query)
366 365 cleaned_uri = str(url_obj)
367 366 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
368 367
369 368 if not test_uri.endswith('info/refs'):
370 369 test_uri = test_uri.rstrip('/') + '/info/refs'
371 370
372 371 o = self._build_opener(url)
373 372 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
374 373
375 374 q = {"service": 'git-upload-pack'}
376 375 qs = '?%s' % urllib.parse.urlencode(q)
377 376 cu = "%s%s" % (test_uri, qs)
378 377 req = urllib.request.Request(cu, None, {})
379 378
380 379 try:
381 380 log.debug("Trying to open URL %s", cleaned_uri)
382 381 resp = o.open(req)
383 382 if resp.code != 200:
384 383 raise exceptions.URLError()('Return Code is not 200')
385 384 except Exception as e:
386 385 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
387 386 # means it cannot be cloned
388 387 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
389 388
390 389 # now detect if it's a proper git repo
391 390 gitdata = resp.read()
392 391 if 'service=git-upload-pack' in gitdata:
393 392 pass
394 393 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
395 394 # old-style git can return some other format!
396 395 pass
397 396 else:
398 397 raise exceptions.URLError()(
399 398 "url [%s] does not look like an git" % (cleaned_uri,))
400 399
401 400 return True
402 401
403 402 @reraise_safe_exceptions
404 403 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
405 404 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
406 405 remote_refs = self.pull(wire, url, apply_refs=False)
407 406 repo = self._factory.repo(wire)
408 407 if isinstance(valid_refs, list):
409 408 valid_refs = tuple(valid_refs)
410 409
411 410 for k in remote_refs:
413 412 # only parse heads/tags and skip so-called deferred tags
413 412 if k.startswith(valid_refs) and not k.endswith(deferred):
414 413 repo[k] = remote_refs[k]
415 414
416 415 if update_after_clone:
417 416 # we want to checkout HEAD
418 417 repo["HEAD"] = remote_refs["HEAD"]
419 418 index.build_index_from_tree(repo.path, repo.index_path(),
420 419 repo.object_store, repo["HEAD"].tree)
421 420
422 421 @reraise_safe_exceptions
423 422 def branch(self, wire, commit_id):
424 423 cache_on, context_uid, repo_id = self._cache_on(wire)
425 424 region = self._region(wire)
426 425 @region.conditional_cache_on_arguments(condition=cache_on)
427 426 def _branch(_context_uid, _repo_id, _commit_id):
428 427 regex = re.compile('^refs/heads')
429 428
430 429 def filter_with(ref):
431 430 return regex.match(ref[0]) and ref[1] == _commit_id
432 431
433 432 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
434 433 return [x[0].split('refs/heads/')[-1] for x in branches]
435 434
436 435 return _branch(context_uid, repo_id, commit_id)
437 436
438 437 @reraise_safe_exceptions
439 438 def commit_branches(self, wire, commit_id):
440 439 cache_on, context_uid, repo_id = self._cache_on(wire)
441 440 region = self._region(wire)
442 441 @region.conditional_cache_on_arguments(condition=cache_on)
443 442 def _commit_branches(_context_uid, _repo_id, _commit_id):
444 443 repo_init = self._factory.repo_libgit2(wire)
445 444 with repo_init as repo:
446 445 branches = [x for x in repo.branches.with_commit(_commit_id)]
447 446 return branches
448 447
449 448 return _commit_branches(context_uid, repo_id, commit_id)
450 449
451 450 @reraise_safe_exceptions
452 451 def add_object(self, wire, content):
453 452 repo_init = self._factory.repo_libgit2(wire)
454 453 with repo_init as repo:
455 454 blob = objects.Blob()
456 455 blob.set_raw_string(content)
457 456 repo.object_store.add_object(blob)
458 457 return blob.id
459 458
460 459 # TODO: this is quite complex, check if that can be simplified
461 460 @reraise_safe_exceptions
462 461 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
463 462 # Defines the root tree
464 463 class _Root(object):
465 464 def __repr__(self):
466 465 return 'ROOT TREE'
467 466 ROOT = _Root()
468 467
469 468 repo = self._factory.repo(wire)
470 469 object_store = repo.object_store
471 470
472 471 # Create tree and populate it with blobs
472 if commit_tree:
473 commit_tree = safe_bytes(commit_tree)
473 474
474 475 if commit_tree and repo[commit_tree]:
475 git_commit = repo[commit_data['parents'][0]]
476 git_commit = repo[safe_bytes(commit_data['parents'][0])]
476 477 commit_tree = repo[git_commit.tree] # root tree
477 478 else:
478 479 commit_tree = objects.Tree()
479 480
480 481 for node in updated:
481 482 # Compute subdirs if needed
482 483 dirpath, nodename = vcspath.split(node['path'])
483 484 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
484 485 parent = commit_tree
485 486 ancestors = [('', parent)]
486 487
487 488 # Tries to dig for the deepest existing tree
488 489 while dirnames:
489 490 curdir = dirnames.pop(0)
490 491 try:
491 492 dir_id = parent[curdir][1]
492 493 except KeyError:
493 494 # put curdir back into dirnames and stop
494 495 dirnames.insert(0, curdir)
495 496 break
496 497 else:
497 498 # If found, updates parent
498 499 parent = repo[dir_id]
499 500 ancestors.append((curdir, parent))
500 501 # Now parent is the deepest existing tree and we need to create
501 502 # subtrees for dirnames (in reverse order)
502 503 # [this only applies to nodes from added]
503 504 new_trees = []
504 505
505 506 blob = objects.Blob.from_string(node['content'])
506 507
507 508 if dirnames:
508 509 # If there are trees which should be created we need to build
509 510 # them now (in reverse order)
510 511 reversed_dirnames = list(reversed(dirnames))
511 512 curtree = objects.Tree()
512 513 curtree[node['node_path']] = node['mode'], blob.id
513 514 new_trees.append(curtree)
514 515 for dirname in reversed_dirnames[:-1]:
515 516 newtree = objects.Tree()
516 517 newtree[dirname] = (DIR_STAT, curtree.id)
517 518 new_trees.append(newtree)
518 519 curtree = newtree
519 520 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
520 521 else:
521 522 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
522 523
523 524 new_trees.append(parent)
524 525 # Update ancestors
525 526 reversed_ancestors = reversed(
526 527 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
527 528 for parent, tree, path in reversed_ancestors:
528 529 parent[path] = (DIR_STAT, tree.id)
529 530 object_store.add_object(tree)
530 531
531 532 object_store.add_object(blob)
532 533 for tree in new_trees:
533 534 object_store.add_object(tree)
534 535
535 536 for node_path in removed:
536 537 paths = node_path.split('/')
537 538 tree = commit_tree # start with top-level
538 539 trees = [{'tree': tree, 'path': ROOT}]
539 540 # Traverse deep into the forest...
540 541 # resolve final tree by iterating the path.
541 542 # e.g a/b/c.txt will get
542 543 # - root as tree then
543 544 # - 'a' as tree,
544 545 # - 'b' as tree,
545 546 # - stop at c as blob.
546 547 for path in paths:
547 548 try:
548 549 obj = repo[tree[path][1]]
549 550 if isinstance(obj, objects.Tree):
550 551 trees.append({'tree': obj, 'path': path})
551 552 tree = obj
552 553 except KeyError:
553 554 break
554 555 #PROBLEM:
555 556 """
556 557 We're not editing the same reference tree object
557 558 """
558 559 # Cut down the blob and all rotten trees on the way back...
559 560 for path, tree_data in reversed(list(zip(paths, trees))):
560 561 tree = tree_data['tree']
561 562 tree.__delitem__(path)
562 563 # This operation edits the tree, we need to mark new commit back
563 564
564 565 if len(tree) > 0:
565 566 # This tree still has elements - don't remove it or any
566 567 # of its parents
567 568 break
568 569
569 570 object_store.add_object(commit_tree)
570 571
571 572 # Create commit
572 573 commit = objects.Commit()
573 574 commit.tree = commit_tree.id
574 575 bytes_keys = [
575 576 'author',
576 577 'committer',
577 578 'message',
578 'encoding'
579 'encoding',
580 'parents'
579 581 ]
580 582
581 583 for k, v in commit_data.items():
582 584 if k in bytes_keys:
583 v = safe_bytes(v)
585 if k == 'parents':
586 v = [safe_bytes(x) for x in v]
587 else:
588 v = safe_bytes(v)
584 589 setattr(commit, k, v)
585 590
586 591 object_store.add_object(commit)
587 592
588 593 self.create_branch(wire, branch, safe_str(commit.id))
589 594
590 595 # dulwich set-ref
591 596 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
592 597
593 598 return commit.id
594 599
595 600 @reraise_safe_exceptions
596 601 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
597 602 if url != 'default' and '://' not in url:
598 603 client = LocalGitClient(url)
599 604 else:
600 605 url_obj = url_parser(url)
601 606 o = self._build_opener(url)
602 607 url, _ = url_obj.authinfo()
603 608 client = HttpGitClient(base_url=url, opener=o)
604 609 repo = self._factory.repo(wire)
605 610
606 611 determine_wants = repo.object_store.determine_wants_all
607 612 if refs:
608 613 def determine_wants_requested(references):
609 614 return [references[r] for r in references if r in refs]
610 615 determine_wants = determine_wants_requested
611 616
612 617 try:
613 618 remote_refs = client.fetch(
614 619 path=url, target=repo, determine_wants=determine_wants)
615 620 except NotGitRepository as e:
616 621 log.warning(
617 622 'Trying to fetch from "%s" failed, not a Git repository.', url)
618 623 # Exception can contain unicode which we convert
619 624 raise exceptions.AbortException(e)(repr(e))
620 625
621 626 # mikhail: client.fetch() returns all the remote refs, but fetches only
622 627 # refs filtered by the `determine_wants` function. We need to filter the result
623 628 # as well
624 629 if refs:
625 630 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
626 631
627 632 if apply_refs:
628 633 # TODO: johbo: Needs proper test coverage with a git repository
629 634 # that contains a tag object, so that we would end up with
630 635 # a peeled ref at this point.
631 636 for k in remote_refs:
632 637 if k.endswith(PEELED_REF_MARKER):
633 638 log.debug("Skipping peeled reference %s", k)
634 639 continue
635 640 repo[k] = remote_refs[k]
636 641
637 642 if refs and not update_after:
638 643 # mikhail: explicitly set the head to the last ref.
639 644 repo["HEAD"] = remote_refs[refs[-1]]
640 645
641 646 if update_after:
642 647 # we want to checkout HEAD
643 648 repo["HEAD"] = remote_refs["HEAD"]
644 649 index.build_index_from_tree(repo.path, repo.index_path(),
645 650 repo.object_store, repo["HEAD"].tree)
646 651 return remote_refs
647 652
648 653 @reraise_safe_exceptions
649 654 def sync_fetch(self, wire, url, refs=None, all_refs=False):
650 655 repo = self._factory.repo(wire)
651 656 if refs and not isinstance(refs, (list, tuple)):
652 657 refs = [refs]
653 658
654 659 config = self._wire_to_config(wire)
655 660 # get all remote refs we'll use to fetch later
656 661 cmd = ['ls-remote']
657 662 if not all_refs:
658 663 cmd += ['--heads', '--tags']
659 664 cmd += [url]
660 665 output, __ = self.run_git_command(
661 666 wire, cmd, fail_on_stderr=False,
662 667 _copts=self._remote_conf(config),
663 668 extra_env={'GIT_TERMINAL_PROMPT': '0'})
664 669
665 670 remote_refs = collections.OrderedDict()
666 671 fetch_refs = []
667 672
668 673 for ref_line in output.splitlines():
669 674 sha, ref = ref_line.split(b'\t')
670 675 sha = sha.strip()
671 676 if ref in remote_refs:
672 677 # duplicate, skip
673 678 continue
674 679 if ref.endswith(PEELED_REF_MARKER):
675 680 log.debug("Skipping peeled reference %s", ref)
676 681 continue
677 682 # don't sync HEAD
678 683 if ref in [b'HEAD']:
679 684 continue
680 685
681 686 remote_refs[ref] = sha
682 687
683 688 if refs and sha in refs:
684 689 # we filter fetch using our specified refs
685 690 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
686 691 elif not refs:
687 692 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
688 693 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
689 694
690 695 if fetch_refs:
691 696 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
692 697 fetch_refs_chunks = list(chunk)
693 698 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
694 699 self.run_git_command(
695 700 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
696 701 fail_on_stderr=False,
697 702 _copts=self._remote_conf(config),
698 703 extra_env={'GIT_TERMINAL_PROMPT': '0'})
699 704
700 705 return remote_refs
701 706
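sync_fetch pins the fetch to explicit `<ref>:<ref>` refspecs built from the `ls-remote` output and chunks them so the command line stays bounded. A tiny illustration of that refspec construction (the remote URL and refs are made up):

import more_itertools

refs = [b'refs/heads/main', b'refs/tags/v1.0']
fetch_refs = ['{0}:{0}'.format(ref.decode()) for ref in refs]
for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
    cmd = ['fetch', 'https://example.com/repo.git', '--force', '--prune', '--'] + list(chunk)
    # each cmd chunk would then be handed to run_git_command(wire, cmd, ...)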
702 707 @reraise_safe_exceptions
703 708 def sync_push(self, wire, url, refs=None):
704 709 if not self.check_url(url, wire):
705 710 return
706 711 config = self._wire_to_config(wire)
707 712 self._factory.repo(wire)
708 713 self.run_git_command(
709 714 wire, ['push', url, '--mirror'], fail_on_stderr=False,
710 715 _copts=self._remote_conf(config),
711 716 extra_env={'GIT_TERMINAL_PROMPT': '0'})
712 717
713 718 @reraise_safe_exceptions
714 719 def get_remote_refs(self, wire, url):
715 720 repo = Repo(url)
716 721 return repo.get_refs()
717 722
718 723 @reraise_safe_exceptions
719 724 def get_description(self, wire):
720 725 repo = self._factory.repo(wire)
721 726 return repo.get_description()
722 727
723 728 @reraise_safe_exceptions
724 729 def get_missing_revs(self, wire, rev1, rev2, path2):
725 730 repo = self._factory.repo(wire)
726 731 LocalGitClient(thin_packs=False).fetch(path2, repo)
727 732
728 733 wire_remote = wire.copy()
729 734 wire_remote['path'] = path2
730 735 repo_remote = self._factory.repo(wire_remote)
731 736 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
732 737
733 738 revs = [
734 739 x.commit.id
735 740 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
736 741 return revs
737 742
738 743 @reraise_safe_exceptions
739 744 def get_object(self, wire, sha, maybe_unreachable=False):
740 745 cache_on, context_uid, repo_id = self._cache_on(wire)
741 746 region = self._region(wire)
742 747
743 748 @region.conditional_cache_on_arguments(condition=cache_on)
744 749 def _get_object(_context_uid, _repo_id, _sha):
745 750 repo_init = self._factory.repo_libgit2(wire)
746 751 with repo_init as repo:
747 752
748 753 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
749 754 try:
750 755 commit = repo.revparse_single(sha)
751 756 except KeyError:
752 757 # NOTE(marcink): KeyError doesn't give us any meaningful information
753 758 # here, we instead give something more explicit
754 759 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
755 760 raise exceptions.LookupException(e)(missing_commit_err)
756 761 except ValueError as e:
757 762 raise exceptions.LookupException(e)(missing_commit_err)
758 763
759 764 is_tag = False
760 765 if isinstance(commit, pygit2.Tag):
761 766 commit = repo.get(commit.target)
762 767 is_tag = True
763 768
764 769 check_dangling = True
765 770 if is_tag:
766 771 check_dangling = False
767 772
768 773 if check_dangling and maybe_unreachable:
769 774 check_dangling = False
770 775
771 776 # we used a reference and it parsed, which means we don't have a dangling commit
772 777 if sha != commit.hex:
773 778 check_dangling = False
774 779
775 780 if check_dangling:
776 781 # check for dangling commit
777 782 for branch in repo.branches.with_commit(commit.hex):
778 783 if branch:
779 784 break
780 785 else:
781 786 # NOTE(marcink): Empty error doesn't give us any meaningful information
782 787 # here, we instead give something more explicit
783 788 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
784 789 raise exceptions.LookupException(e)(missing_commit_err)
785 790
786 791 commit_id = commit.hex
787 792 type_id = commit.type
788 793
789 794 return {
790 795 'id': commit_id,
791 796 'type': self._type_id_to_name(type_id),
792 797 'commit_id': commit_id,
793 798 'idx': 0
794 799 }
795 800
796 801 return _get_object(context_uid, repo_id, sha)
797 802
798 803 @reraise_safe_exceptions
799 804 def get_refs(self, wire):
800 805 cache_on, context_uid, repo_id = self._cache_on(wire)
801 806 region = self._region(wire)
802 807
803 808 @region.conditional_cache_on_arguments(condition=cache_on)
804 809 def _get_refs(_context_uid, _repo_id):
805 810
806 811 repo_init = self._factory.repo_libgit2(wire)
807 812 with repo_init as repo:
808 813 regex = re.compile('^refs/(heads|tags)/')
809 814 return {x.name: x.target.hex for x in
810 815 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
811 816
812 817 return _get_refs(context_uid, repo_id)
813 818
814 819 @reraise_safe_exceptions
815 820 def get_branch_pointers(self, wire):
816 821 cache_on, context_uid, repo_id = self._cache_on(wire)
817 822 region = self._region(wire)
818 823
819 824 @region.conditional_cache_on_arguments(condition=cache_on)
820 825 def _get_branch_pointers(_context_uid, _repo_id):
821 826
822 827 repo_init = self._factory.repo_libgit2(wire)
823 828 regex = re.compile('^refs/heads')
824 829 with repo_init as repo:
825 830 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
826 831 return {x.target.hex: x.shorthand for x in branches}
827 832
828 833 return _get_branch_pointers(context_uid, repo_id)
829 834
830 835 @reraise_safe_exceptions
831 836 def head(self, wire, show_exc=True):
832 837 cache_on, context_uid, repo_id = self._cache_on(wire)
833 838 region = self._region(wire)
834 839
835 840 @region.conditional_cache_on_arguments(condition=cache_on)
836 841 def _head(_context_uid, _repo_id, _show_exc):
837 842 repo_init = self._factory.repo_libgit2(wire)
838 843 with repo_init as repo:
839 844 try:
840 845 return repo.head.peel().hex
841 846 except Exception:
842 847 if show_exc:
843 848 raise
844 849 return _head(context_uid, repo_id, show_exc)
845 850
846 851 @reraise_safe_exceptions
847 852 def init(self, wire):
848 853 repo_path = safe_str(wire['path'])
849 854 self.repo = Repo.init(repo_path)
850 855
851 856 @reraise_safe_exceptions
852 857 def init_bare(self, wire):
853 858 repo_path = safe_str(wire['path'])
854 859 self.repo = Repo.init_bare(repo_path)
855 860
856 861 @reraise_safe_exceptions
857 862 def revision(self, wire, rev):
858 863
859 864 cache_on, context_uid, repo_id = self._cache_on(wire)
860 865 region = self._region(wire)
861 866
862 867 @region.conditional_cache_on_arguments(condition=cache_on)
863 868 def _revision(_context_uid, _repo_id, _rev):
864 869 repo_init = self._factory.repo_libgit2(wire)
865 870 with repo_init as repo:
866 871 commit = repo[rev]
867 872 obj_data = {
868 873 'id': commit.id.hex,
869 874 }
870 875 # tree objects themselves don't have a tree_id attribute
871 876 if hasattr(commit, 'tree_id'):
872 877 obj_data['tree'] = commit.tree_id.hex
873 878
874 879 return obj_data
875 880 return _revision(context_uid, repo_id, rev)
876 881
877 882 @reraise_safe_exceptions
878 883 def date(self, wire, commit_id):
879 884 cache_on, context_uid, repo_id = self._cache_on(wire)
880 885 region = self._region(wire)
881 886
882 887 @region.conditional_cache_on_arguments(condition=cache_on)
883 888 def _date(_repo_id, _commit_id):
884 889 repo_init = self._factory.repo_libgit2(wire)
885 890 with repo_init as repo:
886 891 commit = repo[commit_id]
887 892
888 893 if hasattr(commit, 'commit_time'):
889 894 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
890 895 else:
891 896 commit = commit.get_object()
892 897 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
893 898
894 899 # TODO(marcink): check dulwich difference of offset vs timezone
895 900 return [commit_time, commit_time_offset]
896 901 return _date(repo_id, commit_id)
897 902
898 903 @reraise_safe_exceptions
899 904 def author(self, wire, commit_id):
900 905 cache_on, context_uid, repo_id = self._cache_on(wire)
901 906 region = self._region(wire)
902 907
903 908 @region.conditional_cache_on_arguments(condition=cache_on)
904 909 def _author(_repo_id, _commit_id):
905 910 repo_init = self._factory.repo_libgit2(wire)
906 911 with repo_init as repo:
907 912 commit = repo[commit_id]
908 913
909 914 if hasattr(commit, 'author'):
910 915 author = commit.author
911 916 else:
912 917 author = commit.get_object().author
913 918
914 919 if author.email:
915 920 return "{} <{}>".format(author.name, author.email)
916 921
917 922 try:
918 923 return "{}".format(author.name)
919 924 except Exception:
920 925 return "{}".format(safe_str(author.raw_name))
921 926
922 927 return _author(repo_id, commit_id)
923 928
924 929 @reraise_safe_exceptions
925 930 def message(self, wire, commit_id):
926 931 cache_on, context_uid, repo_id = self._cache_on(wire)
927 932 region = self._region(wire)
928 933 @region.conditional_cache_on_arguments(condition=cache_on)
929 934 def _message(_repo_id, _commit_id):
930 935 repo_init = self._factory.repo_libgit2(wire)
931 936 with repo_init as repo:
932 937 commit = repo[commit_id]
933 938 return commit.message
934 939 return _message(repo_id, commit_id)
935 940
936 941 @reraise_safe_exceptions
937 942 def parents(self, wire, commit_id):
938 943 cache_on, context_uid, repo_id = self._cache_on(wire)
939 944 region = self._region(wire)
940 945
941 946 @region.conditional_cache_on_arguments(condition=cache_on)
942 947 def _parents(_repo_id, _commit_id):
943 948 repo_init = self._factory.repo_libgit2(wire)
944 949 with repo_init as repo:
945 950 commit = repo[commit_id]
946 951 if hasattr(commit, 'parent_ids'):
947 952 parent_ids = commit.parent_ids
948 953 else:
949 954 parent_ids = commit.get_object().parent_ids
950 955
951 956 return [x.hex for x in parent_ids]
952 957 return _parents(repo_id, commit_id)
953 958
954 959 @reraise_safe_exceptions
955 960 def children(self, wire, commit_id):
956 961 cache_on, context_uid, repo_id = self._cache_on(wire)
957 962 region = self._region(wire)
958 963
959 964 head = self.head(wire)
960 965
961 966 @region.conditional_cache_on_arguments(condition=cache_on)
962 967 def _children(_repo_id, _commit_id):
963 968
964 969 output, __ = self.run_git_command(
965 970 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
966 971
967 972 child_ids = []
968 973 pat = re.compile(r'^{}'.format(commit_id))
969 974 for line in output.splitlines():
970 975 line = safe_str(line)
971 976 if pat.match(line):
972 977 found_ids = line.split(' ')[1:]
973 978 child_ids.extend(found_ids)
974 979 break
975 980
976 981 return child_ids
977 982 return _children(repo_id, commit_id)
978 983
979 984 @reraise_safe_exceptions
980 985 def set_refs(self, wire, key, value):
981 986 repo_init = self._factory.repo_libgit2(wire)
982 987 with repo_init as repo:
983 988 repo.references.create(key, value, force=True)
984 989
985 990 @reraise_safe_exceptions
986 991 def create_branch(self, wire, branch_name, commit_id, force=False):
987 992 repo_init = self._factory.repo_libgit2(wire)
988 993 with repo_init as repo:
989 994 commit = repo[commit_id]
990 995
991 996 if force:
992 997 repo.branches.local.create(branch_name, commit, force=force)
993 998 elif not repo.branches.get(branch_name):
994 999 # create only if that branch doesn't exist yet
995 1000 repo.branches.local.create(branch_name, commit, force=force)
996 1001
997 1002 @reraise_safe_exceptions
998 1003 def remove_ref(self, wire, key):
999 1004 repo_init = self._factory.repo_libgit2(wire)
1000 1005 with repo_init as repo:
1001 1006 repo.references.delete(key)
1002 1007
1003 1008 @reraise_safe_exceptions
1004 1009 def tag_remove(self, wire, tag_name):
1005 1010 repo_init = self._factory.repo_libgit2(wire)
1006 1011 with repo_init as repo:
1007 1012 key = 'refs/tags/{}'.format(tag_name)
1008 1013 repo.references.delete(key)
1009 1014
1010 1015 @reraise_safe_exceptions
1011 1016 def tree_changes(self, wire, source_id, target_id):
1012 1017 # TODO(marcink): remove this, it seems it's only used by tests
1013 1018 repo = self._factory.repo(wire)
1014 1019 source = repo[source_id].tree if source_id else None
1015 1020 target = repo[target_id].tree
1016 1021 result = repo.object_store.tree_changes(source, target)
1017 1022 return list(result)
1018 1023
1019 1024 @reraise_safe_exceptions
1020 1025 def tree_and_type_for_path(self, wire, commit_id, path):
1021 1026
1022 1027 cache_on, context_uid, repo_id = self._cache_on(wire)
1023 1028 region = self._region(wire)
1024 1029
1025 1030 @region.conditional_cache_on_arguments(condition=cache_on)
1026 1031 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1027 1032 repo_init = self._factory.repo_libgit2(wire)
1028 1033
1029 1034 with repo_init as repo:
1030 1035 commit = repo[commit_id]
1031 1036 try:
1032 1037 tree = commit.tree[path]
1033 1038 except KeyError:
1034 1039 return None, None, None
1035 1040
1036 1041 return tree.id.hex, tree.type_str, tree.filemode
1037 1042 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1038 1043
1039 1044 @reraise_safe_exceptions
1040 1045 def tree_items(self, wire, tree_id):
1041 1046 cache_on, context_uid, repo_id = self._cache_on(wire)
1042 1047 region = self._region(wire)
1043 1048
1044 1049 @region.conditional_cache_on_arguments(condition=cache_on)
1045 1050 def _tree_items(_repo_id, _tree_id):
1046 1051
1047 1052 repo_init = self._factory.repo_libgit2(wire)
1048 1053 with repo_init as repo:
1049 1054 try:
1050 1055 tree = repo[tree_id]
1051 1056 except KeyError:
1052 1057 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1053 1058
1054 1059 result = []
1055 1060 for item in tree:
1056 1061 item_sha = item.hex
1057 1062 item_mode = item.filemode
1058 1063 item_type = item.type_str
1059 1064
1060 1065 if item_type == 'commit':
1061 1066 # NOTE(marcink): we translate submodules to 'link' for backward compat
1062 1067 item_type = 'link'
1063 1068
1064 1069 result.append((item.name, item_mode, item_sha, item_type))
1065 1070 return result
1066 1071 return _tree_items(repo_id, tree_id)
1067 1072
1068 1073 @reraise_safe_exceptions
1069 1074 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1070 1075 """
1071 1076 Old version that uses subprocess to call diff
1072 1077 """
1073 1078
1074 1079 flags = [
1075 1080 '-U%s' % context, '--patch',
1076 1081 '--binary',
1077 1082 '--find-renames',
1078 1083 '--no-indent-heuristic',
1079 1084 # '--indent-heuristic',
1080 1085 #'--full-index',
1081 1086 #'--abbrev=40'
1082 1087 ]
1083 1088
1084 1089 if opt_ignorews:
1085 1090 flags.append('--ignore-all-space')
1086 1091
1087 1092 if commit_id_1 == self.EMPTY_COMMIT:
1088 1093 cmd = ['show'] + flags + [commit_id_2]
1089 1094 else:
1090 1095 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1091 1096
1092 1097 if file_filter:
1093 1098 cmd.extend(['--', file_filter])
1094 1099
1095 1100 diff, __ = self.run_git_command(wire, cmd)
1096 1101 # If we used the 'show' command, strip the first few lines (until the actual diff
1097 1102 # starts)
1098 1103 if commit_id_1 == self.EMPTY_COMMIT:
1099 1104 lines = diff.splitlines()
1100 1105 x = 0
1101 1106 for line in lines:
1102 1107 if line.startswith(b'diff'):
1103 1108 break
1104 1109 x += 1
1105 1110 # Append a newline just like the 'diff' command does
1106 1111 diff = '\n'.join(lines[x:]) + '\n'
1107 1112 return diff
1108 1113
1109 1114 @reraise_safe_exceptions
1110 1115 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1111 1116 repo_init = self._factory.repo_libgit2(wire)
1112 1117 with repo_init as repo:
1113 1118 swap = True
1114 1119 flags = 0
1115 1120 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1116 1121
1117 1122 if opt_ignorews:
1118 1123 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1119 1124
1120 1125 if commit_id_1 == self.EMPTY_COMMIT:
1121 1126 comm1 = repo[commit_id_2]
1122 1127 diff_obj = comm1.tree.diff_to_tree(
1123 1128 flags=flags, context_lines=context, swap=swap)
1124 1129
1125 1130 else:
1126 1131 comm1 = repo[commit_id_2]
1127 1132 comm2 = repo[commit_id_1]
1128 1133 diff_obj = comm1.tree.diff_to_tree(
1129 1134 comm2.tree, flags=flags, context_lines=context, swap=swap)
1130 1135 similar_flags = 0
1131 1136 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1132 1137 diff_obj.find_similar(flags=similar_flags)
1133 1138
1134 1139 if file_filter:
1135 1140 for p in diff_obj:
1136 1141 if p.delta.old_file.path == file_filter:
1137 1142 return p.patch or ''
1138 1143 # no matching path == no diff
1139 1144 return ''
1140 1145 return diff_obj.patch or ''
1141 1146
1142 1147 @reraise_safe_exceptions
1143 1148 def node_history(self, wire, commit_id, path, limit):
1144 1149 cache_on, context_uid, repo_id = self._cache_on(wire)
1145 1150 region = self._region(wire)
1146 1151
1147 1152 @region.conditional_cache_on_arguments(condition=cache_on)
1148 1153 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1149 1154 # optimize for n==1, rev-list is much faster for that use-case
1150 1155 if limit == 1:
1151 1156 cmd = ['rev-list', '-1', commit_id, '--', path]
1152 1157 else:
1153 1158 cmd = ['log']
1154 1159 if limit:
1155 1160 cmd.extend(['-n', str(safe_int(limit, 0))])
1156 1161 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1157 1162
1158 1163 output, __ = self.run_git_command(wire, cmd)
1159 1164 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1160 1165
1161 1166 return [x for x in commit_ids]
1162 1167 return _node_history(context_uid, repo_id, commit_id, path, limit)
1163 1168
1164 1169 @reraise_safe_exceptions
1165 1170 def node_annotate_legacy(self, wire, commit_id, path):
1166 1171 # note: replaced by pygit2 implementation
1167 1172 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1168 1173 # -l ==> outputs long shas (and we need all 40 characters)
1169 1174 # --root ==> doesn't put '^' character for boundaries
1170 1175 # -r commit_id ==> blames for the given commit
1171 1176 output, __ = self.run_git_command(wire, cmd)
1172 1177
1173 1178 result = []
1174 1179 for i, blame_line in enumerate(output.splitlines()[:-1]):
1175 1180 line_no = i + 1
1176 1181 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1177 1182 result.append((line_no, blame_commit_id, line))
1178 1183
1179 1184 return result
1180 1185
1181 1186 @reraise_safe_exceptions
1182 1187 def node_annotate(self, wire, commit_id, path):
1183 1188
1184 1189 result_libgit = []
1185 1190 repo_init = self._factory.repo_libgit2(wire)
1186 1191 with repo_init as repo:
1187 1192 commit = repo[commit_id]
1188 1193 blame_obj = repo.blame(path, newest_commit=commit_id)
1189 1194 for i, line in enumerate(commit.tree[path].data.splitlines()):
1190 1195 line_no = i + 1
1191 1196 hunk = blame_obj.for_line(line_no)
1192 1197 blame_commit_id = hunk.final_commit_id.hex
1193 1198
1194 1199 result_libgit.append((line_no, blame_commit_id, line))
1195 1200
1196 1201 return result_libgit
1197 1202
1198 1203 @reraise_safe_exceptions
1199 1204 def update_server_info(self, wire):
1200 1205 repo = self._factory.repo(wire)
1201 1206 update_server_info(repo)
1202 1207
1203 1208 @reraise_safe_exceptions
1204 1209 def get_all_commit_ids(self, wire):
1205 1210
1206 1211 cache_on, context_uid, repo_id = self._cache_on(wire)
1207 1212 region = self._region(wire)
1208 1213
1209 1214 @region.conditional_cache_on_arguments(condition=cache_on)
1210 1215 def _get_all_commit_ids(_context_uid, _repo_id):
1211 1216
1212 1217 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1213 1218 try:
1214 1219 output, __ = self.run_git_command(wire, cmd)
1215 1220 return output.splitlines()
1216 1221 except Exception:
1217 1222 # Can be raised for empty repositories
1218 1223 return []
1219 1224
1220 1225 @region.conditional_cache_on_arguments(condition=cache_on)
1221 1226 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1222 1227 repo_init = self._factory.repo_libgit2(wire)
1223 1228 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1224 1229 results = []
1225 1230 with repo_init as repo:
1226 1231 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1227 1232 results.append(commit.id.hex)
1228 1233
1229 1234 return _get_all_commit_ids(context_uid, repo_id)
1230 1235
1231 1236 @reraise_safe_exceptions
1232 1237 def run_git_command(self, wire, cmd, **opts):
1233 1238 path = wire.get('path', None)
1234 1239
1235 1240 if path and os.path.isdir(path):
1236 1241 opts['cwd'] = path
1237 1242
1238 1243 if '_bare' in opts:
1239 1244 _copts = []
1240 1245 del opts['_bare']
1241 1246 else:
1242 1247 _copts = ['-c', 'core.quotepath=false', ]
1243 1248 safe_call = False
1244 1249 if '_safe' in opts:
1245 1250 # no exc on failure
1246 1251 del opts['_safe']
1247 1252 safe_call = True
1248 1253
1249 1254 if '_copts' in opts:
1250 1255 _copts.extend(opts['_copts'] or [])
1251 1256 del opts['_copts']
1252 1257
1253 1258 gitenv = os.environ.copy()
1254 1259 gitenv.update(opts.pop('extra_env', {}))
1255 1260 # need to clean/fix GIT_DIR!
1256 1261 if 'GIT_DIR' in gitenv:
1257 1262 del gitenv['GIT_DIR']
1258 1263 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1259 1264 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1260 1265
1261 1266 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1262 1267 _opts = {'env': gitenv, 'shell': False}
1263 1268
1264 1269 proc = None
1265 1270 try:
1266 1271 _opts.update(opts)
1267 1272 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1268 1273
1269 1274 return b''.join(proc), b''.join(proc.stderr)
1270 1275 except OSError as err:
1271 1276 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1272 1277 tb_err = ("Couldn't run git command (%s).\n"
1273 1278 "Original error was:%s\n"
1274 1279 "Call options:%s\n"
1275 1280 % (cmd, err, _opts))
1276 1281 log.exception(tb_err)
1277 1282 if safe_call:
1278 1283 return '', err
1279 1284 else:
1280 1285 raise exceptions.VcsException()(tb_err)
1281 1286 finally:
1282 1287 if proc:
1283 1288 proc.close()
1284 1289
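To make the plumbing above concrete, here is a rough standalone sketch of how such a runner assembles argv and the environment (illustrative only; the real method streams output through subprocessio.SubprocessIOChunker and takes the binary from vcsserver settings):

import os
import subprocess

def run_git(cmd, cwd=None, git_exe='git'):
    # drop GIT_DIR so git discovers the repository from cwd, as done above
    env = {k: v for k, v in os.environ.items() if k != 'GIT_DIR'}
    env['GIT_CONFIG_NOGLOBAL'] = '1'
    env['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
    argv = [git_exe, '-c', 'core.quotepath=false'] + list(cmd)
    result = subprocess.run(argv, cwd=cwd, env=env, capture_output=True)
    return result.stdout, result.stderr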
1285 1290 @reraise_safe_exceptions
1286 1291 def install_hooks(self, wire, force=False):
1287 1292 from vcsserver.hook_utils import install_git_hooks
1288 1293 bare = self.bare(wire)
1289 1294 path = wire['path']
1290 1295 return install_git_hooks(path, bare, force_create=force)
1291 1296
1292 1297 @reraise_safe_exceptions
1293 1298 def get_hooks_info(self, wire):
1294 1299 from vcsserver.hook_utils import (
1295 1300 get_git_pre_hook_version, get_git_post_hook_version)
1296 1301 bare = self.bare(wire)
1297 1302 path = wire['path']
1298 1303 return {
1299 1304 'pre_version': get_git_pre_hook_version(path, bare),
1300 1305 'post_version': get_git_post_hook_version(path, bare),
1301 1306 }
1302 1307
1303 1308 @reraise_safe_exceptions
1304 1309 def set_head_ref(self, wire, head_name):
1305 1310 log.debug('Setting refs/head to `%s`', head_name)
1306 1311 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1307 1312 output, __ = self.run_git_command(wire, cmd)
1308 1313 return [head_name] + output.splitlines()
1309 1314
1310 1315 @reraise_safe_exceptions
1311 1316 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1312 1317 archive_dir_name, commit_id):
1313 1318
1314 1319 def file_walker(_commit_id, path):
1315 1320 repo_init = self._factory.repo_libgit2(wire)
1316 1321
1317 1322 with repo_init as repo:
1318 1323 commit = repo[commit_id]
1319 1324
1320 1325 if path in ['', '/']:
1321 1326 tree = commit.tree
1322 1327 else:
1323 1328 tree = commit.tree[path.rstrip('/')]
1324 1329 tree_id = tree.id.hex
1325 1330 try:
1326 1331 tree = repo[tree_id]
1327 1332 except KeyError:
1328 1333 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1329 1334
1330 1335 index = LibGit2Index.Index()
1331 1336 index.read_tree(tree)
1332 1337 file_iter = index
1333 1338
1334 1339 for fn in file_iter:
1335 1340 file_path = fn.path
1336 1341 mode = fn.mode
1337 1342 is_link = stat.S_ISLNK(mode)
1338 1343 if mode == pygit2.GIT_FILEMODE_COMMIT:
1339 1344 log.debug('Skipping path %s as a commit node', file_path)
1340 1345 continue
1341 1346 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1342 1347
1343 1348 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1344 1349 archive_dir_name, commit_id)
@@ -1,1087 +1,1088 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 import urllib.request, urllib.parse, urllib.error
22 import urllib.request, urllib.error, urllib.parse
21 import urllib.request
22 import urllib.parse
23 23 import traceback
24 import hashlib
24 25
25 26 from hgext import largefiles, rebase, purge
26 27
27 28 from mercurial import commands
28 29 from mercurial import unionrepo
29 30 from mercurial import verify
30 31 from mercurial import repair
31 32
32 33 import vcsserver
33 34 from vcsserver import exceptions
34 35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 36 from vcsserver.hgcompat import (
36 37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 41 RepoLookupError, InterventionRequired, RequirementError,
41 42 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 43 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 44 from vcsserver.vcs_base import RemoteBase
44 45
45 46 log = logging.getLogger(__name__)
46 47
47 48
48 49 def make_ui_from_config(repo_config):
49 50
50 51 class LoggingUI(ui.ui):
51 52
52 53 def status(self, *msg, **opts):
53 54 str_msg = map(safe_str, msg)
54 55 log.info(' '.join(str_msg).rstrip('\n'))
55 56 #super(LoggingUI, self).status(*msg, **opts)
56 57
57 58 def warn(self, *msg, **opts):
58 59 str_msg = map(safe_str, msg)
59 60 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
60 61 #super(LoggingUI, self).warn(*msg, **opts)
61 62
62 63 def error(self, *msg, **opts):
63 64 str_msg = map(safe_str, msg)
64 65 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
65 66 #super(LoggingUI, self).error(*msg, **opts)
66 67
67 68 def note(self, *msg, **opts):
68 69 str_msg = map(safe_str, msg)
69 70 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
70 71 #super(LoggingUI, self).note(*msg, **opts)
71 72
72 73 def debug(self, *msg, **opts):
73 74 str_msg = map(safe_str, msg)
74 75 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
75 76 #super(LoggingUI, self).debug(*msg, **opts)
76 77
77 78 baseui = LoggingUI()
78 79
79 80 # clean the baseui object
80 81 baseui._ocfg = hgconfig.config()
81 82 baseui._ucfg = hgconfig.config()
82 83 baseui._tcfg = hgconfig.config()
83 84
84 85 for section, option, value in repo_config:
85 86 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
86 87
87 88 # make our hgweb quiet so it doesn't print output
88 89 baseui.setconfig(b'ui', b'quiet', b'true')
89 90
90 91 baseui.setconfig(b'ui', b'paginate', b'never')
91 92 # for better error reporting from Mercurial
92 93 baseui.setconfig(b'ui', b'message-output', b'stderr')
93 94
94 95 # force mercurial to only use 1 thread, otherwise it may try to set a
95 96 # signal in a non-main thread, thus generating a ValueError.
96 97 baseui.setconfig(b'worker', b'numcpus', 1)
97 98
98 99 # If there is no config for the largefiles extension, we explicitly disable
99 100 # it here. This overrides settings from the repository's hgrc file. Recent
100 101 # mercurial versions enable largefiles in hgrc on clone from a largefile
101 102 # repo.
102 103 if not baseui.hasconfig(b'extensions', b'largefiles'):
103 104 log.debug('Explicitly disable largefiles extension for repo.')
104 105 baseui.setconfig(b'extensions', b'largefiles', b'!')
105 106
106 107 return baseui
107 108
108 109
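make_ui_from_config feeds every (section, option, value) triple to Mercurial as bytes, which is one of the recurring python3 concerns in this changeset. A small illustration of the shape of that input (the values are invented for the example):

repo_config = [
    ('extensions', 'largefiles', '!'),
    ('ui', 'quiet', 'true'),
]
as_bytes = [tuple(part.encode('ascii') for part in triple) for triple in repo_config]
# as_bytes == [(b'extensions', b'largefiles', b'!'), (b'ui', b'quiet', b'true')],
# which mirrors what baseui.setconfig(...) receives via the ascii_bytes helper above.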
109 110 def reraise_safe_exceptions(func):
110 111 """Decorator for converting mercurial exceptions to something neutral."""
111 112
112 113 def wrapper(*args, **kwargs):
113 114 try:
114 115 return func(*args, **kwargs)
115 116 except (Abort, InterventionRequired) as e:
116 117 raise_from_original(exceptions.AbortException(e), e)
117 118 except RepoLookupError as e:
118 119 raise_from_original(exceptions.LookupException(e), e)
119 120 except RequirementError as e:
120 121 raise_from_original(exceptions.RequirementException(e), e)
121 122 except RepoError as e:
122 123 raise_from_original(exceptions.VcsException(e), e)
123 124 except LookupError as e:
124 125 raise_from_original(exceptions.LookupException(e), e)
125 126 except Exception as e:
126 127 if not hasattr(e, '_vcs_kind'):
127 128 log.exception("Unhandled exception in hg remote call")
128 129 raise_from_original(exceptions.UnhandledException(e), e)
129 130
130 131 raise
131 132 return wrapper
132 133
133 134
134 135 class MercurialFactory(RepoFactory):
135 136 repo_type = 'hg'
136 137
137 138 def _create_config(self, config, hooks=True):
138 139 if not hooks:
139 140 hooks_to_clean = frozenset((
140 141 'changegroup.repo_size', 'preoutgoing.pre_pull',
141 142 'outgoing.pull_logger', 'prechangegroup.pre_push'))
142 143 new_config = []
143 144 for section, option, value in config:
144 145 if section == 'hooks' and option in hooks_to_clean:
145 146 continue
146 147 new_config.append((section, option, value))
147 148 config = new_config
148 149
149 150 baseui = make_ui_from_config(config)
150 151 return baseui
151 152
152 153 def _create_repo(self, wire, create):
153 154 baseui = self._create_config(wire["config"])
154 155 return instance(baseui, ascii_bytes(wire["path"]), create)
155 156
156 157 def repo(self, wire, create=False):
157 158 """
158 159 Get a repository instance for the given path.
159 160 """
160 161 return self._create_repo(wire, create)
161 162
162 163
163 164 def patch_ui_message_output(baseui):
164 165 baseui.setconfig(b'ui', b'quiet', b'false')
165 166 output = io.BytesIO()
166 167
167 168 def write(data, **unused_kwargs):
168 169 output.write(data)
169 170
170 171 baseui.status = write
171 172 baseui.write = write
172 173 baseui.warn = write
173 174 baseui.debug = write
174 175
175 176 return baseui, output
176 177
177 178
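patch_ui_message_output redirects all of the ui's output channels into a single BytesIO buffer so command output can be captured and returned to the caller. A hedged usage sketch:

baseui, output = patch_ui_message_output(make_ui_from_config([]))
# ... invoke a mercurial command with baseui ...
captured = output.getvalue()  # bytes written through status/write/warn/debug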
178 179 class HgRemote(RemoteBase):
179 180
180 181 def __init__(self, factory):
181 182 self._factory = factory
182 183 self._bulk_methods = {
183 184 "affected_files": self.ctx_files,
184 185 "author": self.ctx_user,
185 186 "branch": self.ctx_branch,
186 187 "children": self.ctx_children,
187 188 "date": self.ctx_date,
188 189 "message": self.ctx_description,
189 190 "parents": self.ctx_parents,
190 191 "status": self.ctx_status,
191 192 "obsolete": self.ctx_obsolete,
192 193 "phase": self.ctx_phase,
193 194 "hidden": self.ctx_hidden,
194 195 "_file_paths": self.ctx_list,
195 196 }
196 197
197 198 def _get_ctx(self, repo, ref):
198 199 return get_ctx(repo, ref)
199 200
200 201 @reraise_safe_exceptions
201 202 def discover_hg_version(self):
202 203 from mercurial import util
203 204 return safe_str(util.version())
204 205
205 206 @reraise_safe_exceptions
206 207 def is_empty(self, wire):
207 208 repo = self._factory.repo(wire)
208 209
209 210 try:
210 211 return len(repo) == 0
211 212 except Exception:
212 213 log.exception("failed to read object_store")
213 214 return False
214 215
215 216 @reraise_safe_exceptions
216 217 def bookmarks(self, wire):
217 218 cache_on, context_uid, repo_id = self._cache_on(wire)
218 219 region = self._region(wire)
219 220
220 221 @region.conditional_cache_on_arguments(condition=cache_on)
221 222 def _bookmarks(_context_uid, _repo_id):
222 223 repo = self._factory.repo(wire)
223 224 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
224 225
225 226 return _bookmarks(context_uid, repo_id)
226 227
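# A sketch of the caching pattern used by the read-only methods of this class
# (assumed semantics, based on RemoteBase._cache_on/_region): the wire dict carries
# 'context', 'repo_id' and a 'cache' flag, and results are memoized per
# (context_uid, repo_id) in the dogpile region only when caching is enabled.
#
#   wire = {'path': '/repos/demo', 'repo_id': '42', 'context': 'ctx-1', 'cache': True}
#   remote = HgRemote(factory)   # factory: a MercurialFactory instance
#   remote.bookmarks(wire)       # first call computes and stores the result
#   remote.bookmarks(wire)       # second call is served from the cache region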
227 228 @reraise_safe_exceptions
228 229 def branches(self, wire, normal, closed):
229 230 cache_on, context_uid, repo_id = self._cache_on(wire)
230 231 region = self._region(wire)
231 232
232 233 @region.conditional_cache_on_arguments(condition=cache_on)
233 234 def _branches(_context_uid, _repo_id, _normal, _closed):
234 235 repo = self._factory.repo(wire)
235 236 iter_branches = repo.branchmap().iterbranches()
236 237 bt = {}
237 238 for branch_name, _heads, tip_node, is_closed in iter_branches:
238 239 if normal and not is_closed:
239 240 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
240 241 if closed and is_closed:
241 242 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
242 243
243 244 return bt
244 245
245 246 return _branches(context_uid, repo_id, normal, closed)
246 247
247 248 @reraise_safe_exceptions
248 249 def bulk_request(self, wire, commit_id, pre_load):
249 250 cache_on, context_uid, repo_id = self._cache_on(wire)
250 251 region = self._region(wire)
251 252
252 253 @region.conditional_cache_on_arguments(condition=cache_on)
253 254 def _bulk_request(_repo_id, _commit_id, _pre_load):
254 255 result = {}
255 256 for attr in pre_load:
256 257 try:
257 258 method = self._bulk_methods[attr]
258 259 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
259 260 result[attr] = method(wire, commit_id)
260 261 except KeyError as e:
261 262 raise exceptions.VcsException(e)(
262 263 'Unknown bulk attribute: "%s"' % attr)
263 264 return result
264 265
265 266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
266 267
267 268 @reraise_safe_exceptions
268 269 def ctx_branch(self, wire, commit_id):
269 270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 271 region = self._region(wire)
271 272
272 273 @region.conditional_cache_on_arguments(condition=cache_on)
273 274 def _ctx_branch(_repo_id, _commit_id):
274 275 repo = self._factory.repo(wire)
275 276 ctx = self._get_ctx(repo, commit_id)
276 277 return ctx.branch()
277 278 return _ctx_branch(repo_id, commit_id)
278 279
279 280 @reraise_safe_exceptions
280 281 def ctx_date(self, wire, commit_id):
281 282 cache_on, context_uid, repo_id = self._cache_on(wire)
282 283 region = self._region(wire)
283 284
284 285 @region.conditional_cache_on_arguments(condition=cache_on)
285 286 def _ctx_date(_repo_id, _commit_id):
286 287 repo = self._factory.repo(wire)
287 288 ctx = self._get_ctx(repo, commit_id)
288 289 return ctx.date()
289 290 return _ctx_date(repo_id, commit_id)
290 291
291 292 @reraise_safe_exceptions
292 293 def ctx_description(self, wire, revision):
293 294 repo = self._factory.repo(wire)
294 295 ctx = self._get_ctx(repo, revision)
295 296 return ctx.description()
296 297
297 298 @reraise_safe_exceptions
298 299 def ctx_files(self, wire, commit_id):
299 300 cache_on, context_uid, repo_id = self._cache_on(wire)
300 301 region = self._region(wire)
301 302
302 303 @region.conditional_cache_on_arguments(condition=cache_on)
303 304 def _ctx_files(_repo_id, _commit_id):
304 305 repo = self._factory.repo(wire)
305 306 ctx = self._get_ctx(repo, commit_id)
306 307 return ctx.files()
307 308
308 309 return _ctx_files(repo_id, commit_id)
309 310
310 311 @reraise_safe_exceptions
311 312 def ctx_list(self, wire, revision):
312 313 repo = self._factory.repo(wire)
313 314 ctx = self._get_ctx(repo, revision)
314 315 return list(ctx)
315 316
316 317 @reraise_safe_exceptions
317 318 def ctx_parents(self, wire, commit_id):
318 319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 320 region = self._region(wire)
320 321
321 322 @region.conditional_cache_on_arguments(condition=cache_on)
322 323 def _ctx_parents(_repo_id, _commit_id):
323 324 repo = self._factory.repo(wire)
324 325 ctx = self._get_ctx(repo, commit_id)
325 326 return [parent.hex() for parent in ctx.parents()
326 327 if not (parent.hidden() or parent.obsolete())]
327 328
328 329 return _ctx_parents(repo_id, commit_id)
329 330
330 331 @reraise_safe_exceptions
331 332 def ctx_children(self, wire, commit_id):
332 333 cache_on, context_uid, repo_id = self._cache_on(wire)
333 334 region = self._region(wire)
334 335
335 336 @region.conditional_cache_on_arguments(condition=cache_on)
336 337 def _ctx_children(_repo_id, _commit_id):
337 338 repo = self._factory.repo(wire)
338 339 ctx = self._get_ctx(repo, commit_id)
339 340 return [child.hex() for child in ctx.children()
340 341 if not (child.hidden() or child.obsolete())]
341 342
342 343 return _ctx_children(repo_id, commit_id)
343 344
344 345 @reraise_safe_exceptions
345 346 def ctx_phase(self, wire, commit_id):
346 347 cache_on, context_uid, repo_id = self._cache_on(wire)
347 348 region = self._region(wire)
348 349
349 350 @region.conditional_cache_on_arguments(condition=cache_on)
350 351 def _ctx_phase(_context_uid, _repo_id, _commit_id):
351 352 repo = self._factory.repo(wire)
352 353 ctx = self._get_ctx(repo, commit_id)
353 354 # public=0, draft=1, secret=3
354 355 return ctx.phase()
355 356 return _ctx_phase(context_uid, repo_id, commit_id)
356 357
357 358 @reraise_safe_exceptions
358 359 def ctx_obsolete(self, wire, commit_id):
359 360 cache_on, context_uid, repo_id = self._cache_on(wire)
360 361 region = self._region(wire)
361 362
362 363 @region.conditional_cache_on_arguments(condition=cache_on)
363 364 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
364 365 repo = self._factory.repo(wire)
365 366 ctx = self._get_ctx(repo, commit_id)
366 367 return ctx.obsolete()
367 368 return _ctx_obsolete(context_uid, repo_id, commit_id)
368 369
369 370 @reraise_safe_exceptions
370 371 def ctx_hidden(self, wire, commit_id):
371 372 cache_on, context_uid, repo_id = self._cache_on(wire)
372 373 region = self._region(wire)
373 374
374 375 @region.conditional_cache_on_arguments(condition=cache_on)
375 376 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
376 377 repo = self._factory.repo(wire)
377 378 ctx = self._get_ctx(repo, commit_id)
378 379 return ctx.hidden()
379 380 return _ctx_hidden(context_uid, repo_id, commit_id)
380 381
381 382 @reraise_safe_exceptions
382 383 def ctx_substate(self, wire, revision):
383 384 repo = self._factory.repo(wire)
384 385 ctx = self._get_ctx(repo, revision)
385 386 return ctx.substate
386 387
387 388 @reraise_safe_exceptions
388 389 def ctx_status(self, wire, revision):
389 390 repo = self._factory.repo(wire)
390 391 ctx = self._get_ctx(repo, revision)
391 392 status = repo[ctx.p1().node()].status(other=ctx.node())
392 393 # the status object (an odd, custom named tuple in mercurial) is not
393 394 # correctly serializable; we convert it to a list, as the underlying
394 395 # API expects a list
395 396 return list(status)
396 397
397 398 @reraise_safe_exceptions
398 399 def ctx_user(self, wire, revision):
399 400 repo = self._factory.repo(wire)
400 401 ctx = self._get_ctx(repo, revision)
401 402 return ctx.user()
402 403
403 404 @reraise_safe_exceptions
404 405 def check_url(self, url, config):
405 406 _proto = None
406 407 if '+' in url[:url.find('://')]:
407 408 _proto = url[0:url.find('+')]
408 409 url = url[url.find('+') + 1:]
409 410 handlers = []
410 411 url_obj = url_parser(url)
411 412 test_uri, authinfo = url_obj.authinfo()
412 413 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
413 414 url_obj.query = obfuscate_qs(url_obj.query)
414 415
415 416 cleaned_uri = str(url_obj)
416 417 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
417 418
418 419 if authinfo:
419 420 # create a password manager
420 421 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
421 422 passmgr.add_password(*authinfo)
422 423
423 424 handlers.extend((httpbasicauthhandler(passmgr),
424 425 httpdigestauthhandler(passmgr)))
425 426
426 427 o = urllib.request.build_opener(*handlers)
427 428 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
428 429 ('Accept', 'application/mercurial-0.1')]
429 430
430 431 q = {"cmd": 'between'}
431 432 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
432 433 qs = '?%s' % urllib.parse.urlencode(q)
433 434 cu = "%s%s" % (test_uri, qs)
434 435 req = urllib.request.Request(cu, None, {})
435 436
436 437 try:
437 438 log.debug("Trying to open URL %s", cleaned_uri)
438 439 resp = o.open(req)
439 440 if resp.code != 200:
440 441 raise exceptions.URLError()('Return Code is not 200')
441 442 except Exception as e:
442 443 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
443 444 # means it cannot be cloned
444 445 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
445 446
446 447 # now check if it's a proper hg repo, but don't do it for svn
447 448 try:
448 449 if _proto == 'svn':
449 450 pass
450 451 else:
451 452 # check for pure hg repos
452 453 log.debug(
453 454 "Verifying if URL is a Mercurial repository: %s",
454 455 cleaned_uri)
455 456 ui = make_ui_from_config(config)
456 457 peer_checker = makepeer(ui, url)
457 458 peer_checker.lookup('tip')
458 459 except Exception as e:
459 460 log.warning("URL is not a valid Mercurial repository: %s",
460 461 cleaned_uri)
461 462 raise exceptions.URLError(e)(
462 463 "url [%s] does not look like an hg repo org_exc: %s"
463 464 % (cleaned_uri, e))
464 465
465 466 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
466 467 return True
467 468
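# For illustration: the capability probe that check_url issues is a plain HTTP GET
# against the hg "between" command with a null pair, roughly (URL shape inferred
# from the code above, shown for a hypothetical remote):
#
#   https://hg.example.com/repo?cmd=between&pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
#
# A 200 response only proves the URL is reachable; the follow-up
# makepeer(ui, url).lookup('tip') confirms it actually speaks the Mercurial
# wire protocol.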
468 469 @reraise_safe_exceptions
469 470 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
470 471 repo = self._factory.repo(wire)
471 472
472 473 if file_filter:
473 474 match_filter = match(file_filter[0], '', [file_filter[1]])
474 475 else:
475 476 match_filter = file_filter
476 477 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
477 478
478 479 try:
479 480 diff_iter = patch.diff(
480 481 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
481 482 return b"".join(diff_iter)
482 483 except RepoLookupError as e:
483 484 raise exceptions.LookupException(e)()
484 485
485 486 @reraise_safe_exceptions
486 487 def node_history(self, wire, revision, path, limit):
487 488 cache_on, context_uid, repo_id = self._cache_on(wire)
488 489 region = self._region(wire)
489 490
490 491 @region.conditional_cache_on_arguments(condition=cache_on)
491 492 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
492 493 repo = self._factory.repo(wire)
493 494
494 495 ctx = self._get_ctx(repo, revision)
495 496 fctx = ctx.filectx(safe_bytes(path))
496 497
497 498 def history_iter():
498 499 limit_rev = fctx.rev()
499 500 for obj in reversed(list(fctx.filelog())):
500 501 obj = fctx.filectx(obj)
501 502 ctx = obj.changectx()
502 503 if ctx.hidden() or ctx.obsolete():
503 504 continue
504 505
505 506 if limit_rev >= obj.rev():
506 507 yield obj
507 508
508 509 history = []
509 510 for cnt, obj in enumerate(history_iter()):
510 511 if limit and cnt >= limit:
511 512 break
512 513 history.append(hex(obj.node()))
513 514
514 515 return history
515 516 return _node_history(context_uid, repo_id, revision, path, limit)
516 517
517 518 @reraise_safe_exceptions
518 519 def node_history_untill(self, wire, revision, path, limit):
519 520 cache_on, context_uid, repo_id = self._cache_on(wire)
520 521 region = self._region(wire)
521 522
522 523 @region.conditional_cache_on_arguments(condition=cache_on)
523 524 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
524 525 repo = self._factory.repo(wire)
525 526 ctx = self._get_ctx(repo, revision)
526 527 fctx = ctx.filectx(safe_bytes(path))
527 528
528 529 file_log = list(fctx.filelog())
529 530 if limit:
530 531 # Limit to the last n items
531 532 file_log = file_log[-limit:]
532 533
533 534 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
534 535 return _node_history_until(context_uid, repo_id, revision, path, limit)
535 536
536 537 @reraise_safe_exceptions
537 538 def fctx_annotate(self, wire, revision, path):
538 539 repo = self._factory.repo(wire)
539 540 ctx = self._get_ctx(repo, revision)
540 541 fctx = ctx.filectx(safe_bytes(path))
541 542
542 543 result = []
543 544 for i, annotate_obj in enumerate(fctx.annotate(), 1):
544 545 ln_no = i
545 546 sha = hex(annotate_obj.fctx.node())
546 547 content = annotate_obj.text
547 548 result.append((ln_no, sha, content))
548 549 return result
549 550
550 551 @reraise_safe_exceptions
551 552 def fctx_node_data(self, wire, revision, path):
552 553 repo = self._factory.repo(wire)
553 554 ctx = self._get_ctx(repo, revision)
554 555 fctx = ctx.filectx(safe_bytes(path))
555 556 return fctx.data()
556 557
557 558 @reraise_safe_exceptions
558 559 def fctx_flags(self, wire, commit_id, path):
559 560 cache_on, context_uid, repo_id = self._cache_on(wire)
560 561 region = self._region(wire)
561 562
562 563 @region.conditional_cache_on_arguments(condition=cache_on)
563 564 def _fctx_flags(_repo_id, _commit_id, _path):
564 565 repo = self._factory.repo(wire)
565 566 ctx = self._get_ctx(repo, commit_id)
566 567 fctx = ctx.filectx(safe_bytes(path))
567 568 return fctx.flags()
568 569
569 570 return _fctx_flags(repo_id, commit_id, path)
570 571
571 572 @reraise_safe_exceptions
572 573 def fctx_size(self, wire, commit_id, path):
573 574 cache_on, context_uid, repo_id = self._cache_on(wire)
574 575 region = self._region(wire)
575 576
576 577 @region.conditional_cache_on_arguments(condition=cache_on)
577 578 def _fctx_size(_repo_id, _revision, _path):
578 579 repo = self._factory.repo(wire)
579 580 ctx = self._get_ctx(repo, commit_id)
580 581 fctx = ctx.filectx(safe_bytes(path))
581 582 return fctx.size()
582 583 return _fctx_size(repo_id, commit_id, path)
583 584
584 585 @reraise_safe_exceptions
585 586 def get_all_commit_ids(self, wire, name):
586 587 cache_on, context_uid, repo_id = self._cache_on(wire)
587 588 region = self._region(wire)
588 589
589 590 @region.conditional_cache_on_arguments(condition=cache_on)
590 591 def _get_all_commit_ids(_context_uid, _repo_id, _name):
591 592 repo = self._factory.repo(wire)
592 593 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
593 594 return revs
594 595 return _get_all_commit_ids(context_uid, repo_id, name)
595 596
596 597 @reraise_safe_exceptions
597 598 def get_config_value(self, wire, section, name, untrusted=False):
598 599 repo = self._factory.repo(wire)
599 600 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
600 601
601 602 @reraise_safe_exceptions
602 603 def is_large_file(self, wire, commit_id, path):
603 604 cache_on, context_uid, repo_id = self._cache_on(wire)
604 605 region = self._region(wire)
605 606
606 607 @region.conditional_cache_on_arguments(condition=cache_on)
607 608 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
608 609 return largefiles.lfutil.isstandin(safe_bytes(path))
609 610
610 611 return _is_large_file(context_uid, repo_id, commit_id, path)
611 612
612 613 @reraise_safe_exceptions
613 614 def is_binary(self, wire, revision, path):
614 615 cache_on, context_uid, repo_id = self._cache_on(wire)
615 616 region = self._region(wire)
616 617
617 618 @region.conditional_cache_on_arguments(condition=cache_on)
618 619 def _is_binary(_repo_id, _sha, _path):
619 620 repo = self._factory.repo(wire)
620 621 ctx = self._get_ctx(repo, revision)
621 622 fctx = ctx.filectx(safe_bytes(path))
622 623 return fctx.isbinary()
623 624
624 625 return _is_binary(repo_id, revision, path)
625 626
626 627 @reraise_safe_exceptions
627 628 def md5_hash(self, wire, revision, path):
628 629 cache_on, context_uid, repo_id = self._cache_on(wire)
629 630 region = self._region(wire)
630 631
631 632 @region.conditional_cache_on_arguments(condition=cache_on)
632 633 def _md5_hash(_repo_id, _sha, _path):
633 634 repo = self._factory.repo(wire)
634 635 ctx = self._get_ctx(repo, revision)
635 636 fctx = ctx.filectx(safe_bytes(path))
636 637 return hashlib.md5(fctx.data()).hexdigest()
637 638
638 639 return _md5_hash(repo_id, revision, path)
639 640
640 641 @reraise_safe_exceptions
641 642 def in_largefiles_store(self, wire, sha):
642 643 repo = self._factory.repo(wire)
643 644 return largefiles.lfutil.instore(repo, sha)
644 645
645 646 @reraise_safe_exceptions
646 647 def in_user_cache(self, wire, sha):
647 648 repo = self._factory.repo(wire)
648 649 return largefiles.lfutil.inusercache(repo.ui, sha)
649 650
650 651 @reraise_safe_exceptions
651 652 def store_path(self, wire, sha):
652 653 repo = self._factory.repo(wire)
653 654 return largefiles.lfutil.storepath(repo, sha)
654 655
655 656 @reraise_safe_exceptions
656 657 def link(self, wire, sha, path):
657 658 repo = self._factory.repo(wire)
658 659 largefiles.lfutil.link(
659 660 largefiles.lfutil.usercachepath(repo.ui, sha), path)
660 661
661 662 @reraise_safe_exceptions
662 663 def localrepository(self, wire, create=False):
663 664 self._factory.repo(wire, create=create)
664 665
665 666 @reraise_safe_exceptions
666 667 def lookup(self, wire, revision, both):
667 668 cache_on, context_uid, repo_id = self._cache_on(wire)
668 669 region = self._region(wire)
669 670
670 671 @region.conditional_cache_on_arguments(condition=cache_on)
671 672 def _lookup(_context_uid, _repo_id, _revision, _both):
672 673
673 674 repo = self._factory.repo(wire)
674 675 rev = _revision
675 676 if isinstance(rev, int):
676 677 # NOTE(marcink):
677 678 # since Mercurial doesn't support negative indexes properly
678 679 # we need to shift by one to get the proper index, e.g.
679 680 # repo[-1] => repo[-2]
680 681 # repo[0] => repo[-1]
681 682 if rev <= 0:
682 683 rev = rev - 1
683 684 try:
684 685 ctx = self._get_ctx(repo, rev)
685 686 except (TypeError, RepoLookupError) as e:
686 687 e._org_exc_tb = traceback.format_exc()
687 688 raise exceptions.LookupException(e)(rev)
688 689 except LookupError as e:
689 690 e._org_exc_tb = traceback.format_exc()
690 691 raise exceptions.LookupException(e)(e.name)
691 692
692 693 if not both:
693 694 return ctx.hex()
694 695
695 696 ctx = repo[ctx.hex()]
696 697 return ctx.hex(), ctx.rev()
697 698
698 699 return _lookup(context_uid, repo_id, revision, both)
699 700
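# Worked example of the shift in _lookup above (read straight from the code):
# an integer revision of 0 from the caller is resolved as repo[-1], and -1 as
# repo[-2]; positive integers and string/hash revisions pass through unchanged.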
700 701 @reraise_safe_exceptions
701 702 def sync_push(self, wire, url):
702 703 if not self.check_url(url, wire['config']):
703 704 return
704 705
705 706 repo = self._factory.repo(wire)
706 707
707 708 # Disable any prompts for this repo
708 709 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
709 710
710 711 bookmarks = list(dict(repo._bookmarks).keys())
711 712 remote = peer(repo, {}, safe_bytes(url))
712 713 # Disable any prompts for this remote
713 714 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
714 715
715 716 return exchange.push(
716 717 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
717 718
718 719 @reraise_safe_exceptions
719 720 def revision(self, wire, rev):
720 721 repo = self._factory.repo(wire)
721 722 ctx = self._get_ctx(repo, rev)
722 723 return ctx.rev()
723 724
724 725 @reraise_safe_exceptions
725 726 def rev_range(self, wire, commit_filter):
726 727 cache_on, context_uid, repo_id = self._cache_on(wire)
727 728 region = self._region(wire)
728 729
729 730 @region.conditional_cache_on_arguments(condition=cache_on)
730 731 def _rev_range(_context_uid, _repo_id, _filter):
731 732 repo = self._factory.repo(wire)
732 733 revisions = [
733 734 ascii_str(repo[rev].hex())
734 735 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
735 736 ]
736 737 return revisions
737 738
738 739 return _rev_range(context_uid, repo_id, sorted(commit_filter))
739 740
740 741 @reraise_safe_exceptions
741 742 def rev_range_hash(self, wire, node):
742 743 repo = self._factory.repo(wire)
743 744
744 745 def get_revs(repo, rev_opt):
745 746 if rev_opt:
746 747 revs = revrange(repo, rev_opt)
747 748 if len(revs) == 0:
748 749 return (nullrev, nullrev)
749 750 return max(revs), min(revs)
750 751 else:
751 752 return len(repo) - 1, 0
752 753
753 754 stop, start = get_revs(repo, [node + ':'])
754 755 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
755 756 return revs
756 757
757 758 @reraise_safe_exceptions
758 759 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
759 760 other_path = kwargs.pop('other_path', None)
760 761
761 762 # case when we want to compare two independent repositories
762 763 if other_path and other_path != wire["path"]:
763 764 baseui = self._factory._create_config(wire["config"])
764 765 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
765 766 else:
766 767 repo = self._factory.repo(wire)
767 768 return list(repo.revs(rev_spec, *args))
768 769
769 770 @reraise_safe_exceptions
770 771 def verify(self, wire,):
771 772 repo = self._factory.repo(wire)
772 773 baseui = self._factory._create_config(wire['config'])
773 774
774 775 baseui, output = patch_ui_message_output(baseui)
775 776
776 777 repo.ui = baseui
777 778 verify.verify(repo)
778 779 return output.getvalue()
779 780
780 781 @reraise_safe_exceptions
781 782 def hg_update_cache(self, wire,):
782 783 repo = self._factory.repo(wire)
783 784 baseui = self._factory._create_config(wire['config'])
784 785 baseui, output = patch_ui_message_output(baseui)
785 786
786 787 repo.ui = baseui
787 788 with repo.wlock(), repo.lock():
788 789 repo.updatecaches(full=True)
789 790
790 791 return output.getvalue()
791 792
792 793 @reraise_safe_exceptions
793 794 def hg_rebuild_fn_cache(self, wire,):
794 795 repo = self._factory.repo(wire)
795 796 baseui = self._factory._create_config(wire['config'])
796 797 baseui, output = patch_ui_message_output(baseui)
797 798
798 799 repo.ui = baseui
799 800
800 801 repair.rebuildfncache(baseui, repo)
801 802
802 803 return output.getvalue()
803 804
804 805 @reraise_safe_exceptions
805 806 def tags(self, wire):
806 807 cache_on, context_uid, repo_id = self._cache_on(wire)
807 808 region = self._region(wire)
808 809
809 810 @region.conditional_cache_on_arguments(condition=cache_on)
810 811 def _tags(_context_uid, _repo_id):
811 812 repo = self._factory.repo(wire)
812 813 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
813 814
814 815 return _tags(context_uid, repo_id)
815 816
816 817 @reraise_safe_exceptions
817 818 def update(self, wire, node=None, clean=False):
818 819 repo = self._factory.repo(wire)
819 820 baseui = self._factory._create_config(wire['config'])
820 821 commands.update(baseui, repo, node=node, clean=clean)
821 822
822 823 @reraise_safe_exceptions
823 824 def identify(self, wire):
824 825 repo = self._factory.repo(wire)
825 826 baseui = self._factory._create_config(wire['config'])
826 827 output = io.BytesIO()
827 828 baseui.write = output.write
828 829 # This is required to get a full node id
829 830 baseui.debugflag = True
830 831 commands.identify(baseui, repo, id=True)
831 832
832 833 return output.getvalue()
833 834
834 835 @reraise_safe_exceptions
835 836 def heads(self, wire, branch=None):
836 837 repo = self._factory.repo(wire)
837 838 baseui = self._factory._create_config(wire['config'])
838 839 output = io.BytesIO()
839 840
840 841 def write(data, **unused_kwargs):
841 842 output.write(data)
842 843
843 844 baseui.write = write
844 845 if branch:
845 846 args = [safe_bytes(branch)]
846 847 else:
847 848 args = []
848 849 commands.heads(baseui, repo, template=b'{node} ', *args)
849 850
850 851 return output.getvalue()
851 852
852 853 @reraise_safe_exceptions
853 854 def ancestor(self, wire, revision1, revision2):
854 855 repo = self._factory.repo(wire)
855 856 changelog = repo.changelog
856 857 lookup = repo.lookup
857 858 a = changelog.ancestor(lookup(revision1), lookup(revision2))
858 859 return hex(a)
859 860
860 861 @reraise_safe_exceptions
861 862 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
862 863 baseui = self._factory._create_config(wire["config"], hooks=hooks)
863 864 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
864 865
865 866 @reraise_safe_exceptions
866 867 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
867 868
868 869 repo = self._factory.repo(wire)
869 870 baseui = self._factory._create_config(wire['config'])
870 871 publishing = baseui.configbool(b'phases', b'publish')
871 872
872 873 def _filectxfn(_repo, ctx, path: bytes):
873 874 """
874 875 Marks the given path as added/changed/removed in the given _repo. This is
875 876 used by mercurial's internal commit machinery.
876 877 """
877 878
878 879 # check if this path is removed
879 880 if safe_str(path) in removed:
880 881 # returning None is a way to mark node for removal
881 882 return None
882 883
883 884 # check if this path is added or changed
884 885 for node in updated:
885 886 if safe_bytes(node['path']) == path:
886 887 return memfilectx(
887 888 _repo,
888 889 changectx=ctx,
889 890 path=safe_bytes(node['path']),
890 891 data=safe_bytes(node['content']),
891 892 islink=False,
892 893 isexec=bool(node['mode'] & stat.S_IXUSR),
893 894 copysource=False)
894 895 abort_exc = exceptions.AbortException()
895 896 raise abort_exc(f"Given path hasn't been marked as added, changed or removed ({path})")
896 897
897 898 if publishing:
898 899 new_commit_phase = b'public'
899 900 else:
900 901 new_commit_phase = b'draft'
901 902 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
902 903 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
903 904 commit_ctx = memctx(
904 905 repo=repo,
905 906 parents=parents,
906 907 text=safe_bytes(message),
907 908 files=[safe_bytes(x) for x in files],
908 909 filectxfn=_filectxfn,
909 910 user=safe_bytes(user),
910 911 date=(commit_time, commit_timezone),
911 912 extra=kwargs)
912 913
913 914 n = repo.commitctx(commit_ctx)
914 915 new_id = hex(n)
915 916
916 917 return new_id
917 918
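# Shape of the commitctx() arguments as consumed by _filectxfn above (inferred
# from how the code reads them; the paths and contents are hypothetical):
#
#   updated = [{'path': 'docs/readme.rst', 'content': 'new text', 'mode': 0o100644}]
#   removed = ['old/obsolete.txt']
#   files   = ['docs/readme.rst', 'old/obsolete.txt']   # presumably all touched paths
#
# Each updated entry becomes a memfilectx; paths listed in removed make
# _filectxfn return None, which mercurial treats as file removal.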
918 919 @reraise_safe_exceptions
919 920 def pull(self, wire, url, commit_ids=None):
920 921 repo = self._factory.repo(wire)
921 922 # Disable any prompts for this repo
922 923 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
923 924
924 925 remote = peer(repo, {}, safe_bytes(url))
925 926 # Disable any prompts for this remote
926 927 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
927 928
928 929 if commit_ids:
929 930 commit_ids = [bin(commit_id) for commit_id in commit_ids]
930 931
931 932 return exchange.pull(
932 933 repo, remote, heads=commit_ids, force=None).cgresult
933 934
934 935 @reraise_safe_exceptions
935 936 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
936 937 repo = self._factory.repo(wire)
937 938 baseui = self._factory._create_config(wire['config'], hooks=hooks)
938 939
939 940 # Mercurial internally has a lot of logic that checks ONLY whether an
940 941 # option is defined, so we only pass the options that are actually set
941 942 opts = {}
942 943 if bookmark:
943 944 opts['bookmark'] = bookmark
944 945 if branch:
945 946 opts['branch'] = branch
946 947 if revision:
947 948 opts['rev'] = revision
948 949
949 950 commands.pull(baseui, repo, source, **opts)
950 951
951 952 @reraise_safe_exceptions
952 953 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
953 954 repo = self._factory.repo(wire)
954 955 baseui = self._factory._create_config(wire['config'], hooks=hooks)
955 956 commands.push(baseui, repo, dest=dest_path, rev=revisions,
956 957 new_branch=push_branches)
957 958
958 959 @reraise_safe_exceptions
959 960 def strip(self, wire, revision, update, backup):
960 961 repo = self._factory.repo(wire)
961 962 ctx = self._get_ctx(repo, revision)
962 963 hgext_strip(
963 964 repo.baseui, repo, ctx.node(), update=update, backup=backup)
964 965
965 966 @reraise_safe_exceptions
966 967 def get_unresolved_files(self, wire):
967 968 repo = self._factory.repo(wire)
968 969
969 970 log.debug('Calculating unresolved files for repo: %s', repo)
970 971 output = io.BytesIO()
971 972
972 973 def write(data, **unused_kwargs):
973 974 output.write(data)
974 975
975 976 baseui = self._factory._create_config(wire['config'])
976 977 baseui.write = write
977 978
978 979 commands.resolve(baseui, repo, list=True)
979 980 unresolved = output.getvalue().splitlines(0)
980 981 return unresolved
981 982
982 983 @reraise_safe_exceptions
983 984 def merge(self, wire, revision):
984 985 repo = self._factory.repo(wire)
985 986 baseui = self._factory._create_config(wire['config'])
986 987 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
987 988
988 989 # In case sub repositories are used, mercurial prompts the user in
989 990 # case of merge conflicts or different sub repository sources. By
990 991 # setting the interactive flag to `False` mercurial doesn't prompt the
991 992 # user but instead uses a default value.
992 993 repo.ui.setconfig(b'ui', b'interactive', False)
993 994 commands.merge(baseui, repo, rev=revision)
994 995
995 996 @reraise_safe_exceptions
996 997 def merge_state(self, wire):
997 998 repo = self._factory.repo(wire)
998 999 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
999 1000
1000 1001 # In case sub repositories are used, mercurial prompts the user in
1001 1002 # case of merge conflicts or different sub repository sources. By
1002 1003 # setting the interactive flag to `False` mercurial doesn't prompt the
1003 1004 # user but instead uses a default value.
1004 1005 repo.ui.setconfig(b'ui', b'interactive', False)
1005 1006 ms = hg_merge.mergestate(repo)
1006 1007 return [x for x in ms.unresolved()]
1007 1008
1008 1009 @reraise_safe_exceptions
1009 1010 def commit(self, wire, message, username, close_branch=False):
1010 1011 repo = self._factory.repo(wire)
1011 1012 baseui = self._factory._create_config(wire['config'])
1012 1013 repo.ui.setconfig(b'ui', b'username', username)
1013 1014 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1014 1015
1015 1016 @reraise_safe_exceptions
1016 1017 def rebase(self, wire, source=None, dest=None, abort=False):
1017 1018 repo = self._factory.repo(wire)
1018 1019 baseui = self._factory._create_config(wire['config'])
1019 1020 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1020 1021 # In case sub repositories are used, mercurial prompts the user in
1021 1022 # case of merge conflicts or different sub repository sources. By
1022 1023 # setting the interactive flag to `False` mercurial doesn't prompt the
1023 1024 # user but instead uses a default value.
1024 1025 repo.ui.setconfig(b'ui', b'interactive', False)
1025 1026 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1026 1027
1027 1028 @reraise_safe_exceptions
1028 1029 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1029 1030 repo = self._factory.repo(wire)
1030 1031 ctx = self._get_ctx(repo, revision)
1031 1032 node = ctx.node()
1032 1033
1033 1034 date = (tag_time, tag_timezone)
1034 1035 try:
1035 1036 hg_tag.tag(repo, name, node, message, local, user, date)
1036 1037 except Abort as e:
1037 1038 log.exception("Tag operation aborted")
1038 1039 # Exception can contain unicode which we convert
1039 1040 raise exceptions.AbortException(e)(repr(e))
1040 1041
1041 1042 @reraise_safe_exceptions
1042 1043 def bookmark(self, wire, bookmark, revision=None):
1043 1044 repo = self._factory.repo(wire)
1044 1045 baseui = self._factory._create_config(wire['config'])
1045 1046 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1046 1047
1047 1048 @reraise_safe_exceptions
1048 1049 def install_hooks(self, wire, force=False):
1049 1050 # we don't need any special hooks for Mercurial
1050 1051 pass
1051 1052
1052 1053 @reraise_safe_exceptions
1053 1054 def get_hooks_info(self, wire):
1054 1055 return {
1055 1056 'pre_version': vcsserver.__version__,
1056 1057 'post_version': vcsserver.__version__,
1057 1058 }
1058 1059
1059 1060 @reraise_safe_exceptions
1060 1061 def set_head_ref(self, wire, head_name):
1061 1062 pass
1062 1063
1063 1064 @reraise_safe_exceptions
1064 1065 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1065 1066 archive_dir_name, commit_id):
1066 1067
1067 1068 def file_walker(_commit_id, path):
1068 1069 repo = self._factory.repo(wire)
1069 1070 ctx = repo[_commit_id]
1070 1071 is_root = path in ['', '/']
1071 1072 if is_root:
1072 1073 matcher = alwaysmatcher(badfn=None)
1073 1074 else:
1074 1075 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1075 1076 file_iter = ctx.manifest().walk(matcher)
1076 1077
1077 1078 for fn in file_iter:
1078 1079 file_path = fn
1079 1080 flags = ctx.flags(fn)
1080 1081 mode = 0o755 if b'x' in flags else 0o644
1081 1082 is_link = b'l' in flags
1082 1083
1083 1084 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1084 1085
1085 1086 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1086 1087 archive_dir_name, commit_id)
1087 1088
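# Sketch of what the file_walker generator above produces for archive_repo
# (positional fields taken from the ArchiveNode(...) call in the method; the
# path used here is hypothetical):
#
#   for node in file_walker(commit_id, 'docs'):
#       ...
#
# Each yielded ArchiveNode carries (file_path, mode, is_link, data_callable),
# where mode is 0o755 when the 'x' flag is set and 0o644 otherwise, is_link
# reflects the 'l' flag, and the last element is ctx[fn].data, i.e. a callable
# returning the file contents as bytes.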
@@ -1,47 +1,47 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from vcsserver.lib import rc_cache
19 19
20 20
21 21 class RemoteBase(object):
22 22 EMPTY_COMMIT = '0' * 40
23 23
24 24 def _region(self, wire):
25 25 cache_repo_id = wire.get('cache_repo_id', '')
26 cache_namespace_uid = 'cache_repo.{}'.format(cache_repo_id)
26 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
27 27 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
28 28
29 29 def _cache_on(self, wire):
30 30 context = wire.get('context', '')
31 31 context_uid = '{}'.format(context)
32 32 repo_id = wire.get('repo_id', '')
33 33 cache = wire.get('cache', True)
34 34 cache_on = context and cache
35 35 return cache_on, context_uid, repo_id
36 36
37 37 def vcsserver_invalidate_cache(self, wire, delete):
38 38 from vcsserver.lib import rc_cache
39 39 repo_id = wire.get('repo_id', '')
40 40 cache_repo_id = wire.get('cache_repo_id', '')
41 cache_namespace_uid = 'cache_repo.{}'.format(cache_repo_id)
41 cache_namespace_uid = f'cache_repo.{cache_repo_id}'
42 42
43 43 if delete:
44 44 rc_cache.clear_cache_namespace(
45 45 'repo_object', cache_namespace_uid, invalidate=True)
46 46
47 47 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
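# Typical usage of the helpers above in a remote subclass (a minimal sketch
# mirroring the pattern in hg.py/git.py; names other than _cache_on/_region
# are illustrative):
#
#   class SomeRemote(RemoteBase):
#       def node_ids(self, wire):
#           cache_on, context_uid, repo_id = self._cache_on(wire)
#           region = self._region(wire)
#
#           @region.conditional_cache_on_arguments(condition=cache_on)
#           def _node_ids(_context_uid, _repo_id):
#               return self._expensive_lookup(wire)   # hypothetical helper
#
#           return _node_ids(context_uid, repo_id)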