sync: disable prompts so that processes can never be blocked waiting on input.
marcink
r381:d27ef2a8 default
@@ -1,658 +1,663 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 59 raise exceptions.LookupException(e.message)
60 60 except (HangupException, UnexpectedCommandError) as e:
61 61 raise exceptions.VcsException(e.message)
62 62 except Exception as e:
63 63 # NOTE(marcink): because of how dulwich handles some exceptions
64 64 # (KeyError on empty repos), we cannot track this and catch all
65 65 # exceptions; these are exceptions from other handlers
66 66 #if not hasattr(e, '_vcs_kind'):
67 67 #log.exception("Unhandled exception in git remote call")
68 68 #raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class Repo(DulwichRepo):
74 74 """
75 75 A wrapper for dulwich Repo class.
76 76
77 77 Since dulwich sometimes keeps .idx file descriptors open, it leads to a
78 78 "Too many open files" error. We need to close all opened file descriptors
79 79 once the repo object is destroyed.
80 80
81 81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 82 to 0.12.0 +
83 83 """
84 84 def __del__(self):
85 85 if hasattr(self, 'object_store'):
86 86 self.close()
87 87
88 88
89 89 class GitFactory(RepoFactory):
90 90
91 91 def _create_repo(self, wire, create):
92 92 repo_path = str_to_dulwich(wire['path'])
93 93 return Repo(repo_path)
94 94
95 95
96 96 class GitRemote(object):
97 97
98 98 def __init__(self, factory):
99 99 self._factory = factory
100 100
101 101 self._bulk_methods = {
102 102 "author": self.commit_attribute,
103 103 "date": self.get_object_attrs,
104 104 "message": self.commit_attribute,
105 105 "parents": self.commit_attribute,
106 106 "_commit": self.revision,
107 107 }
108 108
109 109 def _wire_to_config(self, wire):
110 110 if 'config' in wire:
111 111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 112 return {}
113 113
114 114 def _assign_ref(self, wire, ref, commit_id):
115 115 repo = self._factory.repo(wire)
116 116 repo[ref] = commit_id
117 117
118 118 @reraise_safe_exceptions
119 119 def add_object(self, wire, content):
120 120 repo = self._factory.repo(wire)
121 121 blob = objects.Blob()
122 122 blob.set_raw_string(content)
123 123 repo.object_store.add_object(blob)
124 124 return blob.id
125 125
126 126 @reraise_safe_exceptions
127 127 def assert_correct_path(self, wire):
128 128 path = wire.get('path')
129 129 try:
130 130 self._factory.repo(wire)
131 131 except NotGitRepository as e:
132 132 tb = traceback.format_exc()
133 133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 134 return False
135 135
136 136 return True
137 137
138 138 @reraise_safe_exceptions
139 139 def bare(self, wire):
140 140 repo = self._factory.repo(wire)
141 141 return repo.bare
142 142
143 143 @reraise_safe_exceptions
144 144 def blob_as_pretty_string(self, wire, sha):
145 145 repo = self._factory.repo(wire)
146 146 return repo[sha].as_pretty_string()
147 147
148 148 @reraise_safe_exceptions
149 149 def blob_raw_length(self, wire, sha):
150 150 repo = self._factory.repo(wire)
151 151 blob = repo[sha]
152 152 return blob.raw_length()
153 153
154 154 def _parse_lfs_pointer(self, raw_content):
155 155
156 156 spec_string = 'version https://git-lfs.github.com/spec'
157 157 if raw_content and raw_content.startswith(spec_string):
158 158 pattern = re.compile(r"""
159 159 (?:\n)?
160 160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 162 ^size[ ](?P<oid_size>[0-9]+)\n
163 163 (?:\n)?
164 164 """, re.VERBOSE | re.MULTILINE)
165 165 match = pattern.match(raw_content)
166 166 if match:
167 167 return match.groupdict()
168 168
169 169 return {}
170 170
171 171 @reraise_safe_exceptions
172 172 def is_large_file(self, wire, sha):
173 173 repo = self._factory.repo(wire)
174 174 blob = repo[sha]
175 175 return self._parse_lfs_pointer(blob.as_raw_string())
176 176
177 177 @reraise_safe_exceptions
178 178 def in_largefiles_store(self, wire, oid):
179 179 repo = self._factory.repo(wire)
180 180 conf = self._wire_to_config(wire)
181 181
182 182 store_location = conf.get('vcs_git_lfs_store_location')
183 183 if store_location:
184 184 repo_name = repo.path
185 185 store = LFSOidStore(
186 186 oid=oid, repo=repo_name, store_location=store_location)
187 187 return store.has_oid()
188 188
189 189 return False
190 190
191 191 @reraise_safe_exceptions
192 192 def store_path(self, wire, oid):
193 193 repo = self._factory.repo(wire)
194 194 conf = self._wire_to_config(wire)
195 195
196 196 store_location = conf.get('vcs_git_lfs_store_location')
197 197 if store_location:
198 198 repo_name = repo.path
199 199 store = LFSOidStore(
200 200 oid=oid, repo=repo_name, store_location=store_location)
201 201 return store.oid_path
202 202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 203
204 204 @reraise_safe_exceptions
205 205 def bulk_request(self, wire, rev, pre_load):
206 206 result = {}
207 207 for attr in pre_load:
208 208 try:
209 209 method = self._bulk_methods[attr]
210 210 args = [wire, rev]
211 211 if attr == "date":
212 212 args.extend(["commit_time", "commit_timezone"])
213 213 elif attr in ["author", "message", "parents"]:
214 214 args.append(attr)
215 215 result[attr] = method(*args)
216 216 except KeyError:
217 217 raise exceptions.VcsException(
218 218 "Unknown bulk attribute: %s" % attr)
219 219 return result
220 220
221 221 def _build_opener(self, url):
222 222 handlers = []
223 223 url_obj = url_parser(url)
224 224 _, authinfo = url_obj.authinfo()
225 225
226 226 if authinfo:
227 227 # create a password manager
228 228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 229 passmgr.add_password(*authinfo)
230 230
231 231 handlers.extend((httpbasicauthhandler(passmgr),
232 232 httpdigestauthhandler(passmgr)))
233 233
234 234 return urllib2.build_opener(*handlers)
235 235
236 236 @reraise_safe_exceptions
237 237 def check_url(self, url, config):
238 238 url_obj = url_parser(url)
239 239 test_uri, _ = url_obj.authinfo()
240 240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 241 url_obj.query = obfuscate_qs(url_obj.query)
242 242 cleaned_uri = str(url_obj)
243 243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244 244
245 245 if not test_uri.endswith('info/refs'):
246 246 test_uri = test_uri.rstrip('/') + '/info/refs'
247 247
248 248 o = self._build_opener(url)
249 249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250 250
251 251 q = {"service": 'git-upload-pack'}
252 252 qs = '?%s' % urllib.urlencode(q)
253 253 cu = "%s%s" % (test_uri, qs)
254 254 req = urllib2.Request(cu, None, {})
255 255
256 256 try:
257 257 log.debug("Trying to open URL %s", cleaned_uri)
258 258 resp = o.open(req)
259 259 if resp.code != 200:
260 260 raise exceptions.URLError('Return Code is not 200')
261 261 except Exception as e:
262 262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 263 # means it cannot be cloned
264 264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 265
266 266 # now detect if it's proper git repo
267 267 gitdata = resp.read()
268 268 if 'service=git-upload-pack' in gitdata:
269 269 pass
270 270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 271 # old style git can return some other format !
272 272 pass
273 273 else:
274 274 raise exceptions.URLError(
275 275 "url [%s] does not look like a git repository" % (cleaned_uri,))
276 276
277 277 return True
278 278
279 279 @reraise_safe_exceptions
280 280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 281 remote_refs = self.fetch(wire, url, apply_refs=False)
282 282 repo = self._factory.repo(wire)
283 283 if isinstance(valid_refs, list):
284 284 valid_refs = tuple(valid_refs)
285 285
286 286 for k in remote_refs:
287 287 # only parse heads/tags and skip so called deferred tags
288 288 if k.startswith(valid_refs) and not k.endswith(deferred):
289 289 repo[k] = remote_refs[k]
290 290
291 291 if update_after_clone:
292 292 # we want to checkout HEAD
293 293 repo["HEAD"] = remote_refs["HEAD"]
294 294 index.build_index_from_tree(repo.path, repo.index_path(),
295 295 repo.object_store, repo["HEAD"].tree)
296 296
297 297 # TODO: this is quite complex, check if that can be simplified
298 298 @reraise_safe_exceptions
299 299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 300 repo = self._factory.repo(wire)
301 301 object_store = repo.object_store
302 302
303 303 # Create tree and populates it with blobs
304 304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 305
306 306 for node in updated:
307 307 # Compute subdirs if needed
308 308 dirpath, nodename = vcspath.split(node['path'])
309 309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 310 parent = commit_tree
311 311 ancestors = [('', parent)]
312 312
313 313 # Tries to dig for the deepest existing tree
314 314 while dirnames:
315 315 curdir = dirnames.pop(0)
316 316 try:
317 317 dir_id = parent[curdir][1]
318 318 except KeyError:
319 319 # put curdir back into dirnames and stops
320 320 dirnames.insert(0, curdir)
321 321 break
322 322 else:
323 323 # If found, updates parent
324 324 parent = repo[dir_id]
325 325 ancestors.append((curdir, parent))
326 326 # Now parent is deepest existing tree and we need to create
327 327 # subtrees for dirnames (in reverse order)
328 328 # [this only applies for nodes from added]
329 329 new_trees = []
330 330
331 331 blob = objects.Blob.from_string(node['content'])
332 332
333 333 if dirnames:
334 334 # If there are trees which should be created we need to build
335 335 # them now (in reverse order)
336 336 reversed_dirnames = list(reversed(dirnames))
337 337 curtree = objects.Tree()
338 338 curtree[node['node_path']] = node['mode'], blob.id
339 339 new_trees.append(curtree)
340 340 for dirname in reversed_dirnames[:-1]:
341 341 newtree = objects.Tree()
342 342 newtree[dirname] = (DIR_STAT, curtree.id)
343 343 new_trees.append(newtree)
344 344 curtree = newtree
345 345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 346 else:
347 347 parent.add(
348 348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 349
350 350 new_trees.append(parent)
351 351 # Update ancestors
352 352 reversed_ancestors = reversed(
353 353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 354 for parent, tree, path in reversed_ancestors:
355 355 parent[path] = (DIR_STAT, tree.id)
356 356 object_store.add_object(tree)
357 357
358 358 object_store.add_object(blob)
359 359 for tree in new_trees:
360 360 object_store.add_object(tree)
361 361
362 362 for node_path in removed:
363 363 paths = node_path.split('/')
364 364 tree = commit_tree
365 365 trees = [tree]
366 366 # Traverse deep into the forest...
367 367 for path in paths:
368 368 try:
369 369 obj = repo[tree[path][1]]
370 370 if isinstance(obj, objects.Tree):
371 371 trees.append(obj)
372 372 tree = obj
373 373 except KeyError:
374 374 break
375 375 # Cut down the blob and all rotten trees on the way back...
376 376 for path, tree in reversed(zip(paths, trees)):
377 377 del tree[path]
378 378 if tree:
379 379 # This tree still has elements - don't remove it or any
380 380 # of its parents
381 381 break
382 382
383 383 object_store.add_object(commit_tree)
384 384
385 385 # Create commit
386 386 commit = objects.Commit()
387 387 commit.tree = commit_tree.id
388 388 for k, v in commit_data.iteritems():
389 389 setattr(commit, k, v)
390 390 object_store.add_object(commit)
391 391
392 392 ref = 'refs/heads/%s' % branch
393 393 repo.refs[ref] = commit.id
394 394
395 395 return commit.id
396 396
397 397 @reraise_safe_exceptions
398 398 def fetch(self, wire, url, apply_refs=True, refs=None):
399 399 if url != 'default' and '://' not in url:
400 400 client = LocalGitClient(url)
401 401 else:
402 402 url_obj = url_parser(url)
403 403 o = self._build_opener(url)
404 404 url, _ = url_obj.authinfo()
405 405 client = HttpGitClient(base_url=url, opener=o)
406 406 repo = self._factory.repo(wire)
407 407
408 408 determine_wants = repo.object_store.determine_wants_all
409 409 if refs:
410 410 def determine_wants_requested(references):
411 411 return [references[r] for r in references if r in refs]
412 412 determine_wants = determine_wants_requested
413 413
414 414 try:
415 415 remote_refs = client.fetch(
416 416 path=url, target=repo, determine_wants=determine_wants)
417 417 except NotGitRepository as e:
418 418 log.warning(
419 419 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 420 # Exception can contain unicode which we convert
421 421 raise exceptions.AbortException(repr(e))
422 422
423 423 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 424 # refs filtered by `determine_wants` function. We need to filter result
425 425 # as well
426 426 if refs:
427 427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 428
429 429 if apply_refs:
430 430 # TODO: johbo: Needs proper test coverage with a git repository
431 431 # that contains a tag object, so that we would end up with
432 432 # a peeled ref at this point.
433 433 PEELED_REF_MARKER = '^{}'
434 434 for k in remote_refs:
435 435 if k.endswith(PEELED_REF_MARKER):
436 436 log.info("Skipping peeled reference %s", k)
437 437 continue
438 438 repo[k] = remote_refs[k]
439 439
440 440 if refs:
441 441 # mikhail: explicitly set the head to the last ref.
442 442 repo['HEAD'] = remote_refs[refs[-1]]
443 443
444 444 # TODO: mikhail: should we return remote_refs here to be
445 445 # consistent?
446 446 else:
447 447 return remote_refs
448 448
449 449 @reraise_safe_exceptions
450 450 def sync_push(self, wire, url, refs=None):
451 451 if self.check_url(url, wire):
452 452 repo = self._factory.repo(wire)
453 453 self.run_git_command(
454 wire, ['push', url, '--mirror'], fail_on_stderr=False)
455
454 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 _copts=['-c', 'core.askpass=""'],
456 extra_env={'GIT_TERMINAL_PROMPT': '0'})
456 457
457 458 @reraise_safe_exceptions
458 459 def get_remote_refs(self, wire, url):
459 460 repo = Repo(url)
460 461 return repo.get_refs()
461 462
462 463 @reraise_safe_exceptions
463 464 def get_description(self, wire):
464 465 repo = self._factory.repo(wire)
465 466 return repo.get_description()
466 467
467 468 @reraise_safe_exceptions
468 469 def get_file_history(self, wire, file_path, commit_id, limit):
469 470 repo = self._factory.repo(wire)
470 471 include = [commit_id]
471 472 paths = [file_path]
472 473
473 474 walker = repo.get_walker(include, paths=paths, max_entries=limit)
474 475 return [x.commit.id for x in walker]
475 476
476 477 @reraise_safe_exceptions
477 478 def get_missing_revs(self, wire, rev1, rev2, path2):
478 479 repo = self._factory.repo(wire)
479 480 LocalGitClient(thin_packs=False).fetch(path2, repo)
480 481
481 482 wire_remote = wire.copy()
482 483 wire_remote['path'] = path2
483 484 repo_remote = self._factory.repo(wire_remote)
484 485 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
485 486
486 487 revs = [
487 488 x.commit.id
488 489 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
489 490 return revs
490 491
491 492 @reraise_safe_exceptions
492 493 def get_object(self, wire, sha):
493 494 repo = self._factory.repo(wire)
494 495 obj = repo.get_object(sha)
495 496 commit_id = obj.id
496 497
497 498 if isinstance(obj, Tag):
498 499 commit_id = obj.object[1]
499 500
500 501 return {
501 502 'id': obj.id,
502 503 'type': obj.type_name,
503 504 'commit_id': commit_id
504 505 }
505 506
506 507 @reraise_safe_exceptions
507 508 def get_object_attrs(self, wire, sha, *attrs):
508 509 repo = self._factory.repo(wire)
509 510 obj = repo.get_object(sha)
510 511 return list(getattr(obj, a) for a in attrs)
511 512
512 513 @reraise_safe_exceptions
513 514 def get_refs(self, wire):
514 515 repo = self._factory.repo(wire)
515 516 result = {}
516 517 for ref, sha in repo.refs.as_dict().items():
517 518 peeled_sha = repo.get_peeled(ref)
518 519 result[ref] = peeled_sha
519 520 return result
520 521
521 522 @reraise_safe_exceptions
522 523 def get_refs_path(self, wire):
523 524 repo = self._factory.repo(wire)
524 525 return repo.refs.path
525 526
526 527 @reraise_safe_exceptions
527 528 def head(self, wire):
528 529 repo = self._factory.repo(wire)
529 530 return repo.head()
530 531
531 532 @reraise_safe_exceptions
532 533 def init(self, wire):
533 534 repo_path = str_to_dulwich(wire['path'])
534 535 self.repo = Repo.init(repo_path)
535 536
536 537 @reraise_safe_exceptions
537 538 def init_bare(self, wire):
538 539 repo_path = str_to_dulwich(wire['path'])
539 540 self.repo = Repo.init_bare(repo_path)
540 541
541 542 @reraise_safe_exceptions
542 543 def revision(self, wire, rev):
543 544 repo = self._factory.repo(wire)
544 545 obj = repo[rev]
545 546 obj_data = {
546 547 'id': obj.id,
547 548 }
548 549 try:
549 550 obj_data['tree'] = obj.tree
550 551 except AttributeError:
551 552 pass
552 553 return obj_data
553 554
554 555 @reraise_safe_exceptions
555 556 def commit_attribute(self, wire, rev, attr):
556 557 repo = self._factory.repo(wire)
557 558 obj = repo[rev]
558 559 return getattr(obj, attr)
559 560
560 561 @reraise_safe_exceptions
561 562 def set_refs(self, wire, key, value):
562 563 repo = self._factory.repo(wire)
563 564 repo.refs[key] = value
564 565
565 566 @reraise_safe_exceptions
566 567 def remove_ref(self, wire, key):
567 568 repo = self._factory.repo(wire)
568 569 del repo.refs[key]
569 570
570 571 @reraise_safe_exceptions
571 572 def tree_changes(self, wire, source_id, target_id):
572 573 repo = self._factory.repo(wire)
573 574 source = repo[source_id].tree if source_id else None
574 575 target = repo[target_id].tree
575 576 result = repo.object_store.tree_changes(source, target)
576 577 return list(result)
577 578
578 579 @reraise_safe_exceptions
579 580 def tree_items(self, wire, tree_id):
580 581 repo = self._factory.repo(wire)
581 582 tree = repo[tree_id]
582 583
583 584 result = []
584 585 for item in tree.iteritems():
585 586 item_sha = item.sha
586 587 item_mode = item.mode
587 588
588 589 if FILE_MODE(item_mode) == GIT_LINK:
589 590 item_type = "link"
590 591 else:
591 592 item_type = repo[item_sha].type_name
592 593
593 594 result.append((item.path, item_mode, item_sha, item_type))
594 595 return result
595 596
596 597 @reraise_safe_exceptions
597 598 def update_server_info(self, wire):
598 599 repo = self._factory.repo(wire)
599 600 update_server_info(repo)
600 601
601 602 @reraise_safe_exceptions
602 603 def discover_git_version(self):
603 604 stdout, _ = self.run_git_command(
604 605 {}, ['--version'], _bare=True, _safe=True)
605 606 prefix = 'git version'
606 607 if stdout.startswith(prefix):
607 608 stdout = stdout[len(prefix):]
608 609 return stdout.strip()
609 610
610 611 @reraise_safe_exceptions
611 612 def run_git_command(self, wire, cmd, **opts):
612 613 path = wire.get('path', None)
613 614
614 615 if path and os.path.isdir(path):
615 616 opts['cwd'] = path
616 617
617 618 if '_bare' in opts:
618 619 _copts = []
619 620 del opts['_bare']
620 621 else:
621 622 _copts = ['-c', 'core.quotepath=false', ]
622 623 safe_call = False
623 624 if '_safe' in opts:
624 625 # no exc on failure
625 626 del opts['_safe']
626 627 safe_call = True
627 628
629 if '_copts' in opts:
630 _copts.extend(opts['_copts'] or [])
631 del opts['_copts']
632
628 633 gitenv = os.environ.copy()
629 634 gitenv.update(opts.pop('extra_env', {}))
630 635 # need to clean fix GIT_DIR !
631 636 if 'GIT_DIR' in gitenv:
632 637 del gitenv['GIT_DIR']
633 638 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
634 639
635 640 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
636 641
637 642 try:
638 643 _opts = {'env': gitenv, 'shell': False}
639 644 _opts.update(opts)
640 645 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
641 646
642 647 return ''.join(p), ''.join(p.error)
643 648 except (EnvironmentError, OSError) as err:
644 649 cmd = ' '.join(cmd) # human friendly CMD
645 650 tb_err = ("Couldn't run git command (%s).\n"
646 651 "Original error was:%s\n" % (cmd, err))
647 652 log.exception(tb_err)
648 653 if safe_call:
649 654 return '', err
650 655 else:
651 656 raise exceptions.VcsException(tb_err)
652 657
653 658
654 659 def str_to_dulwich(value):
655 660 """
656 661 Dulwich 0.10.1a requires `unicode` objects to be passed in.
657 662 """
658 663 return value.decode(settings.WIRE_ENCODING)
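
The git.py hunk above threads two new options through run_git_command: `_copts`, a list of extra `-c` configuration flags prepended to the git command line, and `extra_env`, a dict merged into the subprocess environment. sync_push uses them to pass `core.askpass=""` and `GIT_TERMINAL_PROMPT=0`, so a mirror push fails fast instead of blocking on a credential prompt. A minimal standalone sketch of the same idea, assuming only a local `git` binary (version 2.3 or newer for GIT_TERMINAL_PROMPT) and hypothetical `repo_dir`/`remote_url` values:

import os
import subprocess

def run_git_no_prompt(args, cwd):
    # Copy the environment and tell git (>= 2.3) never to prompt for
    # credentials on the terminal; the command errors out instead of hanging.
    env = os.environ.copy()
    env['GIT_TERMINAL_PROMPT'] = '0'
    # Mirror the _copts value used in the hunk above to neutralize askpass too.
    cmd = ['git', '-c', 'core.askpass=""'] + args
    return subprocess.check_output(cmd, cwd=cwd, env=env)

# Hypothetical usage: mirror-push a repository without any interactive prompt.
# run_git_no_prompt(['push', remote_url, '--mirror'], repo_dir)
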
@@ -1,758 +1,771 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from repositories hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 77 except (Abort, InterventionRequired):
78 78 raise_from_original(exceptions.AbortException)
79 79 except RepoLookupError:
80 80 raise_from_original(exceptions.LookupException)
81 81 except RequirementError:
82 82 raise_from_original(exceptions.RequirementException)
83 83 except RepoError:
84 84 raise_from_original(exceptions.VcsException)
85 85 except LookupError:
86 86 raise_from_original(exceptions.LookupException)
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 90 raise_from_original(exceptions.UnhandledException)
91 91 raise
92 92 return wrapper
93 93
94 94
95 95 class MercurialFactory(RepoFactory):
96 96
97 97 def _create_config(self, config, hooks=True):
98 98 if not hooks:
99 99 hooks_to_clean = frozenset((
100 100 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 102 new_config = []
103 103 for section, option, value in config:
104 104 if section == 'hooks' and option in hooks_to_clean:
105 105 continue
106 106 new_config.append((section, option, value))
107 107 config = new_config
108 108
109 109 baseui = make_ui_from_config(config)
110 110 return baseui
111 111
112 112 def _create_repo(self, wire, create):
113 113 baseui = self._create_config(wire["config"])
114 114 return localrepository(baseui, wire["path"], create)
115 115
116 116
117 117 class HgRemote(object):
118 118
119 119 def __init__(self, factory):
120 120 self._factory = factory
121 121
122 122 self._bulk_methods = {
123 123 "affected_files": self.ctx_files,
124 124 "author": self.ctx_user,
125 125 "branch": self.ctx_branch,
126 126 "children": self.ctx_children,
127 127 "date": self.ctx_date,
128 128 "message": self.ctx_description,
129 129 "parents": self.ctx_parents,
130 130 "status": self.ctx_status,
131 131 "obsolete": self.ctx_obsolete,
132 132 "phase": self.ctx_phase,
133 133 "hidden": self.ctx_hidden,
134 134 "_file_paths": self.ctx_list,
135 135 }
136 136
137 137 @reraise_safe_exceptions
138 138 def discover_hg_version(self):
139 139 from mercurial import util
140 140 return util.version()
141 141
142 142 @reraise_safe_exceptions
143 143 def archive_repo(self, archive_path, mtime, file_info, kind):
144 144 if kind == "tgz":
145 145 archiver = archival.tarit(archive_path, mtime, "gz")
146 146 elif kind == "tbz2":
147 147 archiver = archival.tarit(archive_path, mtime, "bz2")
148 148 elif kind == 'zip':
149 149 archiver = archival.zipit(archive_path, mtime)
150 150 else:
151 151 raise exceptions.ArchiveException(
152 152 'Remote does not support: "%s".' % kind)
153 153
154 154 for f_path, f_mode, f_is_link, f_content in file_info:
155 155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 156 archiver.done()
157 157
158 158 @reraise_safe_exceptions
159 159 def bookmarks(self, wire):
160 160 repo = self._factory.repo(wire)
161 161 return dict(repo._bookmarks)
162 162
163 163 @reraise_safe_exceptions
164 164 def branches(self, wire, normal, closed):
165 165 repo = self._factory.repo(wire)
166 166 iter_branches = repo.branchmap().iterbranches()
167 167 bt = {}
168 168 for branch_name, _heads, tip, is_closed in iter_branches:
169 169 if normal and not is_closed:
170 170 bt[branch_name] = tip
171 171 if closed and is_closed:
172 172 bt[branch_name] = tip
173 173
174 174 return bt
175 175
176 176 @reraise_safe_exceptions
177 177 def bulk_request(self, wire, rev, pre_load):
178 178 result = {}
179 179 for attr in pre_load:
180 180 try:
181 181 method = self._bulk_methods[attr]
182 182 result[attr] = method(wire, rev)
183 183 except KeyError:
184 184 raise exceptions.VcsException(
185 185 'Unknown bulk attribute: "%s"' % attr)
186 186 return result
187 187
188 188 @reraise_safe_exceptions
189 189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 191 clone(baseui, source, dest, noupdate=not update_after_clone)
192 192
193 193 @reraise_safe_exceptions
194 194 def commitctx(
195 195 self, wire, message, parents, commit_time, commit_timezone,
196 196 user, files, extra, removed, updated):
197 197
198 198 def _filectxfn(_repo, memctx, path):
199 199 """
200 200 Marks given path as added/changed/removed in a given _repo. This is
201 201 for internal mercurial commit function.
202 202 """
203 203
204 204 # check if this path is removed
205 205 if path in removed:
206 206 # returning None is a way to mark node for removal
207 207 return None
208 208
209 209 # check if this path is added
210 210 for node in updated:
211 211 if node['path'] == path:
212 212 return memfilectx(
213 213 _repo,
214 214 path=node['path'],
215 215 data=node['content'],
216 216 islink=False,
217 217 isexec=bool(node['mode'] & stat.S_IXUSR),
218 218 copied=False,
219 219 memctx=memctx)
220 220
221 221 raise exceptions.AbortException(
222 222 "Given path hasn't been marked as added, "
223 223 "changed or removed (%s)" % path)
224 224
225 225 repo = self._factory.repo(wire)
226 226
227 227 commit_ctx = memctx(
228 228 repo=repo,
229 229 parents=parents,
230 230 text=message,
231 231 files=files,
232 232 filectxfn=_filectxfn,
233 233 user=user,
234 234 date=(commit_time, commit_timezone),
235 235 extra=extra)
236 236
237 237 n = repo.commitctx(commit_ctx)
238 238 new_id = hex(n)
239 239
240 240 return new_id
241 241
242 242 @reraise_safe_exceptions
243 243 def ctx_branch(self, wire, revision):
244 244 repo = self._factory.repo(wire)
245 245 ctx = repo[revision]
246 246 return ctx.branch()
247 247
248 248 @reraise_safe_exceptions
249 249 def ctx_children(self, wire, revision):
250 250 repo = self._factory.repo(wire)
251 251 ctx = repo[revision]
252 252 return [child.rev() for child in ctx.children()]
253 253
254 254 @reraise_safe_exceptions
255 255 def ctx_date(self, wire, revision):
256 256 repo = self._factory.repo(wire)
257 257 ctx = repo[revision]
258 258 return ctx.date()
259 259
260 260 @reraise_safe_exceptions
261 261 def ctx_description(self, wire, revision):
262 262 repo = self._factory.repo(wire)
263 263 ctx = repo[revision]
264 264 return ctx.description()
265 265
266 266 @reraise_safe_exceptions
267 267 def ctx_diff(
268 268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 269 repo = self._factory.repo(wire)
270 270 ctx = repo[revision]
271 271 result = ctx.diff(
272 272 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 273 return list(result)
274 274
275 275 @reraise_safe_exceptions
276 276 def ctx_files(self, wire, revision):
277 277 repo = self._factory.repo(wire)
278 278 ctx = repo[revision]
279 279 return ctx.files()
280 280
281 281 @reraise_safe_exceptions
282 282 def ctx_list(self, path, revision):
283 283 repo = self._factory.repo(path)
284 284 ctx = repo[revision]
285 285 return list(ctx)
286 286
287 287 @reraise_safe_exceptions
288 288 def ctx_parents(self, wire, revision):
289 289 repo = self._factory.repo(wire)
290 290 ctx = repo[revision]
291 291 return [parent.rev() for parent in ctx.parents()]
292 292
293 293 @reraise_safe_exceptions
294 294 def ctx_phase(self, wire, revision):
295 295 repo = self._factory.repo(wire)
296 296 ctx = repo[revision]
297 297 # public=0, draft=1, secret=3
298 298 return ctx.phase()
299 299
300 300 @reraise_safe_exceptions
301 301 def ctx_obsolete(self, wire, revision):
302 302 repo = self._factory.repo(wire)
303 303 ctx = repo[revision]
304 304 return ctx.obsolete()
305 305
306 306 @reraise_safe_exceptions
307 307 def ctx_hidden(self, wire, revision):
308 308 repo = self._factory.repo(wire)
309 309 ctx = repo[revision]
310 310 return ctx.hidden()
311 311
312 312 @reraise_safe_exceptions
313 313 def ctx_substate(self, wire, revision):
314 314 repo = self._factory.repo(wire)
315 315 ctx = repo[revision]
316 316 return ctx.substate
317 317
318 318 @reraise_safe_exceptions
319 319 def ctx_status(self, wire, revision):
320 320 repo = self._factory.repo(wire)
321 321 ctx = repo[revision]
322 322 status = repo[ctx.p1().node()].status(other=ctx.node())
323 323 # object of status (odd, custom named tuple in mercurial) is not
324 324 # correctly serializable, we make it a list, as the underlying
325 325 # API expects this to be a list
326 326 return list(status)
327 327
328 328 @reraise_safe_exceptions
329 329 def ctx_user(self, wire, revision):
330 330 repo = self._factory.repo(wire)
331 331 ctx = repo[revision]
332 332 return ctx.user()
333 333
334 334 @reraise_safe_exceptions
335 335 def check_url(self, url, config):
336 336 _proto = None
337 337 if '+' in url[:url.find('://')]:
338 338 _proto = url[0:url.find('+')]
339 339 url = url[url.find('+') + 1:]
340 340 handlers = []
341 341 url_obj = url_parser(url)
342 342 test_uri, authinfo = url_obj.authinfo()
343 343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 344 url_obj.query = obfuscate_qs(url_obj.query)
345 345
346 346 cleaned_uri = str(url_obj)
347 347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348 348
349 349 if authinfo:
350 350 # create a password manager
351 351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 352 passmgr.add_password(*authinfo)
353 353
354 354 handlers.extend((httpbasicauthhandler(passmgr),
355 355 httpdigestauthhandler(passmgr)))
356 356
357 357 o = urllib2.build_opener(*handlers)
358 358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 359 ('Accept', 'application/mercurial-0.1')]
360 360
361 361 q = {"cmd": 'between'}
362 362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 363 qs = '?%s' % urllib.urlencode(q)
364 364 cu = "%s%s" % (test_uri, qs)
365 365 req = urllib2.Request(cu, None, {})
366 366
367 367 try:
368 368 log.debug("Trying to open URL %s", cleaned_uri)
369 369 resp = o.open(req)
370 370 if resp.code != 200:
371 371 raise exceptions.URLError('Return Code is not 200')
372 372 except Exception as e:
373 373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 374 # means it cannot be cloned
375 375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376 376
377 377 # now check if it's a proper hg repo, but don't do it for svn
378 378 try:
379 379 if _proto == 'svn':
380 380 pass
381 381 else:
382 382 # check for pure hg repos
383 383 log.debug(
384 384 "Verifying if URL is a Mercurial repository: %s",
385 385 cleaned_uri)
386 386 httppeer(make_ui_from_config(config), url).lookup('tip')
387 387 except Exception as e:
388 388 log.warning("URL is not a valid Mercurial repository: %s",
389 389 cleaned_uri)
390 390 raise exceptions.URLError(
391 391 "url [%s] does not look like an hg repo org_exc: %s"
392 392 % (cleaned_uri, e))
393 393
394 394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
395 395 return True
396 396
397 397 @reraise_safe_exceptions
398 398 def diff(
399 399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
400 400 context):
401 401 repo = self._factory.repo(wire)
402 402
403 403 if file_filter:
404 404 match_filter = match(file_filter[0], '', [file_filter[1]])
405 405 else:
406 406 match_filter = file_filter
407 407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
408 408
409 409 try:
410 410 return "".join(patch.diff(
411 411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 412 except RepoLookupError:
413 413 raise exceptions.LookupException()
414 414
415 415 @reraise_safe_exceptions
416 416 def file_history(self, wire, revision, path, limit):
417 417 repo = self._factory.repo(wire)
418 418
419 419 ctx = repo[revision]
420 420 fctx = ctx.filectx(path)
421 421
422 422 def history_iter():
423 423 limit_rev = fctx.rev()
424 424 for obj in reversed(list(fctx.filelog())):
425 425 obj = fctx.filectx(obj)
426 426 if limit_rev >= obj.rev():
427 427 yield obj
428 428
429 429 history = []
430 430 for cnt, obj in enumerate(history_iter()):
431 431 if limit and cnt >= limit:
432 432 break
433 433 history.append(hex(obj.node()))
434 434
435 435 return [x for x in history]
436 436
437 437 @reraise_safe_exceptions
438 438 def file_history_untill(self, wire, revision, path, limit):
439 439 repo = self._factory.repo(wire)
440 440 ctx = repo[revision]
441 441 fctx = ctx.filectx(path)
442 442
443 443 file_log = list(fctx.filelog())
444 444 if limit:
445 445 # Limit to the last n items
446 446 file_log = file_log[-limit:]
447 447
448 448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
449 449
450 450 @reraise_safe_exceptions
451 451 def fctx_annotate(self, wire, revision, path):
452 452 repo = self._factory.repo(wire)
453 453 ctx = repo[revision]
454 454 fctx = ctx.filectx(path)
455 455
456 456 result = []
457 457 for i, (a_line, content) in enumerate(fctx.annotate()):
458 458 ln_no = i + 1
459 459 sha = hex(a_line.fctx.node())
460 460 result.append((ln_no, sha, content))
461 461 return result
462 462
463 463 @reraise_safe_exceptions
464 464 def fctx_data(self, wire, revision, path):
465 465 repo = self._factory.repo(wire)
466 466 ctx = repo[revision]
467 467 fctx = ctx.filectx(path)
468 468 return fctx.data()
469 469
470 470 @reraise_safe_exceptions
471 471 def fctx_flags(self, wire, revision, path):
472 472 repo = self._factory.repo(wire)
473 473 ctx = repo[revision]
474 474 fctx = ctx.filectx(path)
475 475 return fctx.flags()
476 476
477 477 @reraise_safe_exceptions
478 478 def fctx_size(self, wire, revision, path):
479 479 repo = self._factory.repo(wire)
480 480 ctx = repo[revision]
481 481 fctx = ctx.filectx(path)
482 482 return fctx.size()
483 483
484 484 @reraise_safe_exceptions
485 485 def get_all_commit_ids(self, wire, name):
486 486 repo = self._factory.repo(wire)
487 487 revs = repo.filtered(name).changelog.index
488 488 return map(lambda x: hex(x[7]), revs)[:-1]
489 489
490 490 @reraise_safe_exceptions
491 491 def get_config_value(self, wire, section, name, untrusted=False):
492 492 repo = self._factory.repo(wire)
493 493 return repo.ui.config(section, name, untrusted=untrusted)
494 494
495 495 @reraise_safe_exceptions
496 496 def get_config_bool(self, wire, section, name, untrusted=False):
497 497 repo = self._factory.repo(wire)
498 498 return repo.ui.configbool(section, name, untrusted=untrusted)
499 499
500 500 @reraise_safe_exceptions
501 501 def get_config_list(self, wire, section, name, untrusted=False):
502 502 repo = self._factory.repo(wire)
503 503 return repo.ui.configlist(section, name, untrusted=untrusted)
504 504
505 505 @reraise_safe_exceptions
506 506 def is_large_file(self, wire, path):
507 507 return largefiles.lfutil.isstandin(path)
508 508
509 509 @reraise_safe_exceptions
510 510 def in_largefiles_store(self, wire, sha):
511 511 repo = self._factory.repo(wire)
512 512 return largefiles.lfutil.instore(repo, sha)
513 513
514 514 @reraise_safe_exceptions
515 515 def in_user_cache(self, wire, sha):
516 516 repo = self._factory.repo(wire)
517 517 return largefiles.lfutil.inusercache(repo.ui, sha)
518 518
519 519 @reraise_safe_exceptions
520 520 def store_path(self, wire, sha):
521 521 repo = self._factory.repo(wire)
522 522 return largefiles.lfutil.storepath(repo, sha)
523 523
524 524 @reraise_safe_exceptions
525 525 def link(self, wire, sha, path):
526 526 repo = self._factory.repo(wire)
527 527 largefiles.lfutil.link(
528 528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529 529
530 530 @reraise_safe_exceptions
531 531 def localrepository(self, wire, create=False):
532 532 self._factory.repo(wire, create=create)
533 533
534 534 @reraise_safe_exceptions
535 535 def lookup(self, wire, revision, both):
536 536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 537 if isinstance(revision, float):
538 538 revision = long(revision)
539 539 repo = self._factory.repo(wire)
540 540 try:
541 541 ctx = repo[revision]
542 542 except RepoLookupError:
543 543 raise exceptions.LookupException(revision)
544 544 except LookupError as e:
545 545 raise exceptions.LookupException(e.name)
546 546
547 547 if not both:
548 548 return ctx.hex()
549 549
550 550 ctx = repo[ctx.hex()]
551 551 return ctx.hex(), ctx.rev()
552 552
553 553 @reraise_safe_exceptions
554 554 def pull(self, wire, url, commit_ids=None):
555 555 repo = self._factory.repo(wire)
556 # Disable any prompts for this repo
557 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
558
556 559 remote = peer(repo, {}, url)
560 # Disable any prompts for this remote
561 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
562
557 563 if commit_ids:
558 564 commit_ids = [bin(commit_id) for commit_id in commit_ids]
559 565
560 566 return exchange.pull(
561 567 repo, remote, heads=commit_ids, force=None).cgresult
562 568
563 569 @reraise_safe_exceptions
564 570 def sync_push(self, wire, url):
565 571 if self.check_url(url, wire['config']):
566 572 repo = self._factory.repo(wire)
573
574 # Disable any prompts for this repo
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576
567 577 bookmarks = dict(repo._bookmarks).keys()
568 578 remote = peer(repo, {}, url)
579 # Disable any prompts for this remote
580 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
581
569 582 return exchange.push(
570 583 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
571 584
572 585 @reraise_safe_exceptions
573 586 def revision(self, wire, rev):
574 587 repo = self._factory.repo(wire)
575 588 ctx = repo[rev]
576 589 return ctx.rev()
577 590
578 591 @reraise_safe_exceptions
579 592 def rev_range(self, wire, filter):
580 593 repo = self._factory.repo(wire)
581 594 revisions = [rev for rev in revrange(repo, filter)]
582 595 return revisions
583 596
584 597 @reraise_safe_exceptions
585 598 def rev_range_hash(self, wire, node):
586 599 repo = self._factory.repo(wire)
587 600
588 601 def get_revs(repo, rev_opt):
589 602 if rev_opt:
590 603 revs = revrange(repo, rev_opt)
591 604 if len(revs) == 0:
592 605 return (nullrev, nullrev)
593 606 return max(revs), min(revs)
594 607 else:
595 608 return len(repo) - 1, 0
596 609
597 610 stop, start = get_revs(repo, [node + ':'])
598 611 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
599 612 return revs
600 613
601 614 @reraise_safe_exceptions
602 615 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
603 616 other_path = kwargs.pop('other_path', None)
604 617
605 618 # case when we want to compare two independent repositories
606 619 if other_path and other_path != wire["path"]:
607 620 baseui = self._factory._create_config(wire["config"])
608 621 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
609 622 else:
610 623 repo = self._factory.repo(wire)
611 624 return list(repo.revs(rev_spec, *args))
612 625
613 626 @reraise_safe_exceptions
614 627 def strip(self, wire, revision, update, backup):
615 628 repo = self._factory.repo(wire)
616 629 ctx = repo[revision]
617 630 hgext_strip(
618 631 repo.baseui, repo, ctx.node(), update=update, backup=backup)
619 632
620 633 @reraise_safe_exceptions
621 634 def verify(self, wire,):
622 635 repo = self._factory.repo(wire)
623 636 baseui = self._factory._create_config(wire['config'])
624 637 baseui.setconfig('ui', 'quiet', 'false')
625 638 output = io.BytesIO()
626 639
627 640 def write(data, **unused_kwargs):
628 641 output.write(data)
629 642 baseui.write = write
630 643
631 644 repo.ui = baseui
632 645 verify.verify(repo)
633 646 return output.getvalue()
634 647
635 648 @reraise_safe_exceptions
636 649 def tag(self, wire, name, revision, message, local, user,
637 650 tag_time, tag_timezone):
638 651 repo = self._factory.repo(wire)
639 652 ctx = repo[revision]
640 653 node = ctx.node()
641 654
642 655 date = (tag_time, tag_timezone)
643 656 try:
644 657 hg_tag.tag(repo, name, node, message, local, user, date)
645 658 except Abort as e:
646 659 log.exception("Tag operation aborted")
647 660 # Exception can contain unicode which we convert
648 661 raise exceptions.AbortException(repr(e))
649 662
650 663 @reraise_safe_exceptions
651 664 def tags(self, wire):
652 665 repo = self._factory.repo(wire)
653 666 return repo.tags()
654 667
655 668 @reraise_safe_exceptions
656 669 def update(self, wire, node=None, clean=False):
657 670 repo = self._factory.repo(wire)
658 671 baseui = self._factory._create_config(wire['config'])
659 672 commands.update(baseui, repo, node=node, clean=clean)
660 673
661 674 @reraise_safe_exceptions
662 675 def identify(self, wire):
663 676 repo = self._factory.repo(wire)
664 677 baseui = self._factory._create_config(wire['config'])
665 678 output = io.BytesIO()
666 679 baseui.write = output.write
667 680 # This is required to get a full node id
668 681 baseui.debugflag = True
669 682 commands.identify(baseui, repo, id=True)
670 683
671 684 return output.getvalue()
672 685
673 686 @reraise_safe_exceptions
674 687 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
675 688 hooks=True):
676 689 repo = self._factory.repo(wire)
677 690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
678 691
679 692 # Mercurial internally has a lot of logic that checks ONLY if
680 693 # option is defined, we just pass those if they are defined then
681 694 opts = {}
682 695 if bookmark:
683 696 opts['bookmark'] = bookmark
684 697 if branch:
685 698 opts['branch'] = branch
686 699 if revision:
687 700 opts['rev'] = revision
688 701
689 702 commands.pull(baseui, repo, source, **opts)
690 703
691 704 @reraise_safe_exceptions
692 705 def heads(self, wire, branch=None):
693 706 repo = self._factory.repo(wire)
694 707 baseui = self._factory._create_config(wire['config'])
695 708 output = io.BytesIO()
696 709
697 710 def write(data, **unused_kwargs):
698 711 output.write(data)
699 712
700 713 baseui.write = write
701 714 if branch:
702 715 args = [branch]
703 716 else:
704 717 args = []
705 718 commands.heads(baseui, repo, template='{node} ', *args)
706 719
707 720 return output.getvalue()
708 721
709 722 @reraise_safe_exceptions
710 723 def ancestor(self, wire, revision1, revision2):
711 724 repo = self._factory.repo(wire)
712 725 changelog = repo.changelog
713 726 lookup = repo.lookup
714 727 a = changelog.ancestor(lookup(revision1), lookup(revision2))
715 728 return hex(a)
716 729
717 730 @reraise_safe_exceptions
718 731 def push(self, wire, revisions, dest_path, hooks=True,
719 732 push_branches=False):
720 733 repo = self._factory.repo(wire)
721 734 baseui = self._factory._create_config(wire['config'], hooks=hooks)
722 735 commands.push(baseui, repo, dest=dest_path, rev=revisions,
723 736 new_branch=push_branches)
724 737
725 738 @reraise_safe_exceptions
726 739 def merge(self, wire, revision):
727 740 repo = self._factory.repo(wire)
728 741 baseui = self._factory._create_config(wire['config'])
729 742 repo.ui.setconfig('ui', 'merge', 'internal:dump')
730 743
731 744 # In case sub repositories are used, mercurial prompts the user in
732 745 # case of merge conflicts or different sub repository sources. By
733 746 # setting the interactive flag to `False` mercurial doesn't prompt the
734 747 # user but instead uses a default value.
735 748 repo.ui.setconfig('ui', 'interactive', False)
736 749
737 750 commands.merge(baseui, repo, rev=revision)
738 751
739 752 @reraise_safe_exceptions
740 753 def commit(self, wire, message, username, close_branch=False):
741 754 repo = self._factory.repo(wire)
742 755 baseui = self._factory._create_config(wire['config'])
743 756 repo.ui.setconfig('ui', 'username', username)
744 757 commands.commit(baseui, repo, message=message, close_branch=close_branch)
745 758
746 759 @reraise_safe_exceptions
747 760 def rebase(self, wire, source=None, dest=None, abort=False):
748 761 repo = self._factory.repo(wire)
749 762 baseui = self._factory._create_config(wire['config'])
750 763 repo.ui.setconfig('ui', 'merge', 'internal:dump')
751 764 rebase.rebase(
752 765 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
753 766
754 767 @reraise_safe_exceptions
755 768 def bookmark(self, wire, bookmark, revision=None):
756 769 repo = self._factory.repo(wire)
757 770 baseui = self._factory._create_config(wire['config'])
758 771 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
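
On the Mercurial side, pull and sync_push now switch `ui.interactive` off on both the local repository's ui and the remote peer's ui before calling exchange.pull/exchange.push, so neither end can stop to ask a question. A rough sketch of the same pattern against the plain Mercurial API of that era (the vcsserver hgcompat wrappers are left out; `repo_path` and `remote_url` are placeholders):

from mercurial import exchange, hg, ui as uimod

def pull_without_prompts(repo_path, remote_url):
    # Build a ui object and disable interactive questions (credentials,
    # conflict prompts, ...) so a background sync can never block on input.
    base_ui = uimod.ui()
    base_ui.setconfig('ui', 'interactive', 'off', 'sync')

    repo = hg.repository(base_ui, repo_path)
    repo.ui.setconfig('ui', 'interactive', 'off', 'sync')

    # The remote peer gets the same treatment, matching the change above.
    remote = hg.peer(repo, {}, remote_url)
    remote.ui.setconfig('ui', 'interactive', 'off', 'sync')

    # Pull everything the remote advertises and report the changegroup result.
    return exchange.pull(repo, remote, heads=None, force=None).cgresult
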
@@ -1,72 +1,75 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
18
19 log = logging.getLogger(__name__)
17 20
18 21
19 22 def safe_int(val, default=None):
20 23 """
21 24 Returns int() of val if val is not convertable to int use default
22 25 instead
23 26
24 27 :param val:
25 28 :param default:
26 29 """
27 30
28 31 try:
29 32 val = int(val)
30 33 except (ValueError, TypeError):
31 34 val = default
32 35
33 36 return val
34 37
35 38
36 39 def safe_str(unicode_, to_encoding=['utf8']):
37 40 """
38 41 safe str function. Does a few tricks to turn unicode_ into a string
39 42
40 43 In case of UnicodeEncodeError, we try to return it with the encoding detected
41 44 by the chardet library; if that fails, fall back to a string with errors replaced
42 45
43 46 :param unicode_: unicode to encode
44 47 :rtype: str
45 48 :returns: str object
46 49 """
47 50
48 51 # if it's not a basestring, cast to str
49 52 if not isinstance(unicode_, basestring):
50 53 return str(unicode_)
51 54
52 55 if isinstance(unicode_, str):
53 56 return unicode_
54 57
55 58 if not isinstance(to_encoding, (list, tuple)):
56 59 to_encoding = [to_encoding]
57 60
58 61 for enc in to_encoding:
59 62 try:
60 63 return unicode_.encode(enc)
61 64 except UnicodeEncodeError:
62 65 pass
63 66
64 67 try:
65 68 import chardet
66 69 encoding = chardet.detect(unicode_)['encoding']
67 70 if encoding is None:
68 71 raise UnicodeEncodeError()
69 72
70 73 return unicode_.encode(encoding)
71 74 except (ImportError, UnicodeEncodeError):
72 75 return unicode_.encode(to_encoding[0], 'replace')