##// END OF EJS Templates
git: use True/False on check git repo path call. This prevents spamming...
marcink -
r346:b1d2c5d6 default
parent child Browse files
Show More
@@ -1,645 +1,650 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 import traceback
23 24 import urllib
24 25 import urllib2
25 26 from functools import wraps
26 27
27 28 from dulwich import index, objects
28 29 from dulwich.client import HttpGitClient, LocalGitClient
29 30 from dulwich.errors import (
30 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 32 MissingCommitError, ObjectMissing, HangupException,
32 33 UnexpectedCommandError)
33 34 from dulwich.repo import Repo as DulwichRepo, Tag
34 35 from dulwich.server import update_server_info
35 36
36 37 from vcsserver import exceptions, settings, subprocessio
37 38 from vcsserver.utils import safe_str
38 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 40 from vcsserver.hgcompat import (
40 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 42 from vcsserver.git_lfs.lib import LFSOidStore
42 43
43 44 DIR_STAT = stat.S_IFDIR
44 45 FILE_MODE = stat.S_IFMT
45 46 GIT_LINK = objects.S_IFGITLINK
46 47
47 48 log = logging.getLogger(__name__)
48 49
49 50
50 51 def reraise_safe_exceptions(func):
51 52 """Converts Dulwich exceptions to something neutral."""
52 53 @wraps(func)
53 54 def wrapper(*args, **kwargs):
54 55 try:
55 56 return func(*args, **kwargs)
56 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 58 ObjectMissing) as e:
58 59 raise exceptions.LookupException(e.message)
59 60 except (HangupException, UnexpectedCommandError) as e:
60 61 raise exceptions.VcsException(e.message)
61 62 except Exception as e:
62 63 # NOTE(marcink): becuase of how dulwich handles some exceptions
63 64 # (KeyError on empty repos), we cannot track this and catch all
64 65 # exceptions, it's an exceptions from other handlers
65 66 #if not hasattr(e, '_vcs_kind'):
66 67 #log.exception("Unhandled exception in git remote call")
67 68 #raise_from_original(exceptions.UnhandledException)
68 69 raise
69 70 return wrapper
70 71
71 72
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.

    TODO: mikhail: please check if we need this wrapper after updating dulwich
    to 0.12.0 +
    """
    def __del__(self):
        # object_store may be absent when __init__ failed half-way
        if not hasattr(self, 'object_store'):
            return
        self.close()
86 87
87 88
class GitFactory(RepoFactory):
    """Factory producing :class:`Repo` instances from wire parameters."""

    def _create_repo(self, wire, create):
        # dulwich expects unicode paths
        return Repo(str_to_dulwich(wire['path']))
93 94
94 95
95 96 class GitRemote(object):
96 97
    def __init__(self, factory):
        # factory producing dulwich ``Repo`` objects from wire dicts
        self._factory = factory

        # maps attribute names accepted by ``bulk_request`` to the method
        # that resolves each of them
        self._bulk_methods = {
            "author": self.commit_attribute,
            "date": self.get_object_attrs,
            "message": self.commit_attribute,
            "parents": self.commit_attribute,
            "_commit": self.revision,
        }
107 108
108 109 def _wire_to_config(self, wire):
109 110 if 'config' in wire:
110 111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 112 return {}
112 113
113 114 def _assign_ref(self, wire, ref, commit_id):
114 115 repo = self._factory.repo(wire)
115 116 repo[ref] = commit_id
116 117
117 118 @reraise_safe_exceptions
118 119 def add_object(self, wire, content):
119 120 repo = self._factory.repo(wire)
120 121 blob = objects.Blob()
121 122 blob.set_raw_string(content)
122 123 repo.object_store.add_object(blob)
123 124 return blob.id
124 125
125 126 @reraise_safe_exceptions
126 127 def assert_correct_path(self, wire):
128 path = wire.get('path')
127 129 try:
128 130 self._factory.repo(wire)
129 131 except NotGitRepository as e:
130 # Exception can contain unicode which we convert
131 raise exceptions.AbortException(repr(e))
132 tb = traceback.format_exc()
133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 return False
135
136 return True
132 137
133 138 @reraise_safe_exceptions
134 139 def bare(self, wire):
135 140 repo = self._factory.repo(wire)
136 141 return repo.bare
137 142
138 143 @reraise_safe_exceptions
139 144 def blob_as_pretty_string(self, wire, sha):
140 145 repo = self._factory.repo(wire)
141 146 return repo[sha].as_pretty_string()
142 147
143 148 @reraise_safe_exceptions
144 149 def blob_raw_length(self, wire, sha):
145 150 repo = self._factory.repo(wire)
146 151 blob = repo[sha]
147 152 return blob.raw_length()
148 153
149 154 def _parse_lfs_pointer(self, raw_content):
150 155
151 156 spec_string = 'version https://git-lfs.github.com/spec'
152 157 if raw_content and raw_content.startswith(spec_string):
153 158 pattern = re.compile(r"""
154 159 (?:\n)?
155 160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 162 ^size[ ](?P<oid_size>[0-9]+)\n
158 163 (?:\n)?
159 164 """, re.VERBOSE | re.MULTILINE)
160 165 match = pattern.match(raw_content)
161 166 if match:
162 167 return match.groupdict()
163 168
164 169 return {}
165 170
166 171 @reraise_safe_exceptions
167 172 def is_large_file(self, wire, sha):
168 173 repo = self._factory.repo(wire)
169 174 blob = repo[sha]
170 175 return self._parse_lfs_pointer(blob.as_raw_string())
171 176
172 177 @reraise_safe_exceptions
173 178 def in_largefiles_store(self, wire, oid):
174 179 repo = self._factory.repo(wire)
175 180 conf = self._wire_to_config(wire)
176 181
177 182 store_location = conf.get('vcs_git_lfs_store_location')
178 183 if store_location:
179 184 repo_name = repo.path
180 185 store = LFSOidStore(
181 186 oid=oid, repo=repo_name, store_location=store_location)
182 187 return store.has_oid()
183 188
184 189 return False
185 190
186 191 @reraise_safe_exceptions
187 192 def store_path(self, wire, oid):
188 193 repo = self._factory.repo(wire)
189 194 conf = self._wire_to_config(wire)
190 195
191 196 store_location = conf.get('vcs_git_lfs_store_location')
192 197 if store_location:
193 198 repo_name = repo.path
194 199 store = LFSOidStore(
195 200 oid=oid, repo=repo_name, store_location=store_location)
196 201 return store.oid_path
197 202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198 203
    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """Resolve several attributes of commit ``rev`` in a single call.

        :param pre_load: iterable of attribute names; each must be a key
            of ``self._bulk_methods``.
        :returns: dict mapping attribute name to resolved value.
        :raises exceptions.VcsException: for unknown attribute names.
        """
        result = {}
        for attr in pre_load:
            try:
                method = self._bulk_methods[attr]
                args = [wire, rev]
                if attr == "date":
                    # date is derived from two raw commit fields
                    args.extend(["commit_time", "commit_timezone"])
                elif attr in ["author", "message", "parents"]:
                    args.append(attr)
                result[attr] = method(*args)
            except KeyError:
                # NOTE(review): this also swallows KeyErrors raised inside
                # ``method`` itself (e.g. an unknown rev) — confirm intended
                raise exceptions.VcsException(
                    "Unknown bulk attribute: %s" % attr)
        return result
215 220
216 221 def _build_opener(self, url):
217 222 handlers = []
218 223 url_obj = url_parser(url)
219 224 _, authinfo = url_obj.authinfo()
220 225
221 226 if authinfo:
222 227 # create a password manager
223 228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
224 229 passmgr.add_password(*authinfo)
225 230
226 231 handlers.extend((httpbasicauthhandler(passmgr),
227 232 httpdigestauthhandler(passmgr)))
228 233
229 234 return urllib2.build_opener(*handlers)
230 235
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify that ``url`` points at a reachable git repository.

        Probes the smart-HTTP ``info/refs`` endpoint and also recognises
        the old dumb-protocol answer. Credentials and query strings are
        masked before logging.

        :returns: True on success.
        :raises exceptions.URLError: when the URL cannot be opened or the
            response does not look like a git repository.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # mask password / query string before the URL hits the logs
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        # smart-HTTP discovery request
        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
273 278
274 279 @reraise_safe_exceptions
275 280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
276 281 remote_refs = self.fetch(wire, url, apply_refs=False)
277 282 repo = self._factory.repo(wire)
278 283 if isinstance(valid_refs, list):
279 284 valid_refs = tuple(valid_refs)
280 285
281 286 for k in remote_refs:
282 287 # only parse heads/tags and skip so called deferred tags
283 288 if k.startswith(valid_refs) and not k.endswith(deferred):
284 289 repo[k] = remote_refs[k]
285 290
286 291 if update_after_clone:
287 292 # we want to checkout HEAD
288 293 repo["HEAD"] = remote_refs["HEAD"]
289 294 index.build_index_from_tree(repo.path, repo.index_path(),
290 295 repo.object_store, repo["HEAD"].tree)
291 296
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """Create a commit on ``branch`` from node additions and removals.

        :param commit_data: attribute dict applied verbatim onto the new
            ``objects.Commit`` (author, message, timestamps, ...).
        :param commit_tree: sha of the tree to start from; falsy means an
            empty tree.
        :param updated: list of dicts with ``path``, ``node_path``,
            ``content`` and ``mode`` describing added/changed files.
        :param removed: list of node paths to delete.
        :returns: sha of the newly created commit.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(
                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        # point the branch ref at the new commit
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
391 396
    @reraise_safe_exceptions
    def fetch(self, wire, url, apply_refs=True, refs=None):
        """Fetch from ``url`` into the wire repository.

        :param apply_refs: when True the fetched refs are written into the
            local repo (and nothing is returned); when False the remote
            refs dict is returned instead.
        :param refs: optional list of ref names limiting what is fetched;
            when given, HEAD is pointed at the last requested ref.
        :raises exceptions.AbortException: when ``url`` is not a git repo.
        """
        if url != 'default' and '://' not in url:
            # plain filesystem path — use the local transport
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            PEELED_REF_MARKER = '^{}'
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.info("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

            # TODO: mikhail: should we return remote_refs here to be
            # consistent?
        else:
            return remote_refs
443 448
444 449 @reraise_safe_exceptions
445 450 def get_remote_refs(self, wire, url):
446 451 repo = Repo(url)
447 452 return repo.get_refs()
448 453
449 454 @reraise_safe_exceptions
450 455 def get_description(self, wire):
451 456 repo = self._factory.repo(wire)
452 457 return repo.get_description()
453 458
454 459 @reraise_safe_exceptions
455 460 def get_file_history(self, wire, file_path, commit_id, limit):
456 461 repo = self._factory.repo(wire)
457 462 include = [commit_id]
458 463 paths = [file_path]
459 464
460 465 walker = repo.get_walker(include, paths=paths, max_entries=limit)
461 466 return [x.commit.id for x in walker]
462 467
463 468 @reraise_safe_exceptions
464 469 def get_missing_revs(self, wire, rev1, rev2, path2):
465 470 repo = self._factory.repo(wire)
466 471 LocalGitClient(thin_packs=False).fetch(path2, repo)
467 472
468 473 wire_remote = wire.copy()
469 474 wire_remote['path'] = path2
470 475 repo_remote = self._factory.repo(wire_remote)
471 476 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
472 477
473 478 revs = [
474 479 x.commit.id
475 480 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
476 481 return revs
477 482
478 483 @reraise_safe_exceptions
479 484 def get_object(self, wire, sha):
480 485 repo = self._factory.repo(wire)
481 486 obj = repo.get_object(sha)
482 487 commit_id = obj.id
483 488
484 489 if isinstance(obj, Tag):
485 490 commit_id = obj.object[1]
486 491
487 492 return {
488 493 'id': obj.id,
489 494 'type': obj.type_name,
490 495 'commit_id': commit_id
491 496 }
492 497
493 498 @reraise_safe_exceptions
494 499 def get_object_attrs(self, wire, sha, *attrs):
495 500 repo = self._factory.repo(wire)
496 501 obj = repo.get_object(sha)
497 502 return list(getattr(obj, a) for a in attrs)
498 503
499 504 @reraise_safe_exceptions
500 505 def get_refs(self, wire):
501 506 repo = self._factory.repo(wire)
502 507 result = {}
503 508 for ref, sha in repo.refs.as_dict().items():
504 509 peeled_sha = repo.get_peeled(ref)
505 510 result[ref] = peeled_sha
506 511 return result
507 512
508 513 @reraise_safe_exceptions
509 514 def get_refs_path(self, wire):
510 515 repo = self._factory.repo(wire)
511 516 return repo.refs.path
512 517
513 518 @reraise_safe_exceptions
514 519 def head(self, wire):
515 520 repo = self._factory.repo(wire)
516 521 return repo.head()
517 522
518 523 @reraise_safe_exceptions
519 524 def init(self, wire):
520 525 repo_path = str_to_dulwich(wire['path'])
521 526 self.repo = Repo.init(repo_path)
522 527
523 528 @reraise_safe_exceptions
524 529 def init_bare(self, wire):
525 530 repo_path = str_to_dulwich(wire['path'])
526 531 self.repo = Repo.init_bare(repo_path)
527 532
528 533 @reraise_safe_exceptions
529 534 def revision(self, wire, rev):
530 535 repo = self._factory.repo(wire)
531 536 obj = repo[rev]
532 537 obj_data = {
533 538 'id': obj.id,
534 539 }
535 540 try:
536 541 obj_data['tree'] = obj.tree
537 542 except AttributeError:
538 543 pass
539 544 return obj_data
540 545
541 546 @reraise_safe_exceptions
542 547 def commit_attribute(self, wire, rev, attr):
543 548 repo = self._factory.repo(wire)
544 549 obj = repo[rev]
545 550 return getattr(obj, attr)
546 551
547 552 @reraise_safe_exceptions
548 553 def set_refs(self, wire, key, value):
549 554 repo = self._factory.repo(wire)
550 555 repo.refs[key] = value
551 556
552 557 @reraise_safe_exceptions
553 558 def remove_ref(self, wire, key):
554 559 repo = self._factory.repo(wire)
555 560 del repo.refs[key]
556 561
557 562 @reraise_safe_exceptions
558 563 def tree_changes(self, wire, source_id, target_id):
559 564 repo = self._factory.repo(wire)
560 565 source = repo[source_id].tree if source_id else None
561 566 target = repo[target_id].tree
562 567 result = repo.object_store.tree_changes(source, target)
563 568 return list(result)
564 569
565 570 @reraise_safe_exceptions
566 571 def tree_items(self, wire, tree_id):
567 572 repo = self._factory.repo(wire)
568 573 tree = repo[tree_id]
569 574
570 575 result = []
571 576 for item in tree.iteritems():
572 577 item_sha = item.sha
573 578 item_mode = item.mode
574 579
575 580 if FILE_MODE(item_mode) == GIT_LINK:
576 581 item_type = "link"
577 582 else:
578 583 item_type = repo[item_sha].type_name
579 584
580 585 result.append((item.path, item_mode, item_sha, item_type))
581 586 return result
582 587
583 588 @reraise_safe_exceptions
584 589 def update_server_info(self, wire):
585 590 repo = self._factory.repo(wire)
586 591 update_server_info(repo)
587 592
588 593 @reraise_safe_exceptions
589 594 def discover_git_version(self):
590 595 stdout, _ = self.run_git_command(
591 596 {}, ['--version'], _bare=True, _safe=True)
592 597 prefix = 'git version'
593 598 if stdout.startswith(prefix):
594 599 stdout = stdout[len(prefix):]
595 600 return stdout.strip()
596 601
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """Execute the git binary with ``cmd`` as its argument list.

        Recognised option flags (consumed here, not passed to subprocess):
          ``_bare``     - skip the default ``-c core.quotepath=false`` opts
          ``_safe``     - on failure return ('', err) instead of raising
          ``extra_env`` - dict merged into the subprocess environment

        :returns: tuple of (stdout, stderr) strings.
        :raises exceptions.VcsException: when the command cannot be run and
            ``_safe`` was not requested.
        """
        path = wire.get('path', None)

        # run inside the repository when we know its path
        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        # keep runs reproducible regardless of the user's global config
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

        try:
            _opts = {'env': gitenv, 'shell': False}
            # remaining opts are forwarded to the chunker verbatim
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            # joining the chunker consumes the whole process output
            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n" % (cmd, err))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException(tb_err)
639 644
640 645
def str_to_dulwich(value):
    """
    Decode ``value`` into a unicode object using the wire encoding.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    return value.decode(settings.WIRE_ENCODING)
General Comments 0
You need to be logged in to leave comments. Login now