git: use iterative fetch to prevent errors about too many arguments on very large repositories.
marcink
r622:df15fe03 default
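The idea of the change, as a minimal standalone sketch (not the actual vcsserver code path): split the collected refspecs into fixed-size chunks and run one `git fetch` per chunk, so that a repository with a huge number of refs never builds a command line long enough to hit the operating system's argument-length limit. The function name `chunked_fetch`, the `repo_path`/`url`/`fetch_refs`/`chunk_size` parameters and the use of `subprocess` are illustrative assumptions; the real implementation goes through `run_git_command` with extra config options, as shown in the diff below.

import subprocess

import more_itertools


def chunked_fetch(repo_path, url, fetch_refs, chunk_size=1024 * 4):
    # Fetch at most `chunk_size` refspecs per `git fetch` invocation so that
    # very large repositories do not exceed the OS limit on command-line
    # length ("Argument list too long").
    for chunk in more_itertools.chunked(fetch_refs, chunk_size):
        refspecs = list(chunk)
        subprocess.check_call(
            ['git', 'fetch', url, '--force', '--prune', '--'] + refspecs,
            cwd=repo_path)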
@@ -1,728 +1,732 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import collections
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 import more_itertools
28 29 from dulwich import index, objects
29 30 from dulwich.client import HttpGitClient, LocalGitClient
30 31 from dulwich.errors import (
31 32 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 33 MissingCommitError, ObjectMissing, HangupException,
33 34 UnexpectedCommandError)
34 35 from dulwich.repo import Repo as DulwichRepo, Tag
35 36 from dulwich.server import update_server_info
36 37
37 38 from vcsserver import exceptions, settings, subprocessio
38 39 from vcsserver.utils import safe_str
39 40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 41 from vcsserver.hgcompat import (
41 42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 43 from vcsserver.git_lfs.lib import LFSOidStore
43 44
44 45 DIR_STAT = stat.S_IFDIR
45 46 FILE_MODE = stat.S_IFMT
46 47 GIT_LINK = objects.S_IFGITLINK
47 48
48 49 log = logging.getLogger(__name__)
49 50
50 51
51 52 def reraise_safe_exceptions(func):
52 53 """Converts Dulwich exceptions to something neutral."""
53 54 @wraps(func)
54 55 def wrapper(*args, **kwargs):
55 56 try:
56 57 return func(*args, **kwargs)
57 58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 59 ObjectMissing) as e:
59 60 exc = exceptions.LookupException(e)
60 61 raise exc(e)
61 62 except (HangupException, UnexpectedCommandError) as e:
62 63 exc = exceptions.VcsException(e)
63 64 raise exc(e)
64 65 except Exception as e:
65 66 # NOTE(marcink): because of how dulwich handles some exceptions
66 67 # (KeyError on empty repos), we cannot track this and catch all
67 68 # exceptions; these may be exceptions raised by other handlers
68 69 #if not hasattr(e, '_vcs_kind'):
69 70 #log.exception("Unhandled exception in git remote call")
70 71 #raise_from_original(exceptions.UnhandledException)
71 72 raise
72 73 return wrapper
73 74
74 75
75 76 class Repo(DulwichRepo):
76 77 """
77 78 A wrapper for dulwich Repo class.
78 79
79 80 Dulwich sometimes keeps .idx file descriptors open, which leads to a
80 81 "Too many open files" error. We need to close all opened file descriptors
81 82 once the repo object is destroyed.
82 83
83 84 TODO: mikhail: please check if we need this wrapper after updating dulwich
84 85 to 0.12.0 +
85 86 """
86 87 def __del__(self):
87 88 if hasattr(self, 'object_store'):
88 89 self.close()
89 90
90 91
91 92 class GitFactory(RepoFactory):
92 93 repo_type = 'git'
93 94
94 95 def _create_repo(self, wire, create):
95 96 repo_path = str_to_dulwich(wire['path'])
96 97 return Repo(repo_path)
97 98
98 99
99 100 class GitRemote(object):
100 101
101 102 def __init__(self, factory):
102 103 self._factory = factory
103 104 self.peeled_ref_marker = '^{}'
104 105 self._bulk_methods = {
105 106 "author": self.commit_attribute,
106 107 "date": self.get_object_attrs,
107 108 "message": self.commit_attribute,
108 109 "parents": self.commit_attribute,
109 110 "_commit": self.revision,
110 111 }
111 112
112 113 def _wire_to_config(self, wire):
113 114 if 'config' in wire:
114 115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
115 116 return {}
116 117
117 118 def _assign_ref(self, wire, ref, commit_id):
118 119 repo = self._factory.repo(wire)
119 120 repo[ref] = commit_id
120 121
121 122 def _remote_conf(self, config):
122 123 params = [
123 124 '-c', 'core.askpass=""',
124 125 ]
125 126 ssl_cert_dir = config.get('vcs_ssl_dir')
126 127 if ssl_cert_dir:
127 128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
128 129 return params
129 130
130 131 @reraise_safe_exceptions
131 132 def add_object(self, wire, content):
132 133 repo = self._factory.repo(wire)
133 134 blob = objects.Blob()
134 135 blob.set_raw_string(content)
135 136 repo.object_store.add_object(blob)
136 137 return blob.id
137 138
138 139 @reraise_safe_exceptions
139 140 def assert_correct_path(self, wire):
140 141 path = wire.get('path')
141 142 try:
142 143 self._factory.repo(wire)
143 144 except NotGitRepository as e:
144 145 tb = traceback.format_exc()
145 146 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
146 147 return False
147 148
148 149 return True
149 150
150 151 @reraise_safe_exceptions
151 152 def bare(self, wire):
152 153 repo = self._factory.repo(wire)
153 154 return repo.bare
154 155
155 156 @reraise_safe_exceptions
156 157 def blob_as_pretty_string(self, wire, sha):
157 158 repo = self._factory.repo(wire)
158 159 return repo[sha].as_pretty_string()
159 160
160 161 @reraise_safe_exceptions
161 162 def blob_raw_length(self, wire, sha):
162 163 repo = self._factory.repo(wire)
163 164 blob = repo[sha]
164 165 return blob.raw_length()
165 166
166 167 def _parse_lfs_pointer(self, raw_content):
167 168
168 169 spec_string = 'version https://git-lfs.github.com/spec'
169 170 if raw_content and raw_content.startswith(spec_string):
170 171 pattern = re.compile(r"""
171 172 (?:\n)?
172 173 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
173 174 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
174 175 ^size[ ](?P<oid_size>[0-9]+)\n
175 176 (?:\n)?
176 177 """, re.VERBOSE | re.MULTILINE)
177 178 match = pattern.match(raw_content)
178 179 if match:
179 180 return match.groupdict()
180 181
181 182 return {}
182 183
183 184 @reraise_safe_exceptions
184 185 def is_large_file(self, wire, sha):
185 186 repo = self._factory.repo(wire)
186 187 blob = repo[sha]
187 188 return self._parse_lfs_pointer(blob.as_raw_string())
188 189
189 190 @reraise_safe_exceptions
190 191 def in_largefiles_store(self, wire, oid):
191 192 repo = self._factory.repo(wire)
192 193 conf = self._wire_to_config(wire)
193 194
194 195 store_location = conf.get('vcs_git_lfs_store_location')
195 196 if store_location:
196 197 repo_name = repo.path
197 198 store = LFSOidStore(
198 199 oid=oid, repo=repo_name, store_location=store_location)
199 200 return store.has_oid()
200 201
201 202 return False
202 203
203 204 @reraise_safe_exceptions
204 205 def store_path(self, wire, oid):
205 206 repo = self._factory.repo(wire)
206 207 conf = self._wire_to_config(wire)
207 208
208 209 store_location = conf.get('vcs_git_lfs_store_location')
209 210 if store_location:
210 211 repo_name = repo.path
211 212 store = LFSOidStore(
212 213 oid=oid, repo=repo_name, store_location=store_location)
213 214 return store.oid_path
214 215 raise ValueError('Unable to fetch oid with path {}'.format(oid))
215 216
216 217 @reraise_safe_exceptions
217 218 def bulk_request(self, wire, rev, pre_load):
218 219 result = {}
219 220 for attr in pre_load:
220 221 try:
221 222 method = self._bulk_methods[attr]
222 223 args = [wire, rev]
223 224 if attr == "date":
224 225 args.extend(["commit_time", "commit_timezone"])
225 226 elif attr in ["author", "message", "parents"]:
226 227 args.append(attr)
227 228 result[attr] = method(*args)
228 229 except KeyError as e:
229 230 raise exceptions.VcsException(e)(
230 231 "Unknown bulk attribute: %s" % attr)
231 232 return result
232 233
233 234 def _build_opener(self, url):
234 235 handlers = []
235 236 url_obj = url_parser(url)
236 237 _, authinfo = url_obj.authinfo()
237 238
238 239 if authinfo:
239 240 # create a password manager
240 241 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
241 242 passmgr.add_password(*authinfo)
242 243
243 244 handlers.extend((httpbasicauthhandler(passmgr),
244 245 httpdigestauthhandler(passmgr)))
245 246
246 247 return urllib2.build_opener(*handlers)
247 248
248 249 @reraise_safe_exceptions
249 250 def check_url(self, url, config):
250 251 url_obj = url_parser(url)
251 252 test_uri, _ = url_obj.authinfo()
252 253 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
253 254 url_obj.query = obfuscate_qs(url_obj.query)
254 255 cleaned_uri = str(url_obj)
255 256 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
256 257
257 258 if not test_uri.endswith('info/refs'):
258 259 test_uri = test_uri.rstrip('/') + '/info/refs'
259 260
260 261 o = self._build_opener(url)
261 262 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
262 263
263 264 q = {"service": 'git-upload-pack'}
264 265 qs = '?%s' % urllib.urlencode(q)
265 266 cu = "%s%s" % (test_uri, qs)
266 267 req = urllib2.Request(cu, None, {})
267 268
268 269 try:
269 270 log.debug("Trying to open URL %s", cleaned_uri)
270 271 resp = o.open(req)
271 272 if resp.code != 200:
272 273 raise exceptions.URLError()('Return Code is not 200')
273 274 except Exception as e:
274 275 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
275 276 # means it cannot be cloned
276 277 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
277 278
278 279 # now detect if it's a proper git repo
279 280 gitdata = resp.read()
280 281 if 'service=git-upload-pack' in gitdata:
281 282 pass
282 283 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
283 284 # old-style git can return some other format!
284 285 pass
285 286 else:
286 287 raise exceptions.URLError()(
287 288 "url [%s] does not look like a git repository" % (cleaned_uri,))
288 289
289 290 return True
290 291
291 292 @reraise_safe_exceptions
292 293 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
293 294 # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
294 295 remote_refs = self.pull(wire, url, apply_refs=False)
295 296 repo = self._factory.repo(wire)
296 297 if isinstance(valid_refs, list):
297 298 valid_refs = tuple(valid_refs)
298 299
299 300 for k in remote_refs:
300 301 # only parse heads/tags and skip so called deferred tags
301 302 if k.startswith(valid_refs) and not k.endswith(deferred):
302 303 repo[k] = remote_refs[k]
303 304
304 305 if update_after_clone:
305 306 # we want to checkout HEAD
306 307 repo["HEAD"] = remote_refs["HEAD"]
307 308 index.build_index_from_tree(repo.path, repo.index_path(),
308 309 repo.object_store, repo["HEAD"].tree)
309 310
310 311 # TODO: this is quite complex, check if that can be simplified
311 312 @reraise_safe_exceptions
312 313 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
313 314 repo = self._factory.repo(wire)
314 315 object_store = repo.object_store
315 316
316 317 # Create tree and populate it with blobs
317 318 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
318 319
319 320 for node in updated:
320 321 # Compute subdirs if needed
321 322 dirpath, nodename = vcspath.split(node['path'])
322 323 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
323 324 parent = commit_tree
324 325 ancestors = [('', parent)]
325 326
326 327 # Tries to dig for the deepest existing tree
327 328 while dirnames:
328 329 curdir = dirnames.pop(0)
329 330 try:
330 331 dir_id = parent[curdir][1]
331 332 except KeyError:
332 333 # put curdir back into dirnames and stop
333 334 dirnames.insert(0, curdir)
334 335 break
335 336 else:
336 337 # If found, update parent
337 338 parent = repo[dir_id]
338 339 ancestors.append((curdir, parent))
339 340 # Now parent is deepest existing tree and we need to create
340 341 # subtrees for dirnames (in reverse order)
341 342 # [this only applies for nodes from added]
342 343 new_trees = []
343 344
344 345 blob = objects.Blob.from_string(node['content'])
345 346
346 347 if dirnames:
347 348 # If there are trees which should be created we need to build
348 349 # them now (in reverse order)
349 350 reversed_dirnames = list(reversed(dirnames))
350 351 curtree = objects.Tree()
351 352 curtree[node['node_path']] = node['mode'], blob.id
352 353 new_trees.append(curtree)
353 354 for dirname in reversed_dirnames[:-1]:
354 355 newtree = objects.Tree()
355 356 newtree[dirname] = (DIR_STAT, curtree.id)
356 357 new_trees.append(newtree)
357 358 curtree = newtree
358 359 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
359 360 else:
360 361 parent.add(
361 362 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
362 363
363 364 new_trees.append(parent)
364 365 # Update ancestors
365 366 reversed_ancestors = reversed(
366 367 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
367 368 for parent, tree, path in reversed_ancestors:
368 369 parent[path] = (DIR_STAT, tree.id)
369 370 object_store.add_object(tree)
370 371
371 372 object_store.add_object(blob)
372 373 for tree in new_trees:
373 374 object_store.add_object(tree)
374 375
375 376 for node_path in removed:
376 377 paths = node_path.split('/')
377 378 tree = commit_tree
378 379 trees = [tree]
379 380 # Traverse deep into the forest...
380 381 for path in paths:
381 382 try:
382 383 obj = repo[tree[path][1]]
383 384 if isinstance(obj, objects.Tree):
384 385 trees.append(obj)
385 386 tree = obj
386 387 except KeyError:
387 388 break
388 389 # Cut down the blob and all rotten trees on the way back...
389 390 for path, tree in reversed(zip(paths, trees)):
390 391 del tree[path]
391 392 if tree:
392 393 # This tree still has elements - don't remove it or any
393 394 # of its parents
394 395 break
395 396
396 397 object_store.add_object(commit_tree)
397 398
398 399 # Create commit
399 400 commit = objects.Commit()
400 401 commit.tree = commit_tree.id
401 402 for k, v in commit_data.iteritems():
402 403 setattr(commit, k, v)
403 404 object_store.add_object(commit)
404 405
405 406 ref = 'refs/heads/%s' % branch
406 407 repo.refs[ref] = commit.id
407 408
408 409 return commit.id
409 410
410 411 @reraise_safe_exceptions
411 412 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
412 413 if url != 'default' and '://' not in url:
413 414 client = LocalGitClient(url)
414 415 else:
415 416 url_obj = url_parser(url)
416 417 o = self._build_opener(url)
417 418 url, _ = url_obj.authinfo()
418 419 client = HttpGitClient(base_url=url, opener=o)
419 420 repo = self._factory.repo(wire)
420 421
421 422 determine_wants = repo.object_store.determine_wants_all
422 423 if refs:
423 424 def determine_wants_requested(references):
424 425 return [references[r] for r in references if r in refs]
425 426 determine_wants = determine_wants_requested
426 427
427 428 try:
428 429 remote_refs = client.fetch(
429 430 path=url, target=repo, determine_wants=determine_wants)
430 431 except NotGitRepository as e:
431 432 log.warning(
432 433 'Trying to fetch from "%s" failed, not a Git repository.', url)
433 434 # Exception can contain unicode which we convert
434 435 raise exceptions.AbortException(e)(repr(e))
435 436
436 437 # mikhail: client.fetch() returns all the remote refs, but fetches only
437 438 # refs filtered by the `determine_wants` function. We need to filter the result
438 439 # as well
439 440 if refs:
440 441 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
441 442
442 443 if apply_refs:
443 444 # TODO: johbo: Needs proper test coverage with a git repository
444 445 # that contains a tag object, so that we would end up with
445 446 # a peeled ref at this point.
446 447 for k in remote_refs:
447 448 if k.endswith(self.peeled_ref_marker):
448 449 log.debug("Skipping peeled reference %s", k)
449 450 continue
450 451 repo[k] = remote_refs[k]
451 452
452 453 if refs and not update_after:
453 454 # mikhail: explicitly set the head to the last ref.
454 455 repo['HEAD'] = remote_refs[refs[-1]]
455 456
456 457 if update_after:
457 458 # we want to checkout HEAD
458 459 repo["HEAD"] = remote_refs["HEAD"]
459 460 index.build_index_from_tree(repo.path, repo.index_path(),
460 461 repo.object_store, repo["HEAD"].tree)
461 462 return remote_refs
462 463
463 464 @reraise_safe_exceptions
464 465 def sync_fetch(self, wire, url, refs=None):
465 466 repo = self._factory.repo(wire)
466 467 if refs and not isinstance(refs, (list, tuple)):
467 468 refs = [refs]
468 469 config = self._wire_to_config(wire)
469 470 # get all remote refs we'll use to fetch later
470 471 output, __ = self.run_git_command(
471 472 wire, ['ls-remote', url], fail_on_stderr=False,
472 473 _copts=self._remote_conf(config),
473 474 extra_env={'GIT_TERMINAL_PROMPT': '0'})
474 475
475 476 remote_refs = collections.OrderedDict()
476 477 fetch_refs = []
477 478
478 479 for ref_line in output.splitlines():
479 480 sha, ref = ref_line.split('\t')
480 481 sha = sha.strip()
481 482 if ref in remote_refs:
482 483 # duplicate, skip
483 484 continue
484 485 if ref.endswith(self.peeled_ref_marker):
485 486 log.debug("Skipping peeled reference %s", ref)
486 487 continue
487 488 # don't sync HEAD
488 489 if ref in ['HEAD']:
489 490 continue
490 491
491 492 remote_refs[ref] = sha
492 493
493 494 if refs and sha in refs:
494 495 # we filter fetch using our specified refs
495 496 fetch_refs.append('{}:{}'.format(ref, ref))
496 497 elif not refs:
497 498 fetch_refs.append('{}:{}'.format(ref, ref))
498
499 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
499 500 if fetch_refs:
500 _out, _err = self.run_git_command(
501 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs,
502 fail_on_stderr=False,
503 _copts=self._remote_conf(config),
504 extra_env={'GIT_TERMINAL_PROMPT': '0'})
501 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
502 fetch_refs_chunks = list(chunk)
503 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
504 _out, _err = self.run_git_command(
505 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
506 fail_on_stderr=False,
507 _copts=self._remote_conf(config),
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
505 509
506 510 return remote_refs
507 511
508 512 @reraise_safe_exceptions
509 513 def sync_push(self, wire, url, refs=None):
510 514 if not self.check_url(url, wire):
511 515 return
512 516 config = self._wire_to_config(wire)
513 517 repo = self._factory.repo(wire)
514 518 self.run_git_command(
515 519 wire, ['push', url, '--mirror'], fail_on_stderr=False,
516 520 _copts=self._remote_conf(config),
517 521 extra_env={'GIT_TERMINAL_PROMPT': '0'})
518 522
519 523 @reraise_safe_exceptions
520 524 def get_remote_refs(self, wire, url):
521 525 repo = Repo(url)
522 526 return repo.get_refs()
523 527
524 528 @reraise_safe_exceptions
525 529 def get_description(self, wire):
526 530 repo = self._factory.repo(wire)
527 531 return repo.get_description()
528 532
529 533 @reraise_safe_exceptions
530 534 def get_missing_revs(self, wire, rev1, rev2, path2):
531 535 repo = self._factory.repo(wire)
532 536 LocalGitClient(thin_packs=False).fetch(path2, repo)
533 537
534 538 wire_remote = wire.copy()
535 539 wire_remote['path'] = path2
536 540 repo_remote = self._factory.repo(wire_remote)
537 541 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
538 542
539 543 revs = [
540 544 x.commit.id
541 545 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
542 546 return revs
543 547
544 548 @reraise_safe_exceptions
545 549 def get_object(self, wire, sha):
546 550 repo = self._factory.repo(wire)
547 551 obj = repo.get_object(sha)
548 552 commit_id = obj.id
549 553
550 554 if isinstance(obj, Tag):
551 555 commit_id = obj.object[1]
552 556
553 557 return {
554 558 'id': obj.id,
555 559 'type': obj.type_name,
556 560 'commit_id': commit_id
557 561 }
558 562
559 563 @reraise_safe_exceptions
560 564 def get_object_attrs(self, wire, sha, *attrs):
561 565 repo = self._factory.repo(wire)
562 566 obj = repo.get_object(sha)
563 567 return list(getattr(obj, a) for a in attrs)
564 568
565 569 @reraise_safe_exceptions
566 570 def get_refs(self, wire):
567 571 repo = self._factory.repo(wire)
568 572 result = {}
569 573 for ref, sha in repo.refs.as_dict().items():
570 574 peeled_sha = repo.get_peeled(ref)
571 575 result[ref] = peeled_sha
572 576 return result
573 577
574 578 @reraise_safe_exceptions
575 579 def get_refs_path(self, wire):
576 580 repo = self._factory.repo(wire)
577 581 return repo.refs.path
578 582
579 583 @reraise_safe_exceptions
580 584 def head(self, wire, show_exc=True):
581 585 repo = self._factory.repo(wire)
582 586 try:
583 587 return repo.head()
584 588 except Exception:
585 589 if show_exc:
586 590 raise
587 591
588 592 @reraise_safe_exceptions
589 593 def init(self, wire):
590 594 repo_path = str_to_dulwich(wire['path'])
591 595 self.repo = Repo.init(repo_path)
592 596
593 597 @reraise_safe_exceptions
594 598 def init_bare(self, wire):
595 599 repo_path = str_to_dulwich(wire['path'])
596 600 self.repo = Repo.init_bare(repo_path)
597 601
598 602 @reraise_safe_exceptions
599 603 def revision(self, wire, rev):
600 604 repo = self._factory.repo(wire)
601 605 obj = repo[rev]
602 606 obj_data = {
603 607 'id': obj.id,
604 608 }
605 609 try:
606 610 obj_data['tree'] = obj.tree
607 611 except AttributeError:
608 612 pass
609 613 return obj_data
610 614
611 615 @reraise_safe_exceptions
612 616 def commit_attribute(self, wire, rev, attr):
613 617 repo = self._factory.repo(wire)
614 618 obj = repo[rev]
615 619 return getattr(obj, attr)
616 620
617 621 @reraise_safe_exceptions
618 622 def set_refs(self, wire, key, value):
619 623 repo = self._factory.repo(wire)
620 624 repo.refs[key] = value
621 625
622 626 @reraise_safe_exceptions
623 627 def remove_ref(self, wire, key):
624 628 repo = self._factory.repo(wire)
625 629 del repo.refs[key]
626 630
627 631 @reraise_safe_exceptions
628 632 def tree_changes(self, wire, source_id, target_id):
629 633 repo = self._factory.repo(wire)
630 634 source = repo[source_id].tree if source_id else None
631 635 target = repo[target_id].tree
632 636 result = repo.object_store.tree_changes(source, target)
633 637 return list(result)
634 638
635 639 @reraise_safe_exceptions
636 640 def tree_items(self, wire, tree_id):
637 641 repo = self._factory.repo(wire)
638 642 tree = repo[tree_id]
639 643
640 644 result = []
641 645 for item in tree.iteritems():
642 646 item_sha = item.sha
643 647 item_mode = item.mode
644 648
645 649 if FILE_MODE(item_mode) == GIT_LINK:
646 650 item_type = "link"
647 651 else:
648 652 item_type = repo[item_sha].type_name
649 653
650 654 result.append((item.path, item_mode, item_sha, item_type))
651 655 return result
652 656
653 657 @reraise_safe_exceptions
654 658 def update_server_info(self, wire):
655 659 repo = self._factory.repo(wire)
656 660 update_server_info(repo)
657 661
658 662 @reraise_safe_exceptions
659 663 def discover_git_version(self):
660 664 stdout, _ = self.run_git_command(
661 665 {}, ['--version'], _bare=True, _safe=True)
662 666 prefix = 'git version'
663 667 if stdout.startswith(prefix):
664 668 stdout = stdout[len(prefix):]
665 669 return stdout.strip()
666 670
667 671 @reraise_safe_exceptions
668 672 def run_git_command(self, wire, cmd, **opts):
669 673 path = wire.get('path', None)
670 674
671 675 if path and os.path.isdir(path):
672 676 opts['cwd'] = path
673 677
674 678 if '_bare' in opts:
675 679 _copts = []
676 680 del opts['_bare']
677 681 else:
678 682 _copts = ['-c', 'core.quotepath=false', ]
679 683 safe_call = False
680 684 if '_safe' in opts:
681 685 # no exc on failure
682 686 del opts['_safe']
683 687 safe_call = True
684 688
685 689 if '_copts' in opts:
686 690 _copts.extend(opts['_copts'] or [])
687 691 del opts['_copts']
688 692
689 693 gitenv = os.environ.copy()
690 694 gitenv.update(opts.pop('extra_env', {}))
691 695 # need to clean/fix GIT_DIR!
692 696 if 'GIT_DIR' in gitenv:
693 697 del gitenv['GIT_DIR']
694 698 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
695 699 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
696 700
697 701 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
698 702 _opts = {'env': gitenv, 'shell': False}
699 703
700 704 try:
701 705 _opts.update(opts)
702 706 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
703 707
704 708 return ''.join(p), ''.join(p.error)
705 709 except (EnvironmentError, OSError) as err:
706 710 cmd = ' '.join(cmd) # human friendly CMD
707 711 tb_err = ("Couldn't run git command (%s).\n"
708 712 "Original error was:%s\n"
709 713 "Call options:%s\n"
710 714 % (cmd, err, _opts))
711 715 log.exception(tb_err)
712 716 if safe_call:
713 717 return '', err
714 718 else:
715 719 raise exceptions.VcsException()(tb_err)
716 720
717 721 @reraise_safe_exceptions
718 722 def install_hooks(self, wire, force=False):
719 723 from vcsserver.hook_utils import install_git_hooks
720 724 repo = self._factory.repo(wire)
721 725 return install_git_hooks(repo.path, repo.bare, force_create=force)
722 726
723 727
724 728 def str_to_dulwich(value):
725 729 """
726 730 Dulwich 0.10.1a requires `unicode` objects to be passed in.
727 731 """
728 732 return value.decode(settings.WIRE_ENCODING)