vcsserver: return idx in object lookup for new API.
marcink
r699:5644458b default
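
For context, a minimal sketch of what a caller of GitRemote.get_object receives after this change. The names `remote`, `wire` and `commit_sha` below are placeholders; only the returned keys ('id', 'type', 'commit_id' and the new 'idx') come from the diff itself:

    # Hypothetical caller-side sketch: `remote` is a GitRemote instance,
    # `wire` the usual connection dict, `commit_sha` any reachable sha.
    payload = remote.get_object(wire, commit_sha)
    # Previously the payload carried 'id', 'type' and 'commit_id';
    # it now also includes the 'idx' placeholder introduced by this commit:
    assert set(payload) == {'id', 'type', 'commit_id', 'idx'}
    assert payload['idx'] == 0
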
@@ -1,751 +1,752 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import collections
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 import more_itertools
29 29 from dulwich import index, objects
30 30 from dulwich.client import HttpGitClient, LocalGitClient
31 31 from dulwich.errors import (
32 32 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 33 MissingCommitError, ObjectMissing, HangupException,
34 34 UnexpectedCommandError)
35 35 from dulwich.repo import Repo as DulwichRepo, Tag
36 36 from dulwich.server import update_server_info
37 37
38 38 from vcsserver import exceptions, settings, subprocessio
39 39 from vcsserver.utils import safe_str
40 40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 41 from vcsserver.hgcompat import (
42 42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 43 from vcsserver.git_lfs.lib import LFSOidStore
44 44
45 45 DIR_STAT = stat.S_IFDIR
46 46 FILE_MODE = stat.S_IFMT
47 47 GIT_LINK = objects.S_IFGITLINK
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 def reraise_safe_exceptions(func):
53 53 """Converts Dulwich exceptions to something neutral."""
54 54 @wraps(func)
55 55 def wrapper(*args, **kwargs):
56 56 try:
57 57 return func(*args, **kwargs)
58 58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 59 ObjectMissing) as e:
60 60 exc = exceptions.LookupException(e)
61 61 raise exc(e)
62 62 except (HangupException, UnexpectedCommandError) as e:
63 63 exc = exceptions.VcsException(e)
64 64 raise exc(e)
65 65 except Exception as e:
66 66             # NOTE(marcink): because of how dulwich handles some exceptions
67 67             # (KeyError on empty repos), we cannot track this and catch all
68 68             # exceptions; these are exceptions raised by other handlers
69 69 #if not hasattr(e, '_vcs_kind'):
70 70 #log.exception("Unhandled exception in git remote call")
71 71 #raise_from_original(exceptions.UnhandledException)
72 72 raise
73 73 return wrapper
74 74
75 75
76 76 class Repo(DulwichRepo):
77 77 """
78 78 A wrapper for dulwich Repo class.
79 79
80 80     Since dulwich sometimes keeps .idx file descriptors open, this can lead to
81 81     a "Too many open files" error. We need to close all opened file descriptors
82 82 once the repo object is destroyed.
83 83
84 84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 85 to 0.12.0 +
86 86 """
87 87 def __del__(self):
88 88 if hasattr(self, 'object_store'):
89 89 self.close()
90 90
91 91
92 92 class GitFactory(RepoFactory):
93 93 repo_type = 'git'
94 94
95 95 def _create_repo(self, wire, create):
96 96 repo_path = str_to_dulwich(wire['path'])
97 97 return Repo(repo_path)
98 98
99 99
100 100 class GitRemote(object):
101 101
102 102 def __init__(self, factory):
103 103 self._factory = factory
104 104 self.peeled_ref_marker = '^{}'
105 105 self._bulk_methods = {
106 106 "author": self.commit_attribute,
107 107 "date": self.get_object_attrs,
108 108 "message": self.commit_attribute,
109 109 "parents": self.commit_attribute,
110 110 "_commit": self.revision,
111 111 }
112 112
113 113 def _wire_to_config(self, wire):
114 114 if 'config' in wire:
115 115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 116 return {}
117 117
118 118 def _assign_ref(self, wire, ref, commit_id):
119 119 repo = self._factory.repo(wire)
120 120 repo[ref] = commit_id
121 121
122 122 def _remote_conf(self, config):
123 123 params = [
124 124 '-c', 'core.askpass=""',
125 125 ]
126 126 ssl_cert_dir = config.get('vcs_ssl_dir')
127 127 if ssl_cert_dir:
128 128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 129 return params
130 130
131 131 @reraise_safe_exceptions
132 132 def is_empty(self, wire):
133 133 repo = self._factory.repo(wire)
134 134 try:
135 135 return not repo.head()
136 136 except Exception:
137 137 log.exception("failed to read object_store")
138 138 return True
139 139
140 140 @reraise_safe_exceptions
141 141 def add_object(self, wire, content):
142 142 repo = self._factory.repo(wire)
143 143 blob = objects.Blob()
144 144 blob.set_raw_string(content)
145 145 repo.object_store.add_object(blob)
146 146 return blob.id
147 147
148 148 @reraise_safe_exceptions
149 149 def assert_correct_path(self, wire):
150 150 path = wire.get('path')
151 151 try:
152 152 self._factory.repo(wire)
153 153 except NotGitRepository as e:
154 154 tb = traceback.format_exc()
155 155 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
156 156 return False
157 157
158 158 return True
159 159
160 160 @reraise_safe_exceptions
161 161 def bare(self, wire):
162 162 repo = self._factory.repo(wire)
163 163 return repo.bare
164 164
165 165 @reraise_safe_exceptions
166 166 def blob_as_pretty_string(self, wire, sha):
167 167 repo = self._factory.repo(wire)
168 168 return repo[sha].as_pretty_string()
169 169
170 170 @reraise_safe_exceptions
171 171 def blob_raw_length(self, wire, sha):
172 172 repo = self._factory.repo(wire)
173 173 blob = repo[sha]
174 174 return blob.raw_length()
175 175
176 176 def _parse_lfs_pointer(self, raw_content):
177 177
178 178 spec_string = 'version https://git-lfs.github.com/spec'
179 179 if raw_content and raw_content.startswith(spec_string):
180 180 pattern = re.compile(r"""
181 181 (?:\n)?
182 182 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
183 183 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
184 184 ^size[ ](?P<oid_size>[0-9]+)\n
185 185 (?:\n)?
186 186 """, re.VERBOSE | re.MULTILINE)
187 187 match = pattern.match(raw_content)
188 188 if match:
189 189 return match.groupdict()
190 190
191 191 return {}
192 192
193 193 @reraise_safe_exceptions
194 194 def is_large_file(self, wire, sha):
195 195 repo = self._factory.repo(wire)
196 196 blob = repo[sha]
197 197 return self._parse_lfs_pointer(blob.as_raw_string())
198 198
199 199 @reraise_safe_exceptions
200 200 def in_largefiles_store(self, wire, oid):
201 201 repo = self._factory.repo(wire)
202 202 conf = self._wire_to_config(wire)
203 203
204 204 store_location = conf.get('vcs_git_lfs_store_location')
205 205 if store_location:
206 206 repo_name = repo.path
207 207 store = LFSOidStore(
208 208 oid=oid, repo=repo_name, store_location=store_location)
209 209 return store.has_oid()
210 210
211 211 return False
212 212
213 213 @reraise_safe_exceptions
214 214 def store_path(self, wire, oid):
215 215 repo = self._factory.repo(wire)
216 216 conf = self._wire_to_config(wire)
217 217
218 218 store_location = conf.get('vcs_git_lfs_store_location')
219 219 if store_location:
220 220 repo_name = repo.path
221 221 store = LFSOidStore(
222 222 oid=oid, repo=repo_name, store_location=store_location)
223 223 return store.oid_path
224 224 raise ValueError('Unable to fetch oid with path {}'.format(oid))
225 225
226 226 @reraise_safe_exceptions
227 227 def bulk_request(self, wire, rev, pre_load):
228 228 result = {}
229 229 for attr in pre_load:
230 230 try:
231 231 method = self._bulk_methods[attr]
232 232 args = [wire, rev]
233 233 if attr == "date":
234 234 args.extend(["commit_time", "commit_timezone"])
235 235 elif attr in ["author", "message", "parents"]:
236 236 args.append(attr)
237 237 result[attr] = method(*args)
238 238 except KeyError as e:
239 239 raise exceptions.VcsException(e)(
240 240 "Unknown bulk attribute: %s" % attr)
241 241 return result
242 242
243 243 def _build_opener(self, url):
244 244 handlers = []
245 245 url_obj = url_parser(url)
246 246 _, authinfo = url_obj.authinfo()
247 247
248 248 if authinfo:
249 249 # create a password manager
250 250 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
251 251 passmgr.add_password(*authinfo)
252 252
253 253 handlers.extend((httpbasicauthhandler(passmgr),
254 254 httpdigestauthhandler(passmgr)))
255 255
256 256 return urllib2.build_opener(*handlers)
257 257
258 258 @reraise_safe_exceptions
259 259 def check_url(self, url, config):
260 260 url_obj = url_parser(url)
261 261 test_uri, _ = url_obj.authinfo()
262 262 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
263 263 url_obj.query = obfuscate_qs(url_obj.query)
264 264 cleaned_uri = str(url_obj)
265 265 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
266 266
267 267 if not test_uri.endswith('info/refs'):
268 268 test_uri = test_uri.rstrip('/') + '/info/refs'
269 269
270 270 o = self._build_opener(url)
271 271 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
272 272
273 273 q = {"service": 'git-upload-pack'}
274 274 qs = '?%s' % urllib.urlencode(q)
275 275 cu = "%s%s" % (test_uri, qs)
276 276 req = urllib2.Request(cu, None, {})
277 277
278 278 try:
279 279 log.debug("Trying to open URL %s", cleaned_uri)
280 280 resp = o.open(req)
281 281 if resp.code != 200:
282 282 raise exceptions.URLError()('Return Code is not 200')
283 283 except Exception as e:
284 284 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
285 285 # means it cannot be cloned
286 286 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
287 287
288 288 # now detect if it's proper git repo
289 289 gitdata = resp.read()
290 290 if 'service=git-upload-pack' in gitdata:
291 291 pass
292 292 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
293 293 # old style git can return some other format !
294 294 pass
295 295 else:
296 296 raise exceptions.URLError()(
297 297                 "url [%s] does not look like a git repository" % (cleaned_uri,))
298 298
299 299 return True
300 300
301 301 @reraise_safe_exceptions
302 302 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
303 303 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
304 304 remote_refs = self.pull(wire, url, apply_refs=False)
305 305 repo = self._factory.repo(wire)
306 306 if isinstance(valid_refs, list):
307 307 valid_refs = tuple(valid_refs)
308 308
309 309 for k in remote_refs:
310 310 # only parse heads/tags and skip so called deferred tags
311 311 if k.startswith(valid_refs) and not k.endswith(deferred):
312 312 repo[k] = remote_refs[k]
313 313
314 314 if update_after_clone:
315 315 # we want to checkout HEAD
316 316 repo["HEAD"] = remote_refs["HEAD"]
317 317 index.build_index_from_tree(repo.path, repo.index_path(),
318 318 repo.object_store, repo["HEAD"].tree)
319 319
320 320 # TODO: this is quite complex, check if that can be simplified
321 321 @reraise_safe_exceptions
322 322 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
323 323 repo = self._factory.repo(wire)
324 324 object_store = repo.object_store
325 325
326 326         # Create the tree and populate it with blobs
327 327 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
328 328
329 329 for node in updated:
330 330 # Compute subdirs if needed
331 331 dirpath, nodename = vcspath.split(node['path'])
332 332 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
333 333 parent = commit_tree
334 334 ancestors = [('', parent)]
335 335
336 336 # Tries to dig for the deepest existing tree
337 337 while dirnames:
338 338 curdir = dirnames.pop(0)
339 339 try:
340 340 dir_id = parent[curdir][1]
341 341 except KeyError:
342 342                     # put curdir back into dirnames and stop
343 343 dirnames.insert(0, curdir)
344 344 break
345 345 else:
346 346 # If found, updates parent
347 347 parent = repo[dir_id]
348 348 ancestors.append((curdir, parent))
349 349 # Now parent is deepest existing tree and we need to create
350 350 # subtrees for dirnames (in reverse order)
351 351 # [this only applies for nodes from added]
352 352 new_trees = []
353 353
354 354 blob = objects.Blob.from_string(node['content'])
355 355
356 356 if dirnames:
357 357 # If there are trees which should be created we need to build
358 358 # them now (in reverse order)
359 359 reversed_dirnames = list(reversed(dirnames))
360 360 curtree = objects.Tree()
361 361 curtree[node['node_path']] = node['mode'], blob.id
362 362 new_trees.append(curtree)
363 363 for dirname in reversed_dirnames[:-1]:
364 364 newtree = objects.Tree()
365 365 newtree[dirname] = (DIR_STAT, curtree.id)
366 366 new_trees.append(newtree)
367 367 curtree = newtree
368 368 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
369 369 else:
370 370 parent.add(
371 371 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
372 372
373 373 new_trees.append(parent)
374 374 # Update ancestors
375 375 reversed_ancestors = reversed(
376 376 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
377 377 for parent, tree, path in reversed_ancestors:
378 378 parent[path] = (DIR_STAT, tree.id)
379 379 object_store.add_object(tree)
380 380
381 381 object_store.add_object(blob)
382 382 for tree in new_trees:
383 383 object_store.add_object(tree)
384 384
385 385 for node_path in removed:
386 386 paths = node_path.split('/')
387 387 tree = commit_tree
388 388 trees = [tree]
389 389 # Traverse deep into the forest...
390 390 for path in paths:
391 391 try:
392 392 obj = repo[tree[path][1]]
393 393 if isinstance(obj, objects.Tree):
394 394 trees.append(obj)
395 395 tree = obj
396 396 except KeyError:
397 397 break
398 398 # Cut down the blob and all rotten trees on the way back...
399 399 for path, tree in reversed(zip(paths, trees)):
400 400 del tree[path]
401 401 if tree:
402 402 # This tree still has elements - don't remove it or any
403 403                     # of its parents
404 404 break
405 405
406 406 object_store.add_object(commit_tree)
407 407
408 408 # Create commit
409 409 commit = objects.Commit()
410 410 commit.tree = commit_tree.id
411 411 for k, v in commit_data.iteritems():
412 412 setattr(commit, k, v)
413 413 object_store.add_object(commit)
414 414
415 415 ref = 'refs/heads/%s' % branch
416 416 repo.refs[ref] = commit.id
417 417
418 418 return commit.id
419 419
420 420 @reraise_safe_exceptions
421 421 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
422 422 if url != 'default' and '://' not in url:
423 423 client = LocalGitClient(url)
424 424 else:
425 425 url_obj = url_parser(url)
426 426 o = self._build_opener(url)
427 427 url, _ = url_obj.authinfo()
428 428 client = HttpGitClient(base_url=url, opener=o)
429 429 repo = self._factory.repo(wire)
430 430
431 431 determine_wants = repo.object_store.determine_wants_all
432 432 if refs:
433 433 def determine_wants_requested(references):
434 434 return [references[r] for r in references if r in refs]
435 435 determine_wants = determine_wants_requested
436 436
437 437 try:
438 438 remote_refs = client.fetch(
439 439 path=url, target=repo, determine_wants=determine_wants)
440 440 except NotGitRepository as e:
441 441 log.warning(
442 442 'Trying to fetch from "%s" failed, not a Git repository.', url)
443 443 # Exception can contain unicode which we convert
444 444 raise exceptions.AbortException(e)(repr(e))
445 445
446 446 # mikhail: client.fetch() returns all the remote refs, but fetches only
447 447 # refs filtered by `determine_wants` function. We need to filter result
448 448 # as well
449 449 if refs:
450 450 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
451 451
452 452 if apply_refs:
453 453 # TODO: johbo: Needs proper test coverage with a git repository
454 454 # that contains a tag object, so that we would end up with
455 455 # a peeled ref at this point.
456 456 for k in remote_refs:
457 457 if k.endswith(self.peeled_ref_marker):
458 458 log.debug("Skipping peeled reference %s", k)
459 459 continue
460 460 repo[k] = remote_refs[k]
461 461
462 462 if refs and not update_after:
463 463 # mikhail: explicitly set the head to the last ref.
464 464 repo['HEAD'] = remote_refs[refs[-1]]
465 465
466 466 if update_after:
467 467 # we want to checkout HEAD
468 468 repo["HEAD"] = remote_refs["HEAD"]
469 469 index.build_index_from_tree(repo.path, repo.index_path(),
470 470 repo.object_store, repo["HEAD"].tree)
471 471 return remote_refs
472 472
473 473 @reraise_safe_exceptions
474 474 def sync_fetch(self, wire, url, refs=None):
475 475 repo = self._factory.repo(wire)
476 476 if refs and not isinstance(refs, (list, tuple)):
477 477 refs = [refs]
478 478 config = self._wire_to_config(wire)
479 479 # get all remote refs we'll use to fetch later
480 480 output, __ = self.run_git_command(
481 481 wire, ['ls-remote', url], fail_on_stderr=False,
482 482 _copts=self._remote_conf(config),
483 483 extra_env={'GIT_TERMINAL_PROMPT': '0'})
484 484
485 485 remote_refs = collections.OrderedDict()
486 486 fetch_refs = []
487 487
488 488 for ref_line in output.splitlines():
489 489 sha, ref = ref_line.split('\t')
490 490 sha = sha.strip()
491 491 if ref in remote_refs:
492 492 # duplicate, skip
493 493 continue
494 494 if ref.endswith(self.peeled_ref_marker):
495 495 log.debug("Skipping peeled reference %s", ref)
496 496 continue
497 497 # don't sync HEAD
498 498 if ref in ['HEAD']:
499 499 continue
500 500
501 501 remote_refs[ref] = sha
502 502
503 503 if refs and sha in refs:
504 504 # we filter fetch using our specified refs
505 505 fetch_refs.append('{}:{}'.format(ref, ref))
506 506 elif not refs:
507 507 fetch_refs.append('{}:{}'.format(ref, ref))
508 508 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
509 509 if fetch_refs:
510 510 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
511 511 fetch_refs_chunks = list(chunk)
512 512 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
513 513 _out, _err = self.run_git_command(
514 514 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
515 515 fail_on_stderr=False,
516 516 _copts=self._remote_conf(config),
517 517 extra_env={'GIT_TERMINAL_PROMPT': '0'})
518 518
519 519 return remote_refs
520 520
521 521 @reraise_safe_exceptions
522 522 def sync_push(self, wire, url, refs=None):
523 523 if not self.check_url(url, wire):
524 524 return
525 525 config = self._wire_to_config(wire)
526 526 repo = self._factory.repo(wire)
527 527 self.run_git_command(
528 528 wire, ['push', url, '--mirror'], fail_on_stderr=False,
529 529 _copts=self._remote_conf(config),
530 530 extra_env={'GIT_TERMINAL_PROMPT': '0'})
531 531
532 532 @reraise_safe_exceptions
533 533 def get_remote_refs(self, wire, url):
534 534 repo = Repo(url)
535 535 return repo.get_refs()
536 536
537 537 @reraise_safe_exceptions
538 538 def get_description(self, wire):
539 539 repo = self._factory.repo(wire)
540 540 return repo.get_description()
541 541
542 542 @reraise_safe_exceptions
543 543 def get_missing_revs(self, wire, rev1, rev2, path2):
544 544 repo = self._factory.repo(wire)
545 545 LocalGitClient(thin_packs=False).fetch(path2, repo)
546 546
547 547 wire_remote = wire.copy()
548 548 wire_remote['path'] = path2
549 549 repo_remote = self._factory.repo(wire_remote)
550 550 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
551 551
552 552 revs = [
553 553 x.commit.id
554 554 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
555 555 return revs
556 556
557 557 @reraise_safe_exceptions
558 558 def get_object(self, wire, sha):
559 559 repo = self._factory.repo(wire)
560 560 obj = repo.get_object(sha)
561 561 commit_id = obj.id
562 562
563 563 if isinstance(obj, Tag):
564 564 commit_id = obj.object[1]
565 565
566 566 return {
567 567 'id': obj.id,
568 568 'type': obj.type_name,
569 'commit_id': commit_id
569 'commit_id': commit_id,
570 'idx': 0
570 571 }
571 572
572 573 @reraise_safe_exceptions
573 574 def get_object_attrs(self, wire, sha, *attrs):
574 575 repo = self._factory.repo(wire)
575 576 obj = repo.get_object(sha)
576 577 return list(getattr(obj, a) for a in attrs)
577 578
578 579 @reraise_safe_exceptions
579 580 def get_refs(self, wire):
580 581 repo = self._factory.repo(wire)
581 582 result = {}
582 583 for ref, sha in repo.refs.as_dict().items():
583 584 peeled_sha = repo.get_peeled(ref)
584 585 result[ref] = peeled_sha
585 586 return result
586 587
587 588 @reraise_safe_exceptions
588 589 def get_refs_path(self, wire):
589 590 repo = self._factory.repo(wire)
590 591 return repo.refs.path
591 592
592 593 @reraise_safe_exceptions
593 594 def head(self, wire, show_exc=True):
594 595 repo = self._factory.repo(wire)
595 596 try:
596 597 return repo.head()
597 598 except Exception:
598 599 if show_exc:
599 600 raise
600 601
601 602 @reraise_safe_exceptions
602 603 def init(self, wire):
603 604 repo_path = str_to_dulwich(wire['path'])
604 605 self.repo = Repo.init(repo_path)
605 606
606 607 @reraise_safe_exceptions
607 608 def init_bare(self, wire):
608 609 repo_path = str_to_dulwich(wire['path'])
609 610 self.repo = Repo.init_bare(repo_path)
610 611
611 612 @reraise_safe_exceptions
612 613 def revision(self, wire, rev):
613 614 repo = self._factory.repo(wire)
614 615 obj = repo[rev]
615 616 obj_data = {
616 617 'id': obj.id,
617 618 }
618 619 try:
619 620 obj_data['tree'] = obj.tree
620 621 except AttributeError:
621 622 pass
622 623 return obj_data
623 624
624 625 @reraise_safe_exceptions
625 626 def commit_attribute(self, wire, rev, attr):
626 627 repo = self._factory.repo(wire)
627 628 obj = repo[rev]
628 629 return getattr(obj, attr)
629 630
630 631 @reraise_safe_exceptions
631 632 def set_refs(self, wire, key, value):
632 633 repo = self._factory.repo(wire)
633 634 repo.refs[key] = value
634 635
635 636 @reraise_safe_exceptions
636 637 def remove_ref(self, wire, key):
637 638 repo = self._factory.repo(wire)
638 639 del repo.refs[key]
639 640
640 641 @reraise_safe_exceptions
641 642 def tree_changes(self, wire, source_id, target_id):
642 643 repo = self._factory.repo(wire)
643 644 source = repo[source_id].tree if source_id else None
644 645 target = repo[target_id].tree
645 646 result = repo.object_store.tree_changes(source, target)
646 647 return list(result)
647 648
648 649 @reraise_safe_exceptions
649 650 def tree_items(self, wire, tree_id):
650 651 repo = self._factory.repo(wire)
651 652 tree = repo[tree_id]
652 653
653 654 result = []
654 655 for item in tree.iteritems():
655 656 item_sha = item.sha
656 657 item_mode = item.mode
657 658
658 659 if FILE_MODE(item_mode) == GIT_LINK:
659 660 item_type = "link"
660 661 else:
661 662 item_type = repo[item_sha].type_name
662 663
663 664 result.append((item.path, item_mode, item_sha, item_type))
664 665 return result
665 666
666 667 @reraise_safe_exceptions
667 668 def update_server_info(self, wire):
668 669 repo = self._factory.repo(wire)
669 670 update_server_info(repo)
670 671
671 672 @reraise_safe_exceptions
672 673 def discover_git_version(self):
673 674 stdout, _ = self.run_git_command(
674 675 {}, ['--version'], _bare=True, _safe=True)
675 676 prefix = 'git version'
676 677 if stdout.startswith(prefix):
677 678 stdout = stdout[len(prefix):]
678 679 return stdout.strip()
679 680
680 681 @reraise_safe_exceptions
681 682 def run_git_command(self, wire, cmd, **opts):
682 683 path = wire.get('path', None)
683 684
684 685 if path and os.path.isdir(path):
685 686 opts['cwd'] = path
686 687
687 688 if '_bare' in opts:
688 689 _copts = []
689 690 del opts['_bare']
690 691 else:
691 692 _copts = ['-c', 'core.quotepath=false', ]
692 693 safe_call = False
693 694 if '_safe' in opts:
694 695 # no exc on failure
695 696 del opts['_safe']
696 697 safe_call = True
697 698
698 699 if '_copts' in opts:
699 700 _copts.extend(opts['_copts'] or [])
700 701 del opts['_copts']
701 702
702 703 gitenv = os.environ.copy()
703 704 gitenv.update(opts.pop('extra_env', {}))
704 705         # need to clean up GIT_DIR so it does not leak into the subprocess env
705 706 if 'GIT_DIR' in gitenv:
706 707 del gitenv['GIT_DIR']
707 708 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
708 709 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
709 710
710 711 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
711 712 _opts = {'env': gitenv, 'shell': False}
712 713
713 714 try:
714 715 _opts.update(opts)
715 716 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
716 717
717 718 return ''.join(p), ''.join(p.error)
718 719 except (EnvironmentError, OSError) as err:
719 720 cmd = ' '.join(cmd) # human friendly CMD
720 721 tb_err = ("Couldn't run git command (%s).\n"
721 722 "Original error was:%s\n"
722 723 "Call options:%s\n"
723 724 % (cmd, err, _opts))
724 725 log.exception(tb_err)
725 726 if safe_call:
726 727 return '', err
727 728 else:
728 729 raise exceptions.VcsException()(tb_err)
729 730
730 731 @reraise_safe_exceptions
731 732 def install_hooks(self, wire, force=False):
732 733 from vcsserver.hook_utils import install_git_hooks
733 734 repo = self._factory.repo(wire)
734 735 return install_git_hooks(repo.path, repo.bare, force_create=force)
735 736
736 737 @reraise_safe_exceptions
737 738 def get_hooks_info(self, wire):
738 739 from vcsserver.hook_utils import (
739 740 get_git_pre_hook_version, get_git_post_hook_version)
740 741 repo = self._factory.repo(wire)
741 742 return {
742 743 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
743 744 'post_version': get_git_post_hook_version(repo.path, repo.bare),
744 745 }
745 746
746 747
747 748 def str_to_dulwich(value):
748 749 """
749 750 Dulwich 0.10.1a requires `unicode` objects to be passed in.
750 751 """
751 752 return value.decode(settings.WIRE_ENCODING)
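
As a side note, a small illustration of the `_wire_to_config` flattening that the LFS and SSL lookups above rely on. The wire payload below is synthetic, but the key construction (section + '_' + key) mirrors the code:

    # Synthetic example of the config flattening done by _wire_to_config().
    wire = {'config': [
        ('vcs_git_lfs', 'store_location', '/var/opt/lfs-store'),
        ('vcs', 'ssl_dir', '/etc/ssl/certs'),
    ]}
    config = dict((section + '_' + key, value)
                  for section, key, value in wire['config'])
    # -> {'vcs_git_lfs_store_location': '/var/opt/lfs-store',
    #     'vcs_ssl_dir': '/etc/ssl/certs'}
    # which is what in_largefiles_store() and _remote_conf() read via conf.get(...).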