vcsserver: return idx in object lookup for new API.
marcink
r699:5644458b default
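The diff below adds an 'idx' key (currently hard-coded to 0) to the dict returned by GitRemote.get_object(), alongside 'id', 'type' and 'commit_id'. A minimal caller-side sketch of how the new API might rely on that field follows; the helper function and its arguments are illustrative assumptions, not part of this commit.

# Hypothetical consumer of the new lookup result (illustrative only):
# GitRemote.get_object() now always includes 'idx', currently 0 for git objects.
def describe_object(remote, wire, sha):
    info = remote.get_object(wire, sha)
    # info looks like: {'id': ..., 'type': ..., 'commit_id': ..., 'idx': 0}
    return '{idx}:{commit_id} ({type})'.format(**info)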
@@ -1,751 +1,752 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import collections
import logging
import os
import posixpath as vcspath
import re
import stat
import traceback
import urllib
import urllib2
from functools import wraps

import more_itertools
from dulwich import index, objects
from dulwich.client import HttpGitClient, LocalGitClient
from dulwich.errors import (
    NotGitRepository, ChecksumMismatch, WrongObjectException,
    MissingCommitError, ObjectMissing, HangupException,
    UnexpectedCommandError)
from dulwich.repo import Repo as DulwichRepo, Tag
from dulwich.server import update_server_info

from vcsserver import exceptions, settings, subprocessio
from vcsserver.utils import safe_str
from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
from vcsserver.hgcompat import (
    hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
from vcsserver.git_lfs.lib import LFSOidStore

DIR_STAT = stat.S_IFDIR
FILE_MODE = stat.S_IFMT
GIT_LINK = objects.S_IFGITLINK

log = logging.getLogger(__name__)


def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError,
                ObjectMissing) as e:
            exc = exceptions.LookupException(e)
            raise exc(e)
        except (HangupException, UnexpectedCommandError) as e:
            exc = exceptions.VcsException(e)
            raise exc(e)
        except Exception as e:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions; these are exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
                #log.exception("Unhandled exception in git remote call")
                #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper


class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.

    TODO: mikhail: please check if we need this wrapper after updating dulwich
    to 0.12.0 +
    """
    def __del__(self):
        if hasattr(self, 'object_store'):
            self.close()


class GitFactory(RepoFactory):
    repo_type = 'git'

    def _create_repo(self, wire, create):
        repo_path = str_to_dulwich(wire['path'])
        return Repo(repo_path)


class GitRemote(object):

    def __init__(self, factory):
        self._factory = factory
        self.peeled_ref_marker = '^{}'
        self._bulk_methods = {
            "author": self.commit_attribute,
            "date": self.get_object_attrs,
            "message": self.commit_attribute,
            "parents": self.commit_attribute,
            "_commit": self.revision,
        }

    def _wire_to_config(self, wire):
        if 'config' in wire:
            return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
        return {}

    def _assign_ref(self, wire, ref, commit_id):
        repo = self._factory.repo(wire)
        repo[ref] = commit_id

    def _remote_conf(self, config):
        params = [
            '-c', 'core.askpass=""',
        ]
        ssl_cert_dir = config.get('vcs_ssl_dir')
        if ssl_cert_dir:
            params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
        return params

    @reraise_safe_exceptions
    def is_empty(self, wire):
        repo = self._factory.repo(wire)
        try:
            return not repo.head()
        except Exception:
            log.exception("failed to read object_store")
            return True

    @reraise_safe_exceptions
    def add_object(self, wire, content):
        repo = self._factory.repo(wire)
        blob = objects.Blob()
        blob.set_raw_string(content)
        repo.object_store.add_object(blob)
        return blob.id

    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        path = wire.get('path')
        try:
            self._factory.repo(wire)
        except NotGitRepository as e:
            tb = traceback.format_exc()
            log.debug("Invalid Git path `%s`, tb: %s", path, tb)
            return False

        return True

    @reraise_safe_exceptions
    def bare(self, wire):
        repo = self._factory.repo(wire)
        return repo.bare

    @reraise_safe_exceptions
    def blob_as_pretty_string(self, wire, sha):
        repo = self._factory.repo(wire)
        return repo[sha].as_pretty_string()

    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        repo = self._factory.repo(wire)
        blob = repo[sha]
        return blob.raw_length()

    def _parse_lfs_pointer(self, raw_content):

        spec_string = 'version https://git-lfs.github.com/spec'
        if raw_content and raw_content.startswith(spec_string):
            pattern = re.compile(r"""
            (?:\n)?
            ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
            ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
            ^size[ ](?P<oid_size>[0-9]+)\n
            (?:\n)?
            """, re.VERBOSE | re.MULTILINE)
            match = pattern.match(raw_content)
            if match:
                return match.groupdict()

        return {}

    @reraise_safe_exceptions
    def is_large_file(self, wire, sha):
        repo = self._factory.repo(wire)
        blob = repo[sha]
        return self._parse_lfs_pointer(blob.as_raw_string())

    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, oid):
        repo = self._factory.repo(wire)
        conf = self._wire_to_config(wire)

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:
            repo_name = repo.path
            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.has_oid()

        return False

    @reraise_safe_exceptions
    def store_path(self, wire, oid):
        repo = self._factory.repo(wire)
        conf = self._wire_to_config(wire)

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:
            repo_name = repo.path
            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.oid_path
        raise ValueError('Unable to fetch oid with path {}'.format(oid))

    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        result = {}
        for attr in pre_load:
            try:
                method = self._bulk_methods[attr]
                args = [wire, rev]
                if attr == "date":
                    args.extend(["commit_time", "commit_timezone"])
                elif attr in ["author", "message", "parents"]:
                    args.append(attr)
                result[attr] = method(*args)
            except KeyError as e:
                raise exceptions.VcsException(e)(
                    "Unknown bulk attribute: %s" % attr)
        return result

    def _build_opener(self, url):
        handlers = []
        url_obj = url_parser(url)
        _, authinfo = url_obj.authinfo()

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        return urllib2.build_opener(*handlers)

    @reraise_safe_exceptions
    def check_url(self, url, config):
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True

    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)

    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(
                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of its parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id

    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(self.peeled_ref_marker):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs

    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None):
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]
        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        output, __ = self.run_git_command(
            wire, ['ls-remote', url], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(self.peeled_ref_marker):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
        if fetch_refs:
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs

    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        repo = self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

    @reraise_safe_exceptions
    def get_remote_refs(self, wire, url):
        repo = Repo(url)
        return repo.get_refs()

    @reraise_safe_exceptions
    def get_description(self, wire):
        repo = self._factory.repo(wire)
        return repo.get_description()

    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs

    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        repo = self._factory.repo(wire)
        obj = repo.get_object(sha)
        commit_id = obj.id

        if isinstance(obj, Tag):
            commit_id = obj.object[1]

        return {
            'id': obj.id,
            'type': obj.type_name,
-            'commit_id': commit_id
+            'commit_id': commit_id,
+            'idx': 0
        }

    @reraise_safe_exceptions
    def get_object_attrs(self, wire, sha, *attrs):
        repo = self._factory.repo(wire)
        obj = repo.get_object(sha)
        return list(getattr(obj, a) for a in attrs)

    @reraise_safe_exceptions
    def get_refs(self, wire):
        repo = self._factory.repo(wire)
        result = {}
        for ref, sha in repo.refs.as_dict().items():
            peeled_sha = repo.get_peeled(ref)
            result[ref] = peeled_sha
        return result

    @reraise_safe_exceptions
    def get_refs_path(self, wire):
        repo = self._factory.repo(wire)
        return repo.refs.path

    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        repo = self._factory.repo(wire)
        try:
            return repo.head()
        except Exception:
            if show_exc:
                raise

    @reraise_safe_exceptions
    def init(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        repo = self._factory.repo(wire)
        obj = repo[rev]
        obj_data = {
            'id': obj.id,
        }
        try:
            obj_data['tree'] = obj.tree
        except AttributeError:
            pass
        return obj_data

    @reraise_safe_exceptions
    def commit_attribute(self, wire, rev, attr):
        repo = self._factory.repo(wire)
        obj = repo[rev]
        return getattr(obj, attr)

    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        repo = self._factory.repo(wire)
        repo.refs[key] = value

    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        repo = self._factory.repo(wire)
        del repo.refs[key]

    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        repo = self._factory.repo(wire)
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)
        return list(result)

    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        repo = self._factory.repo(wire)
        tree = repo[tree_id]

        result = []
        for item in tree.iteritems():
            item_sha = item.sha
            item_mode = item.mode

            if FILE_MODE(item_mode) == GIT_LINK:
                item_type = "link"
            else:
                item_type = repo[item_sha].type_name

            result.append((item.path, item_mode, item_sha, item_type))
        return result

    @reraise_safe_exceptions
    def update_server_info(self, wire):
        repo = self._factory.repo(wire)
        update_server_info(repo)

    @reraise_safe_exceptions
    def discover_git_version(self):
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        prefix = 'git version'
        if stdout.startswith(prefix):
            stdout = stdout[len(prefix):]
        return stdout.strip()

    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        try:
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd) # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        from vcsserver.hook_utils import install_git_hooks
        repo = self._factory.repo(wire)
        return install_git_hooks(repo.path, repo.bare, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        from vcsserver.hook_utils import (
            get_git_pre_hook_version, get_git_post_hook_version)
        repo = self._factory.repo(wire)
        return {
            'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
            'post_version': get_git_post_hook_version(repo.path, repo.bare),
        }


def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    return value.decode(settings.WIRE_ENCODING)