hooks: added a few Python 3 related fixes to handle bytes vs. str in Mercurial hooks
super-admin
r1108:62b12ad9 python3
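
The changes below lean on small bytes/str conversion helpers (safe_str, ascii_str, safe_bytes) imported from vcsserver.str_utils. That module is not part of this diff, so the following is only a minimal sketch of what such helpers typically look like, assuming UTF-8 as the default encoding; it is included to make the intent of the conversions easier to follow, not as the actual implementation.

    # Illustrative sketch only; the real helpers live in vcsserver.str_utils
    # and may differ in signature and error handling.

    def safe_bytes(value, encoding='utf-8') -> bytes:
        """Return value as bytes, encoding str input with the given encoding."""
        if isinstance(value, bytes):
            return value
        return str(value).encode(encoding)

    def safe_str(value, encoding='utf-8') -> str:
        """Return value as str, decoding bytes input with the given encoding."""
        if isinstance(value, str):
            return value
        return value.decode(encoding, errors='replace')

    def ascii_str(value: bytes) -> str:
        """Decode ASCII-only bytes (e.g. hex commit ids) into str."""
        return value.decode('ascii')

    # Example: Mercurial returns node hashes and branch names as bytes,
    # while the hook extras sent onwards are plain str.
    assert ascii_str(b'62b12ad9') == '62b12ad9'
    assert safe_str(b'default') == 'default'
    assert safe_bytes('message\n') == b'message\n'
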
@@ -1,738 +1,769 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27 import msgpack
27 import msgpack
28 import dataclasses
29 import pygit2
28
30
29 from http.client import HTTPConnection
31 from http.client import HTTPConnection
30
32
31
33
32 import mercurial.scmutil
34 import mercurial.scmutil
33 import mercurial.node
35 import mercurial.node
34
36
35 from vcsserver.lib.rc_json import json
37 from vcsserver.lib.rc_json import json
36 from vcsserver import exceptions, subprocessio, settings
38 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.str_utils import safe_bytes
39 from vcsserver.str_utils import ascii_str, safe_str
40 from vcsserver.remote.git import Repository
38
41
39 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
40
43
41
44
42 class HooksHttpClient(object):
45 class HooksHttpClient(object):
43 proto = 'msgpack.v1'
46 proto = 'msgpack.v1'
44 connection = None
47 connection = None
45
48
46 def __init__(self, hooks_uri):
49 def __init__(self, hooks_uri):
47 self.hooks_uri = hooks_uri
50 self.hooks_uri = hooks_uri
48
51
49 def __call__(self, method, extras):
52 def __call__(self, method, extras):
50 connection = HTTPConnection(self.hooks_uri)
53 connection = HTTPConnection(self.hooks_uri)
51 # binary msgpack body
54 # binary msgpack body
52 headers, body = self._serialize(method, extras)
55 headers, body = self._serialize(method, extras)
53 try:
56 try:
54 connection.request('POST', '/', body, headers)
57 connection.request('POST', '/', body, headers)
55 except Exception as error:
58 except Exception as error:
56 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
59 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
57 raise
60 raise
58 response = connection.getresponse()
61 response = connection.getresponse()
59 try:
62 try:
60 return msgpack.load(response)
63 return msgpack.load(response)
61 except Exception:
64 except Exception:
62 response_data = response.read()
65 response_data = response.read()
63 log.exception('Failed to decode hook response json data. '
66 log.exception('Failed to decode hook response json data. '
64 'response_code:%s, raw_data:%s',
67 'response_code:%s, raw_data:%s',
65 response.status, response_data)
68 response.status, response_data)
66 raise
69 raise
67
70
68 @classmethod
71 @classmethod
69 def _serialize(cls, hook_name, extras):
72 def _serialize(cls, hook_name, extras):
70 data = {
73 data = {
71 'method': hook_name,
74 'method': hook_name,
72 'extras': extras
75 'extras': extras
73 }
76 }
74 headers = {
77 headers = {
75 'rc-hooks-protocol': cls.proto
78 'rc-hooks-protocol': cls.proto
76 }
79 }
77 return headers, msgpack.packb(data)
80 return headers, msgpack.packb(data)
78
81
79
82
80 class HooksDummyClient(object):
83 class HooksDummyClient(object):
81 def __init__(self, hooks_module):
84 def __init__(self, hooks_module):
82 self._hooks_module = importlib.import_module(hooks_module)
85 self._hooks_module = importlib.import_module(hooks_module)
83
86
84 def __call__(self, hook_name, extras):
87 def __call__(self, hook_name, extras):
85 with self._hooks_module.Hooks() as hooks:
88 with self._hooks_module.Hooks() as hooks:
86 return getattr(hooks, hook_name)(extras)
89 return getattr(hooks, hook_name)(extras)
87
90
88
91
89 class HooksShadowRepoClient(object):
92 class HooksShadowRepoClient(object):
90
93
91 def __call__(self, hook_name, extras):
94 def __call__(self, hook_name, extras):
92 return {'output': '', 'status': 0}
95 return {'output': '', 'status': 0}
93
96
94
97
95 class RemoteMessageWriter(object):
98 class RemoteMessageWriter(object):
96 """Writer base class."""
99 """Writer base class."""
97 def write(self, message):
100 def write(self, message):
98 raise NotImplementedError()
101 raise NotImplementedError()
99
102
100
103
101 class HgMessageWriter(RemoteMessageWriter):
104 class HgMessageWriter(RemoteMessageWriter):
102 """Writer that knows how to send messages to mercurial clients."""
105 """Writer that knows how to send messages to mercurial clients."""
103
106
104 def __init__(self, ui):
107 def __init__(self, ui):
105 self.ui = ui
108 self.ui = ui
106
109
107 def write(self, message):
110 def write(self, message: str):
108 # TODO: Check why the quiet flag is set by default.
111 # TODO: Check why the quiet flag is set by default.
109 old = self.ui.quiet
112 old = self.ui.quiet
110 self.ui.quiet = False
113 self.ui.quiet = False
111 self.ui.status(message.encode('utf-8'))
114 self.ui.status(message.encode('utf-8'))
112 self.ui.quiet = old
115 self.ui.quiet = old
113
116
114
117
115 class GitMessageWriter(RemoteMessageWriter):
118 class GitMessageWriter(RemoteMessageWriter):
116 """Writer that knows how to send messages to git clients."""
119 """Writer that knows how to send messages to git clients."""
117
120
118 def __init__(self, stdout=None):
121 def __init__(self, stdout=None):
119 self.stdout = stdout or sys.stdout
122 self.stdout = stdout or sys.stdout
120
123
121 def write(self, message):
124 def write(self, message: str):
122 self.stdout.write(safe_bytes(message))
125 self.stdout.write(message)
123
126
124
127
125 class SvnMessageWriter(RemoteMessageWriter):
128 class SvnMessageWriter(RemoteMessageWriter):
126 """Writer that knows how to send messages to svn clients."""
129 """Writer that knows how to send messages to svn clients."""
127
130
128 def __init__(self, stderr=None):
131 def __init__(self, stderr=None):
129 # SVN needs data sent to stderr for back-to-client messaging
132 # SVN needs data sent to stderr for back-to-client messaging
130 self.stderr = stderr or sys.stderr
133 self.stderr = stderr or sys.stderr
131
134
132 def write(self, message):
135 def write(self, message):
133 self.stderr.write(message.encode('utf-8'))
136 self.stderr.write(message.encode('utf-8'))
134
137
135
138
136 def _handle_exception(result):
139 def _handle_exception(result):
137 exception_class = result.get('exception')
140 exception_class = result.get('exception')
138 exception_traceback = result.get('exception_traceback')
141 exception_traceback = result.get('exception_traceback')
139
142
140 if exception_traceback:
143 if exception_traceback:
141 log.error('Got traceback from remote call:%s', exception_traceback)
144 log.error('Got traceback from remote call:%s', exception_traceback)
142
145
143 if exception_class == 'HTTPLockedRC':
146 if exception_class == 'HTTPLockedRC':
144 raise exceptions.RepositoryLockedException()(*result['exception_args'])
147 raise exceptions.RepositoryLockedException()(*result['exception_args'])
145 elif exception_class == 'HTTPBranchProtected':
148 elif exception_class == 'HTTPBranchProtected':
146 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
149 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
147 elif exception_class == 'RepositoryError':
150 elif exception_class == 'RepositoryError':
148 raise exceptions.VcsException()(*result['exception_args'])
151 raise exceptions.VcsException()(*result['exception_args'])
149 elif exception_class:
152 elif exception_class:
150 raise Exception('Got remote exception "%s" with args "%s"' %
153 raise Exception(
151 (exception_class, result['exception_args']))
154 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
155 )
152
156
153
157
154 def _get_hooks_client(extras):
158 def _get_hooks_client(extras):
155 hooks_uri = extras.get('hooks_uri')
159 hooks_uri = extras.get('hooks_uri')
156 is_shadow_repo = extras.get('is_shadow_repo')
160 is_shadow_repo = extras.get('is_shadow_repo')
157 if hooks_uri:
161 if hooks_uri:
158 return HooksHttpClient(extras['hooks_uri'])
162 return HooksHttpClient(extras['hooks_uri'])
159 elif is_shadow_repo:
163 elif is_shadow_repo:
160 return HooksShadowRepoClient()
164 return HooksShadowRepoClient()
161 else:
165 else:
162 return HooksDummyClient(extras['hooks_module'])
166 return HooksDummyClient(extras['hooks_module'])
163
167
164
168
165 def _call_hook(hook_name, extras, writer):
169 def _call_hook(hook_name, extras, writer):
166 hooks_client = _get_hooks_client(extras)
170 hooks_client = _get_hooks_client(extras)
167 log.debug('Hooks, using client:%s', hooks_client)
171 log.debug('Hooks, using client:%s', hooks_client)
168 result = hooks_client(hook_name, extras)
172 result = hooks_client(hook_name, extras)
169 log.debug('Hooks got result: %s', result)
173 log.debug('Hooks got result: %s', result)
170
171 _handle_exception(result)
174 _handle_exception(result)
172 writer.write(result['output'])
175 writer.write(result['output'])
173
176
174 return result['status']
177 return result['status']
175
178
176
179
177 def _extras_from_ui(ui):
180 def _extras_from_ui(ui):
178 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
181 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
179 if not hook_data:
182 if not hook_data:
180 # maybe it's inside environ ?
183 # maybe it's inside environ ?
181 env_hook_data = os.environ.get('RC_SCM_DATA')
184 env_hook_data = os.environ.get('RC_SCM_DATA')
182 if env_hook_data:
185 if env_hook_data:
183 hook_data = env_hook_data
186 hook_data = env_hook_data
184
187
185 extras = {}
188 extras = {}
186 if hook_data:
189 if hook_data:
187 extras = json.loads(hook_data)
190 extras = json.loads(hook_data)
188 return extras
191 return extras
189
192
190
193
191 def _rev_range_hash(repo, node, check_heads=False):
194 def _rev_range_hash(repo, node, check_heads=False):
192 from vcsserver.hgcompat import get_ctx
195 from vcsserver.hgcompat import get_ctx
193
196
194 commits = []
197 commits = []
195 revs = []
198 revs = []
196 start = get_ctx(repo, node).rev()
199 start = get_ctx(repo, node).rev()
197 end = len(repo)
200 end = len(repo)
198 for rev in range(start, end):
201 for rev in range(start, end):
199 revs.append(rev)
202 revs.append(rev)
200 ctx = get_ctx(repo, rev)
203 ctx = get_ctx(repo, rev)
201 commit_id = mercurial.node.hex(ctx.node())
204 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
202 branch = ctx.branch()
205 branch = safe_str(ctx.branch())
203 commits.append((commit_id, branch))
206 commits.append((commit_id, branch))
204
207
205 parent_heads = []
208 parent_heads = []
206 if check_heads:
209 if check_heads:
207 parent_heads = _check_heads(repo, start, end, revs)
210 parent_heads = _check_heads(repo, start, end, revs)
208 return commits, parent_heads
211 return commits, parent_heads
209
212
210
213
211 def _check_heads(repo, start, end, commits):
214 def _check_heads(repo, start, end, commits):
212 from vcsserver.hgcompat import get_ctx
215 from vcsserver.hgcompat import get_ctx
213 changelog = repo.changelog
216 changelog = repo.changelog
214 parents = set()
217 parents = set()
215
218
216 for new_rev in commits:
219 for new_rev in commits:
217 for p in changelog.parentrevs(new_rev):
220 for p in changelog.parentrevs(new_rev):
218 if p == mercurial.node.nullrev:
221 if p == mercurial.node.nullrev:
219 continue
222 continue
220 if p < start:
223 if p < start:
221 parents.add(p)
224 parents.add(p)
222
225
223 for p in parents:
226 for p in parents:
224 branch = get_ctx(repo, p).branch()
227 branch = get_ctx(repo, p).branch()
225 # The heads descending from that parent, on the same branch
228 # The heads descending from that parent, on the same branch
226 parent_heads = set([p])
229 parent_heads = {p}
227 reachable = set([p])
230 reachable = {p}
228 for x in range(p + 1, end):
231 for x in range(p + 1, end):
229 if get_ctx(repo, x).branch() != branch:
232 if get_ctx(repo, x).branch() != branch:
230 continue
233 continue
231 for pp in changelog.parentrevs(x):
234 for pp in changelog.parentrevs(x):
232 if pp in reachable:
235 if pp in reachable:
233 reachable.add(x)
236 reachable.add(x)
234 parent_heads.discard(pp)
237 parent_heads.discard(pp)
235 parent_heads.add(x)
238 parent_heads.add(x)
236 # More than one head? Suggest merging
239 # More than one head? Suggest merging
237 if len(parent_heads) > 1:
240 if len(parent_heads) > 1:
238 return list(parent_heads)
241 return list(parent_heads)
239
242
240 return []
243 return []
241
244
242
245
243 def _get_git_env():
246 def _get_git_env():
244 env = {}
247 env = {}
245 for k, v in os.environ.items():
248 for k, v in os.environ.items():
246 if k.startswith('GIT'):
249 if k.startswith('GIT'):
247 env[k] = v
250 env[k] = v
248
251
249 # serialized version
252 # serialized version
250 return [(k, v) for k, v in env.items()]
253 return [(k, v) for k, v in env.items()]
251
254
252
255
253 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
256 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
254 env = {}
257 env = {}
255 for k, v in os.environ.items():
258 for k, v in os.environ.items():
256 if k.startswith('HG'):
259 if k.startswith('HG'):
257 env[k] = v
260 env[k] = v
258
261
259 env['HG_NODE'] = old_rev
262 env['HG_NODE'] = old_rev
260 env['HG_NODE_LAST'] = new_rev
263 env['HG_NODE_LAST'] = new_rev
261 env['HG_TXNID'] = txnid
264 env['HG_TXNID'] = txnid
262 env['HG_PENDING'] = repo_path
265 env['HG_PENDING'] = repo_path
263
266
264 return [(k, v) for k, v in env.items()]
267 return [(k, v) for k, v in env.items()]
265
268
266
269
267 def repo_size(ui, repo, **kwargs):
270 def repo_size(ui, repo, **kwargs):
268 extras = _extras_from_ui(ui)
271 extras = _extras_from_ui(ui)
269 return _call_hook('repo_size', extras, HgMessageWriter(ui))
272 return _call_hook('repo_size', extras, HgMessageWriter(ui))
270
273
271
274
272 def pre_pull(ui, repo, **kwargs):
275 def pre_pull(ui, repo, **kwargs):
273 extras = _extras_from_ui(ui)
276 extras = _extras_from_ui(ui)
274 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
277 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
275
278
276
279
277 def pre_pull_ssh(ui, repo, **kwargs):
280 def pre_pull_ssh(ui, repo, **kwargs):
278 extras = _extras_from_ui(ui)
281 extras = _extras_from_ui(ui)
279 if extras and extras.get('SSH'):
282 if extras and extras.get('SSH'):
280 return pre_pull(ui, repo, **kwargs)
283 return pre_pull(ui, repo, **kwargs)
281 return 0
284 return 0
282
285
283
286
284 def post_pull(ui, repo, **kwargs):
287 def post_pull(ui, repo, **kwargs):
285 extras = _extras_from_ui(ui)
288 extras = _extras_from_ui(ui)
286 return _call_hook('post_pull', extras, HgMessageWriter(ui))
289 return _call_hook('post_pull', extras, HgMessageWriter(ui))
287
290
288
291
289 def post_pull_ssh(ui, repo, **kwargs):
292 def post_pull_ssh(ui, repo, **kwargs):
290 extras = _extras_from_ui(ui)
293 extras = _extras_from_ui(ui)
291 if extras and extras.get('SSH'):
294 if extras and extras.get('SSH'):
292 return post_pull(ui, repo, **kwargs)
295 return post_pull(ui, repo, **kwargs)
293 return 0
296 return 0
294
297
295
298
296 def pre_push(ui, repo, node=None, **kwargs):
299 def pre_push(ui, repo, node=None, **kwargs):
297 """
300 """
298 Mercurial pre_push hook
301 Mercurial pre_push hook
299 """
302 """
300 extras = _extras_from_ui(ui)
303 extras = _extras_from_ui(ui)
301 detect_force_push = extras.get('detect_force_push')
304 detect_force_push = extras.get('detect_force_push')
302
305
303 rev_data = []
306 rev_data = []
304 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
307 hook_type: str = safe_str(kwargs.get('hooktype'))
308
309 if node and hook_type == 'pretxnchangegroup':
305 branches = collections.defaultdict(list)
310 branches = collections.defaultdict(list)
306 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
311 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
307 for commit_id, branch in commits:
312 for commit_id, branch in commits:
308 branches[branch].append(commit_id)
313 branches[branch].append(commit_id)
309
314
310 for branch, commits in branches.items():
315 for branch, commits in branches.items():
311 old_rev = kwargs.get('node_last') or commits[0]
316 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
312 rev_data.append({
317 rev_data.append({
313 'total_commits': len(commits),
318 'total_commits': len(commits),
314 'old_rev': old_rev,
319 'old_rev': old_rev,
315 'new_rev': commits[-1],
320 'new_rev': commits[-1],
316 'ref': '',
321 'ref': '',
317 'type': 'branch',
322 'type': 'branch',
318 'name': branch,
323 'name': branch,
319 })
324 })
320
325
321 for push_ref in rev_data:
326 for push_ref in rev_data:
322 push_ref['multiple_heads'] = _heads
327 push_ref['multiple_heads'] = _heads
323
328
324 repo_path = os.path.join(
329 repo_path = os.path.join(
325 extras.get('repo_store', ''), extras.get('repository', ''))
330 extras.get('repo_store', ''), extras.get('repository', ''))
326 push_ref['hg_env'] = _get_hg_env(
331 push_ref['hg_env'] = _get_hg_env(
327 old_rev=push_ref['old_rev'],
332 old_rev=push_ref['old_rev'],
328 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
333 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
329 repo_path=repo_path)
334 repo_path=repo_path)
330
335
331 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
336 extras['hook_type'] = hook_type or 'pre_push'
332 extras['commit_ids'] = rev_data
337 extras['commit_ids'] = rev_data
333
338
334 return _call_hook('pre_push', extras, HgMessageWriter(ui))
339 return _call_hook('pre_push', extras, HgMessageWriter(ui))
335
340
336
341
337 def pre_push_ssh(ui, repo, node=None, **kwargs):
342 def pre_push_ssh(ui, repo, node=None, **kwargs):
338 extras = _extras_from_ui(ui)
343 extras = _extras_from_ui(ui)
339 if extras.get('SSH'):
344 if extras.get('SSH'):
340 return pre_push(ui, repo, node, **kwargs)
345 return pre_push(ui, repo, node, **kwargs)
341
346
342 return 0
347 return 0
343
348
344
349
345 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
350 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
346 """
351 """
347 Mercurial pre_push hook for SSH
352 Mercurial pre_push hook for SSH
348 """
353 """
349 extras = _extras_from_ui(ui)
354 extras = _extras_from_ui(ui)
350 if extras.get('SSH'):
355 if extras.get('SSH'):
351 permission = extras['SSH_PERMISSIONS']
356 permission = extras['SSH_PERMISSIONS']
352
357
353 if 'repository.write' == permission or 'repository.admin' == permission:
358 if 'repository.write' == permission or 'repository.admin' == permission:
354 return 0
359 return 0
355
360
356 # non-zero ret code
361 # non-zero ret code
357 return 1
362 return 1
358
363
359 return 0
364 return 0
360
365
361
366
362 def post_push(ui, repo, node, **kwargs):
367 def post_push(ui, repo, node, **kwargs):
363 """
368 """
364 Mercurial post_push hook
369 Mercurial post_push hook
365 """
370 """
366 extras = _extras_from_ui(ui)
371 extras = _extras_from_ui(ui)
367
372
368 commit_ids = []
373 commit_ids = []
369 branches = []
374 branches = []
370 bookmarks = []
375 bookmarks = []
371 tags = []
376 tags = []
377 hook_type: str = safe_str(kwargs.get('hooktype'))
372
378
373 commits, _heads = _rev_range_hash(repo, node)
379 commits, _heads = _rev_range_hash(repo, node)
374 for commit_id, branch in commits:
380 for commit_id, branch in commits:
375 commit_ids.append(commit_id)
381 commit_ids.append(commit_id)
376 if branch not in branches:
382 if branch not in branches:
377 branches.append(branch)
383 branches.append(branch)
378
384
379 if hasattr(ui, '_rc_pushkey_branches'):
385 if hasattr(ui, '_rc_pushkey_bookmarks'):
380 bookmarks = ui._rc_pushkey_branches
386 bookmarks = ui._rc_pushkey_bookmarks
381
387
382 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
388 extras['hook_type'] = hook_type or 'post_push'
383 extras['commit_ids'] = commit_ids
389 extras['commit_ids'] = commit_ids
390
384 extras['new_refs'] = {
391 extras['new_refs'] = {
385 'branches': branches,
392 'branches': branches,
386 'bookmarks': bookmarks,
393 'bookmarks': bookmarks,
387 'tags': tags
394 'tags': tags
388 }
395 }
389
396
390 return _call_hook('post_push', extras, HgMessageWriter(ui))
397 return _call_hook('post_push', extras, HgMessageWriter(ui))
391
398
392
399
393 def post_push_ssh(ui, repo, node, **kwargs):
400 def post_push_ssh(ui, repo, node, **kwargs):
394 """
401 """
395 Mercurial post_push hook for SSH
402 Mercurial post_push hook for SSH
396 """
403 """
397 if _extras_from_ui(ui).get('SSH'):
404 if _extras_from_ui(ui).get('SSH'):
398 return post_push(ui, repo, node, **kwargs)
405 return post_push(ui, repo, node, **kwargs)
399 return 0
406 return 0
400
407
401
408
402 def key_push(ui, repo, **kwargs):
409 def key_push(ui, repo, **kwargs):
403 from vcsserver.hgcompat import get_ctx
410 from vcsserver.hgcompat import get_ctx
404 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
411
412 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
405 # store new bookmarks in our UI object propagated later to post_push
413 # store new bookmarks in our UI object propagated later to post_push
406 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
414 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
407 return
415 return
408
416
409
417
410 # backward compat
418 # backward compat
411 log_pull_action = post_pull
419 log_pull_action = post_pull
412
420
413 # backward compat
421 # backward compat
414 log_push_action = post_push
422 log_push_action = post_push
415
423
416
424
417 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
425 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
418 """
426 """
419 Old hook name: keep here for backward compatibility.
427 Old hook name: keep here for backward compatibility.
420
428
421 This is only required when the installed git hooks are not upgraded.
429 This is only required when the installed git hooks are not upgraded.
422 """
430 """
423 pass
431 pass
424
432
425
433
426 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
434 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
427 """
435 """
428 Old hook name: keep here for backward compatibility.
436 Old hook name: keep here for backward compatibility.
429
437
430 This is only required when the installed git hooks are not upgraded.
438 This is only required when the installed git hooks are not upgraded.
431 """
439 """
432 pass
440 pass
433
441
434
442
435 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
443 @dataclasses.dataclass
444 class HookResponse:
445 status: int
446 output: str
436
447
437
448
438 def git_pre_pull(extras):
449 def git_pre_pull(extras) -> HookResponse:
439 """
450 """
440 Pre pull hook.
451 Pre pull hook.
441
452
442 :param extras: dictionary containing the keys defined in simplevcs
453 :param extras: dictionary containing the keys defined in simplevcs
443 :type extras: dict
454 :type extras: dict
444
455
445 :return: status code of the hook. 0 for success.
456 :return: status code of the hook. 0 for success.
446 :rtype: int
457 :rtype: int
447 """
458 """
448
459
449 if 'pull' not in extras['hooks']:
460 if 'pull' not in extras['hooks']:
450 return HookResponse(0, '')
461 return HookResponse(0, '')
451
462
452 stdout = io.BytesIO()
463 stdout = io.StringIO()
453 try:
464 try:
454 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
465 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
455
466
456 except Exception as error:
467 except Exception as error:
457 log.exception('Failed to call pre_pull hook')
468 log.exception('Failed to call pre_pull hook')
458 status = 128
469 status_code = 128
459 stdout.write(safe_bytes(f'ERROR: {error}\n'))
470 stdout.write(f'ERROR: {error}\n')
460
471
461 return HookResponse(status, stdout.getvalue())
472 return HookResponse(status_code, stdout.getvalue())
462
473
463
474
464 def git_post_pull(extras):
475 def git_post_pull(extras) -> HookResponse:
465 """
476 """
466 Post pull hook.
477 Post pull hook.
467
478
468 :param extras: dictionary containing the keys defined in simplevcs
479 :param extras: dictionary containing the keys defined in simplevcs
469 :type extras: dict
480 :type extras: dict
470
481
471 :return: status code of the hook. 0 for success.
482 :return: status code of the hook. 0 for success.
472 :rtype: int
483 :rtype: int
473 """
484 """
474 if 'pull' not in extras['hooks']:
485 if 'pull' not in extras['hooks']:
475 return HookResponse(0, '')
486 return HookResponse(0, '')
476
487
477 stdout = io.BytesIO()
488 stdout = io.StringIO()
478 try:
489 try:
479 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
490 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
480 except Exception as error:
491 except Exception as error:
481 status = 128
492 status = 128
482 stdout.write(safe_bytes(f'ERROR: {error}\n'))
493 stdout.write(f'ERROR: {error}\n')
483
494
484 return HookResponse(status, stdout.getvalue())
495 return HookResponse(status, stdout.getvalue())
485
496
486
497
487 def _parse_git_ref_lines(revision_lines):
498 def _parse_git_ref_lines(revision_lines):
488 rev_data = []
499 rev_data = []
489 for revision_line in revision_lines or []:
500 for revision_line in revision_lines or []:
490 old_rev, new_rev, ref = revision_line.strip().split(' ')
501 old_rev, new_rev, ref = revision_line.strip().split(' ')
491 ref_data = ref.split('/', 2)
502 ref_data = ref.split('/', 2)
492 if ref_data[1] in ('tags', 'heads'):
503 if ref_data[1] in ('tags', 'heads'):
493 rev_data.append({
504 rev_data.append({
494 # NOTE(marcink):
505 # NOTE(marcink):
495 # we're unable to tell total_commits for git at this point
506 # we're unable to tell total_commits for git at this point
496 # but we set the variable for consistency with GIT
507 # but we set the variable for consistency with GIT
497 'total_commits': -1,
508 'total_commits': -1,
498 'old_rev': old_rev,
509 'old_rev': old_rev,
499 'new_rev': new_rev,
510 'new_rev': new_rev,
500 'ref': ref,
511 'ref': ref,
501 'type': ref_data[1],
512 'type': ref_data[1],
502 'name': ref_data[2],
513 'name': ref_data[2],
503 })
514 })
504 return rev_data
515 return rev_data
505
516
506
517
507 def git_pre_receive(unused_repo_path, revision_lines, env):
518 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
508 """
519 """
509 Pre push hook.
520 Pre push hook.
510
521
511 :param extras: dictionary containing the keys defined in simplevcs
512 :type extras: dict
513
514 :return: status code of the hook. 0 for success.
522 :return: status code of the hook. 0 for success.
515 :rtype: int
516 """
523 """
517 extras = json.loads(env['RC_SCM_DATA'])
524 extras = json.loads(env['RC_SCM_DATA'])
518 rev_data = _parse_git_ref_lines(revision_lines)
525 rev_data = _parse_git_ref_lines(revision_lines)
519 if 'push' not in extras['hooks']:
526 if 'push' not in extras['hooks']:
520 return 0
527 return 0
521 empty_commit_id = '0' * 40
528 empty_commit_id = '0' * 40
522
529
523 detect_force_push = extras.get('detect_force_push')
530 detect_force_push = extras.get('detect_force_push')
524
531
525 for push_ref in rev_data:
532 for push_ref in rev_data:
526 # store our git-env which holds the temp store
533 # store our git-env which holds the temp store
527 push_ref['git_env'] = _get_git_env()
534 push_ref['git_env'] = _get_git_env()
528 push_ref['pruned_sha'] = ''
535 push_ref['pruned_sha'] = ''
529 if not detect_force_push:
536 if not detect_force_push:
530 # don't check for forced-push when we don't need to
537 # don't check for forced-push when we don't need to
531 continue
538 continue
532
539
533 type_ = push_ref['type']
540 type_ = push_ref['type']
534 new_branch = push_ref['old_rev'] == empty_commit_id
541 new_branch = push_ref['old_rev'] == empty_commit_id
535 delete_branch = push_ref['new_rev'] == empty_commit_id
542 delete_branch = push_ref['new_rev'] == empty_commit_id
536 if type_ == 'heads' and not (new_branch or delete_branch):
543 if type_ == 'heads' and not (new_branch or delete_branch):
537 old_rev = push_ref['old_rev']
544 old_rev = push_ref['old_rev']
538 new_rev = push_ref['new_rev']
545 new_rev = push_ref['new_rev']
539 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
546 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
540 stdout, stderr = subprocessio.run_command(
547 stdout, stderr = subprocessio.run_command(
541 cmd, env=os.environ.copy())
548 cmd, env=os.environ.copy())
542 # means we're having some non-reachable objects, this forced push was used
549 # means we're having some non-reachable objects, this forced push was used
543 if stdout:
550 if stdout:
544 push_ref['pruned_sha'] = stdout.splitlines()
551 push_ref['pruned_sha'] = stdout.splitlines()
545
552
546 extras['hook_type'] = 'pre_receive'
553 extras['hook_type'] = 'pre_receive'
547 extras['commit_ids'] = rev_data
554 extras['commit_ids'] = rev_data
548 return _call_hook('pre_push', extras, GitMessageWriter())
555
556 stdout = sys.stdout
557 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
558
559 return status_code
549
560
550
561
551 def git_post_receive(unused_repo_path, revision_lines, env):
562 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
552 """
563 """
553 Post push hook.
564 Post push hook.
554
565
555 :param extras: dictionary containing the keys defined in simplevcs
556 :type extras: dict
557
558 :return: status code of the hook. 0 for success.
566 :return: status code of the hook. 0 for success.
559 :rtype: int
560 """
567 """
561 extras = json.loads(env['RC_SCM_DATA'])
568 extras = json.loads(env['RC_SCM_DATA'])
562 if 'push' not in extras['hooks']:
569 if 'push' not in extras['hooks']:
563 return 0
570 return 0
564
571
565 rev_data = _parse_git_ref_lines(revision_lines)
572 rev_data = _parse_git_ref_lines(revision_lines)
566
573
567 git_revs = []
574 git_revs = []
568
575
569 # N.B.(skreft): it is ok to just call git, as git before calling a
576 # N.B.(skreft): it is ok to just call git, as git before calling a
570 # subcommand sets the PATH environment variable so that it point to the
577 # subcommand sets the PATH environment variable so that it point to the
571 # correct version of the git executable.
578 # correct version of the git executable.
572 empty_commit_id = '0' * 40
579 empty_commit_id = '0' * 40
573 branches = []
580 branches = []
574 tags = []
581 tags = []
575 for push_ref in rev_data:
582 for push_ref in rev_data:
576 type_ = push_ref['type']
583 type_ = push_ref['type']
577
584
578 if type_ == 'heads':
585 if type_ == 'heads':
586 # starting new branch case
579 if push_ref['old_rev'] == empty_commit_id:
587 if push_ref['old_rev'] == empty_commit_id:
580 # starting new branch case
588 push_ref_name = push_ref['name']
581 if push_ref['name'] not in branches:
582 branches.append(push_ref['name'])
583
589
584 # Fix up head revision if needed
590 if push_ref_name not in branches:
585 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
591 branches.append(push_ref_name)
592
593 need_head_set = ''
594 with Repository(os.getcwd()) as repo:
586 try:
595 try:
587 subprocessio.run_command(cmd, env=os.environ.copy())
596 repo.head
588 except Exception:
597 except pygit2.GitError:
589 push_ref_name = push_ref['name']
598 need_head_set = f'refs/heads/{push_ref_name}'
590 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', '"HEAD"', f'"refs/heads/{push_ref_name}"']
599
600 if need_head_set:
601 repo.set_head(need_head_set)
591 print(f"Setting default branch to {push_ref_name}")
602 print(f"Setting default branch to {push_ref_name}")
592 subprocessio.run_command(cmd, env=os.environ.copy())
593
603
594 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
604 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
595 '--format=%(refname)', 'refs/heads/*']
596 stdout, stderr = subprocessio.run_command(
605 stdout, stderr = subprocessio.run_command(
597 cmd, env=os.environ.copy())
606 cmd, env=os.environ.copy())
598 heads = stdout
607 heads = safe_str(stdout)
599 heads = heads.replace(push_ref['ref'], '')
608 heads = heads.replace(push_ref['ref'], '')
600 heads = ' '.join(head for head
609 heads = ' '.join(head for head
601 in heads.splitlines() if head) or '.'
610 in heads.splitlines() if head) or '.'
602 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
611 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
603 '--pretty=format:%H', '--', push_ref['new_rev'],
612 '--pretty=format:%H', '--', push_ref['new_rev'],
604 '--not', heads]
613 '--not', heads]
605 stdout, stderr = subprocessio.run_command(
614 stdout, stderr = subprocessio.run_command(
606 cmd, env=os.environ.copy())
615 cmd, env=os.environ.copy())
607 git_revs.extend(stdout.splitlines())
616 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
617
618 # delete branch case
608 elif push_ref['new_rev'] == empty_commit_id:
619 elif push_ref['new_rev'] == empty_commit_id:
609 # delete branch case
610 git_revs.append('delete_branch=>%s' % push_ref['name'])
620 git_revs.append('delete_branch=>%s' % push_ref['name'])
611 else:
621 else:
612 if push_ref['name'] not in branches:
622 if push_ref['name'] not in branches:
613 branches.append(push_ref['name'])
623 branches.append(push_ref['name'])
614
624
615 cmd = [settings.GIT_EXECUTABLE, 'log',
625 cmd = [settings.GIT_EXECUTABLE, 'log',
616 '{old_rev}..{new_rev}'.format(**push_ref),
626 '{old_rev}..{new_rev}'.format(**push_ref),
617 '--reverse', '--pretty=format:%H']
627 '--reverse', '--pretty=format:%H']
618 stdout, stderr = subprocessio.run_command(
628 stdout, stderr = subprocessio.run_command(
619 cmd, env=os.environ.copy())
629 cmd, env=os.environ.copy())
620 git_revs.extend(stdout.splitlines())
630 # we get bytes from stdout, we need str to be consistent
631 log_revs = list(map(ascii_str, stdout.splitlines()))
632 git_revs.extend(log_revs)
633
634 # Pure pygit2 impl. but still 2-3x slower :/
635 # results = []
636 #
637 # with Repository(os.getcwd()) as repo:
638 # repo_new_rev = repo[push_ref['new_rev']]
639 # repo_old_rev = repo[push_ref['old_rev']]
640 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
641 #
642 # for commit in walker:
643 # if commit.id == repo_old_rev.id:
644 # break
645 # results.append(commit.id.hex)
646 # # reverse the order, can't use GIT_SORT_REVERSE
647 # log_revs = results[::-1]
648
621 elif type_ == 'tags':
649 elif type_ == 'tags':
622 if push_ref['name'] not in tags:
650 if push_ref['name'] not in tags:
623 tags.append(push_ref['name'])
651 tags.append(push_ref['name'])
624 git_revs.append('tag=>%s' % push_ref['name'])
652 git_revs.append('tag=>%s' % push_ref['name'])
625
653
626 extras['hook_type'] = 'post_receive'
654 extras['hook_type'] = 'post_receive'
627 extras['commit_ids'] = git_revs
655 extras['commit_ids'] = git_revs
628 extras['new_refs'] = {
656 extras['new_refs'] = {
629 'branches': branches,
657 'branches': branches,
630 'bookmarks': [],
658 'bookmarks': [],
631 'tags': tags,
659 'tags': tags,
632 }
660 }
633
661
662 stdout = sys.stdout
663
634 if 'repo_size' in extras['hooks']:
664 if 'repo_size' in extras['hooks']:
635 try:
665 try:
636 _call_hook('repo_size', extras, GitMessageWriter())
666 _call_hook('repo_size', extras, GitMessageWriter(stdout))
637 except Exception:
667 except Exception:
638 pass
668 pass
639
669
640 return _call_hook('post_push', extras, GitMessageWriter())
670 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
671 return status_code
641
672
642
673
643 def _get_extras_from_txn_id(path, txn_id):
674 def _get_extras_from_txn_id(path, txn_id):
644 extras = {}
675 extras = {}
645 try:
676 try:
646 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
677 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
647 '-t', txn_id,
678 '-t', txn_id,
648 '--revprop', path, 'rc-scm-extras']
679 '--revprop', path, 'rc-scm-extras']
649 stdout, stderr = subprocessio.run_command(
680 stdout, stderr = subprocessio.run_command(
650 cmd, env=os.environ.copy())
681 cmd, env=os.environ.copy())
651 extras = json.loads(base64.urlsafe_b64decode(stdout))
682 extras = json.loads(base64.urlsafe_b64decode(stdout))
652 except Exception:
683 except Exception:
653 log.exception('Failed to extract extras info from txn_id')
684 log.exception('Failed to extract extras info from txn_id')
654
685
655 return extras
686 return extras
656
687
657
688
658 def _get_extras_from_commit_id(commit_id, path):
689 def _get_extras_from_commit_id(commit_id, path):
659 extras = {}
690 extras = {}
660 try:
691 try:
661 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
692 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
662 '-r', commit_id,
693 '-r', commit_id,
663 '--revprop', path, 'rc-scm-extras']
694 '--revprop', path, 'rc-scm-extras']
664 stdout, stderr = subprocessio.run_command(
695 stdout, stderr = subprocessio.run_command(
665 cmd, env=os.environ.copy())
696 cmd, env=os.environ.copy())
666 extras = json.loads(base64.urlsafe_b64decode(stdout))
697 extras = json.loads(base64.urlsafe_b64decode(stdout))
667 except Exception:
698 except Exception:
668 log.exception('Failed to extract extras info from commit_id')
699 log.exception('Failed to extract extras info from commit_id')
669
700
670 return extras
701 return extras
671
702
672
703
673 def svn_pre_commit(repo_path, commit_data, env):
704 def svn_pre_commit(repo_path, commit_data, env):
674 path, txn_id = commit_data
705 path, txn_id = commit_data
675 branches = []
706 branches = []
676 tags = []
707 tags = []
677
708
678 if env.get('RC_SCM_DATA'):
709 if env.get('RC_SCM_DATA'):
679 extras = json.loads(env['RC_SCM_DATA'])
710 extras = json.loads(env['RC_SCM_DATA'])
680 else:
711 else:
681 # fallback method to read from TXN-ID stored data
712 # fallback method to read from TXN-ID stored data
682 extras = _get_extras_from_txn_id(path, txn_id)
713 extras = _get_extras_from_txn_id(path, txn_id)
683 if not extras:
714 if not extras:
684 return 0
715 return 0
685
716
686 extras['hook_type'] = 'pre_commit'
717 extras['hook_type'] = 'pre_commit'
687 extras['commit_ids'] = [txn_id]
718 extras['commit_ids'] = [txn_id]
688 extras['txn_id'] = txn_id
719 extras['txn_id'] = txn_id
689 extras['new_refs'] = {
720 extras['new_refs'] = {
690 'total_commits': 1,
721 'total_commits': 1,
691 'branches': branches,
722 'branches': branches,
692 'bookmarks': [],
723 'bookmarks': [],
693 'tags': tags,
724 'tags': tags,
694 }
725 }
695
726
696 return _call_hook('pre_push', extras, SvnMessageWriter())
727 return _call_hook('pre_push', extras, SvnMessageWriter())
697
728
698
729
699 def svn_post_commit(repo_path, commit_data, env):
730 def svn_post_commit(repo_path, commit_data, env):
700 """
731 """
701 commit_data is path, rev, txn_id
732 commit_data is path, rev, txn_id
702 """
733 """
703 if len(commit_data) == 3:
734 if len(commit_data) == 3:
704 path, commit_id, txn_id = commit_data
735 path, commit_id, txn_id = commit_data
705 elif len(commit_data) == 2:
736 elif len(commit_data) == 2:
706 log.error('Failed to extract txn_id from commit_data using legacy method. '
737 log.error('Failed to extract txn_id from commit_data using legacy method. '
707 'Some functionality might be limited')
738 'Some functionality might be limited')
708 path, commit_id = commit_data
739 path, commit_id = commit_data
709 txn_id = None
740 txn_id = None
710
741
711 branches = []
742 branches = []
712 tags = []
743 tags = []
713
744
714 if env.get('RC_SCM_DATA'):
745 if env.get('RC_SCM_DATA'):
715 extras = json.loads(env['RC_SCM_DATA'])
746 extras = json.loads(env['RC_SCM_DATA'])
716 else:
747 else:
717 # fallback method to read from TXN-ID stored data
748 # fallback method to read from TXN-ID stored data
718 extras = _get_extras_from_commit_id(commit_id, path)
749 extras = _get_extras_from_commit_id(commit_id, path)
719 if not extras:
750 if not extras:
720 return 0
751 return 0
721
752
722 extras['hook_type'] = 'post_commit'
753 extras['hook_type'] = 'post_commit'
723 extras['commit_ids'] = [commit_id]
754 extras['commit_ids'] = [commit_id]
724 extras['txn_id'] = txn_id
755 extras['txn_id'] = txn_id
725 extras['new_refs'] = {
756 extras['new_refs'] = {
726 'branches': branches,
757 'branches': branches,
727 'bookmarks': [],
758 'bookmarks': [],
728 'tags': tags,
759 'tags': tags,
729 'total_commits': 1,
760 'total_commits': 1,
730 }
761 }
731
762
732 if 'repo_size' in extras['hooks']:
763 if 'repo_size' in extras['hooks']:
733 try:
764 try:
734 _call_hook('repo_size', extras, SvnMessageWriter())
765 _call_hook('repo_size', extras, SvnMessageWriter())
735 except Exception:
766 except Exception:
736 pass
767 pass
737
768
738 return _call_hook('post_push', extras, SvnMessageWriter())
769 return _call_hook('post_push', extras, SvnMessageWriter())
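
For context on the Python 3 oriented changes above: HookResponse moves from a namedtuple to a dataclass, and the Git message writers now operate on text (str) buffers rather than bytes. The sketch below shows how those new pieces fit together; the class names come from the diff above, but the reduced bodies and the buffer handling here are illustrative only.

    import io
    import sys
    import dataclasses

    @dataclasses.dataclass
    class HookResponse:
        status: int
        output: str

    class GitMessageWriter:
        """Writer that sends str messages to a text stream (sys.stdout by default)."""

        def __init__(self, stdout=None):
            self.stdout = stdout or sys.stdout

        def write(self, message: str):
            self.stdout.write(message)

    # Under Python 3 the hook output is collected into a str buffer, not bytes.
    buffer = io.StringIO()
    GitMessageWriter(buffer).write('ERROR: something went wrong\n')
    response = HookResponse(status=128, output=buffer.getvalue())
    assert response.output.startswith('ERROR:')
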
@@ -1,413 +1,414 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import dulwich.protocol
24 import dulwich.protocol
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
25 from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver.lib.rc_json import json
28 from vcsserver.lib.rc_json import json
29 from vcsserver import hooks, subprocessio
29 from vcsserver import hooks, subprocessio
30 from vcsserver.str_utils import ascii_bytes
30 from vcsserver.str_utils import ascii_bytes
31
31
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 class FileWrapper(object):
36 class FileWrapper(object):
37 """File wrapper that ensures how much data is read from it."""
37 """File wrapper that ensures how much data is read from it."""
38
38
39 def __init__(self, fd, content_length):
39 def __init__(self, fd, content_length):
40 self.fd = fd
40 self.fd = fd
41 self.content_length = content_length
41 self.content_length = content_length
42 self.remain = content_length
42 self.remain = content_length
43
43
44 def read(self, size):
44 def read(self, size):
45 if size <= self.remain:
45 if size <= self.remain:
46 try:
46 try:
47 data = self.fd.read(size)
47 data = self.fd.read(size)
48 except socket.error:
48 except socket.error:
49 raise IOError(self)
49 raise IOError(self)
50 self.remain -= size
50 self.remain -= size
51 elif self.remain:
51 elif self.remain:
52 data = self.fd.read(self.remain)
52 data = self.fd.read(self.remain)
53 self.remain = 0
53 self.remain = 0
54 else:
54 else:
55 data = None
55 data = None
56 return data
56 return data
57
57
58 def __repr__(self):
58 def __repr__(self):
59 return '<FileWrapper %s len: %s, read: %s>' % (
59 return '<FileWrapper %s len: %s, read: %s>' % (
60 self.fd, self.content_length, self.content_length - self.remain
60 self.fd, self.content_length, self.content_length - self.remain
61 )
61 )
62
62
63
63
64 class GitRepository(object):
64 class GitRepository(object):
65 """WSGI app for handling Git smart protocol endpoints."""
65 """WSGI app for handling Git smart protocol endpoints."""
66
66
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
67 git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
69 valid_accepts = frozenset(('application/x-{}-result'.format(c) for c in commands))
69 valid_accepts = frozenset(('application/x-{}-result'.format(c) for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
73 b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' +
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 FLUSH_PACKET = b"0000"
76 FLUSH_PACKET = b"0000"
77
77
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
78 SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K))
79
79
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
80 def __init__(self, repo_name, content_path, git_path, update_server_info, extras):
81 files = frozenset(f.lower() for f in os.listdir(content_path))
81 files = frozenset(f.lower() for f in os.listdir(content_path))
82 valid_dir_signature = self.git_folder_signature.issubset(files)
82 valid_dir_signature = self.git_folder_signature.issubset(files)
83
83
84 if not valid_dir_signature:
84 if not valid_dir_signature:
85 raise OSError('%s missing git signature' % content_path)
85 raise OSError('%s missing git signature' % content_path)
86
86
87 self.content_path = content_path
87 self.content_path = content_path
88 self.repo_name = repo_name
88 self.repo_name = repo_name
89 self.extras = extras
89 self.extras = extras
90 self.git_path = git_path
90 self.git_path = git_path
91 self.update_server_info = update_server_info
91 self.update_server_info = update_server_info
92
92
93 def _get_fixedpath(self, path):
93 def _get_fixedpath(self, path):
94 """
94 """
95 Small fix for repo_path
95 Small fix for repo_path
96
96
97 :param path:
97 :param path:
98 """
98 """
99 path = path.split(self.repo_name, 1)[-1]
99 path = path.split(self.repo_name, 1)[-1]
100 if path.startswith('.git'):
100 if path.startswith('.git'):
101 # for bare repos we still get the .git prefix inside, we skip it
101 # for bare repos we still get the .git prefix inside, we skip it
102 # here, and remove from the service command
102 # here, and remove from the service command
103 path = path[4:]
103 path = path[4:]
104
104
105 return path.strip('/')
105 return path.strip('/')
106
106
107 def inforefs(self, request, unused_environ):
107 def inforefs(self, request, unused_environ):
108 """
108 """
109 WSGI Response producer for HTTP GET Git Smart
109 WSGI Response producer for HTTP GET Git Smart
110 HTTP /info/refs request.
110 HTTP /info/refs request.
111 """
111 """
112
112
113 git_command = request.GET.get('service')
113 git_command = request.GET.get('service')
114 if git_command not in self.commands:
114 if git_command not in self.commands:
115 log.debug('command %s not allowed', git_command)
115 log.debug('command %s not allowed', git_command)
116 return exc.HTTPForbidden()
116 return exc.HTTPForbidden()
117
117
118 # please, resist the urge to add '\n' to git capture and increment
118 # please, resist the urge to add '\n' to git capture and increment
119 # line count by 1.
119 # line count by 1.
120 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
120 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
121 # a part of protocol.
121 # a part of protocol.
122 # The code in Git client not only does NOT need '\n', but actually
122 # The code in Git client not only does NOT need '\n', but actually
123 # blows up if you sprinkle "flush" (0000) as "0001\n".
123 # blows up if you sprinkle "flush" (0000) as "0001\n".
124 # It reads binary, per number of bytes specified.
124 # It reads binary, per number of bytes specified.
125 # if you do add '\n' as part of data, count it.
125 # if you do add '\n' as part of data, count it.
126 server_advert = '# service=%s\n' % git_command
126 server_advert = '# service=%s\n' % git_command
127 packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
127 packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
128 try:
128 try:
129 gitenv = dict(os.environ)
129 gitenv = dict(os.environ)
130 # forget all configs
130 # forget all configs
131 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
131 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
132 command = [self.git_path, git_command[4:], '--stateless-rpc',
132 command = [self.git_path, git_command[4:], '--stateless-rpc',
133 '--advertise-refs', self.content_path]
133 '--advertise-refs', self.content_path]
134 out = subprocessio.SubprocessIOChunker(
134 out = subprocessio.SubprocessIOChunker(
135 command,
135 command,
136 env=gitenv,
136 env=gitenv,
137 starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
137 starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET],
138 shell=False
138 shell=False
139 )
139 )
140 except OSError:
140 except OSError:
141 log.exception('Error processing command')
141 log.exception('Error processing command')
142 raise exc.HTTPExpectationFailed()
142 raise exc.HTTPExpectationFailed()
143
143
144 resp = Response()
144 resp = Response()
145 resp.content_type = f'application/x-{git_command}-advertisement'
145 resp.content_type = f'application/x-{git_command}-advertisement'
146 resp.charset = None
146 resp.charset = None
147 resp.app_iter = out
147 resp.app_iter = out
148
148
149 return resp
149 return resp
150
150
151 def _get_want_capabilities(self, request):
151 def _get_want_capabilities(self, request):
152 """Read the capabilities found in the first want line of the request."""
152 """Read the capabilities found in the first want line of the request."""
153 pos = request.body_file_seekable.tell()
153 pos = request.body_file_seekable.tell()
154 first_line = request.body_file_seekable.readline()
154 first_line = request.body_file_seekable.readline()
155 request.body_file_seekable.seek(pos)
155 request.body_file_seekable.seek(pos)
156
156
157 return frozenset(
157 return frozenset(
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
159
159
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
161 """
161 """
162 Construct a response with an empty PACK file.
162 Construct a response with an empty PACK file.
163
163
164 We use an empty PACK file, as that would trigger the failure of the pull
164 We use an empty PACK file, as that would trigger the failure of the pull
165 or clone command.
165 or clone command.
166
166
167 We also print in the error output a message explaining why the command
167 We also print in the error output a message explaining why the command
168 was aborted.
168 was aborted.
169
169
170 If additionally, the user is accepting messages we send them the output
170 If additionally, the user is accepting messages we send them the output
171 of the pre-pull hook.
171 of the pre-pull hook.
172
172
173 Note that for clients not supporting side-band we just send them the
173 Note that for clients not supporting side-band we just send them the
174 emtpy PACK file.
174 emtpy PACK file.
175 """
175 """
176
176
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
177 if self.SIDE_BAND_CAPS.intersection(capabilities):
178 response = []
178 response = []
179 proto = dulwich.protocol.Protocol(None, response.append)
179 proto = dulwich.protocol.Protocol(None, response.append)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
180 proto.write_pkt_line(dulwich.protocol.NAK_LINE)
181
181
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
182 self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities)
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
183 # N.B.(skreft): Do not change the sideband channel to 3, as that
184 # produces a fatal error in the client:
184 # produces a fatal error in the client:
185 # fatal: error in sideband demultiplexer
185 # fatal: error in sideband demultiplexer
186 proto.write_sideband(
186 proto.write_sideband(
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
187 dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS,
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
188 ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True))
189 proto.write_sideband(
189 proto.write_sideband(
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
190 dulwich.protocol.SIDE_BAND_CHANNEL_DATA,
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
191 ascii_bytes(self.EMPTY_PACK, allow_bytes=True))
192
192
193 # writes b"0000" as default
193 # writes b"0000" as default
194 proto.write_pkt_line(None)
194 proto.write_pkt_line(None)
195
195
196 return response
196 return response
197 else:
197 else:
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
198 return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)]
199
199
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
200 def _build_post_pull_response(self, response, capabilities, start_message, end_message):
201 """
201 """
202 Given a list response we inject the post-pull messages.
202 Given a list response we inject the post-pull messages.
203
203
204 We only inject the messages if the client supports sideband, and the
204 We only inject the messages if the client supports sideband, and the
205 response has the format:
205 response has the format:
206 0008NAK\n...0000
206 0008NAK\n...0000
207
207
208 Note that we do not honour the no-progress capability: git sends it by
208 Note that we do not honour the no-progress capability: git sends it by
209 default, so respecting it would effectively block all messages.
209 default, so respecting it would effectively block all messages.
210 """
210 """
211
211
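# Illustrative transformation when the client supports side-band:
#   original : 0008NAK\n<pack data>0000
#   injected : 0008NAK\n<start_message pkt-lines><pack data><end_message pkt-lines>0000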
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
212 if not self.SIDE_BAND_CAPS.intersection(capabilities):
213 return response
213 return response
214
214
215 if not start_message and not end_message:
215 if not start_message and not end_message:
216 return response
216 return response
217
217
218 try:
218 try:
219 iter(response)
219 iter(response)
220 # response is iterable, we can continue
220 # response is iterable, we can continue
221 except TypeError:
221 except TypeError:
222 raise TypeError(f'response must be an iterator: got {type(response)}')
222 raise TypeError(f'response must be an iterator: got {type(response)}')
223 if isinstance(response, (list, tuple)):
223 if isinstance(response, (list, tuple)):
224 raise TypeError(f'response must be an iterator: got {type(response)}')
224 raise TypeError(f'response must be an iterator: got {type(response)}')
225
225
226 def injected_response():
226 def injected_response():
227
227
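# Look-ahead iteration (sketch of the intent): one item is kept buffered so
# that the chunk ending with the flush packet can be detected and end_message
# injected right before that final flush.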
228 do_loop = 1
228 do_loop = 1
229 header_injected = 0
229 header_injected = 0
230 next_item = None
230 next_item = None
231 has_item = False
231 has_item = False
232 while do_loop:
232 while do_loop:
233
233
234 try:
234 try:
235 next_item = next(response)
235 next_item = next(response)
236 except StopIteration:
236 except StopIteration:
237 do_loop = 0
237 do_loop = 0
238
238
239 if has_item:
239 if has_item:
240 # last item! alter it now
240 # last item! alter it now
241 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
241 if do_loop == 0 and item.endswith(self.FLUSH_PACKET):
242 new_response = [item[:-4]]
242 new_response = [item[:-4]]
243 new_response.extend(self._get_messages(end_message, capabilities))
243 new_response.extend(self._get_messages(end_message, capabilities))
244 new_response.append(self.FLUSH_PACKET)
244 new_response.append(self.FLUSH_PACKET)
245 item = b''.join(new_response)
245 item = b''.join(new_response)
246
246
247 yield item
247 yield item
248 has_item = True
248 has_item = True
249 item = next_item
249 item = next_item
250
250
251 # alter item if it's the initial chunk
251 # alter item if it's the initial chunk
252 if not header_injected and item.startswith(b'0008NAK\n'):
252 if not header_injected and item.startswith(b'0008NAK\n'):
253 new_response = [b'0008NAK\n']
253 new_response = [b'0008NAK\n']
254 new_response.extend(self._get_messages(start_message, capabilities))
254 new_response.extend(self._get_messages(start_message, capabilities))
255 new_response.append(item[8:])
255 new_response.append(item[8:])
256 item = b''.join(new_response)
256 item = b''.join(new_response)
257 header_injected = 1
257 header_injected = 1
258
258
259 return injected_response()
259 return injected_response()
260
260
261 def _write_sideband_to_proto(self, proto, data, capabilities):
261 def _write_sideband_to_proto(self, proto, data, capabilities):
262 """
262 """
263 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
263 Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS
264
264
265 We do not use dulwich's write_sideband directly as it only supports
265 We do not use dulwich's write_sideband directly as it only supports
266 side-band-64k.
266 side-band-64k.
267 """
267 """
268 if not data:
268 if not data:
269 return
269 return
270
270
271 # N.B.(skreft): The values below are explained in the pack protocol
271 # N.B.(skreft): The values below are explained in the pack protocol
272 # documentation, section Packfile Data.
272 # documentation, section Packfile Data.
273 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
273 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
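# In short (assuming the documented limits): a side-band-64k pkt-line may
# carry at most 65520 bytes and a plain side-band pkt-line at most 1000 bytes;
# minus the 4-byte length header and the 1-byte channel number that leaves
# 65515 / 995 bytes of payload per chunk.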
274 if CAPABILITY_SIDE_BAND_64K in capabilities:
274 if CAPABILITY_SIDE_BAND_64K in capabilities:
275 chunk_size = 65515
275 chunk_size = 65515
276 elif CAPABILITY_SIDE_BAND in capabilities:
276 elif CAPABILITY_SIDE_BAND in capabilities:
277 chunk_size = 995
277 chunk_size = 995
278 else:
278 else:
279 return
279 return
280
280
281 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
281 chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
282
282
283 for chunk in chunker:
283 for chunk in chunker:
284 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
284 proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True))
285
285
286 def _get_messages(self, data, capabilities):
286 def _get_messages(self, data, capabilities):
287 """Return a list with packets for sending data in sideband number 2."""
287 """Return a list with packets for sending data in sideband number 2."""
288 response = []
288 response = []
289 proto = dulwich.protocol.Protocol(None, response.append)
289 proto = dulwich.protocol.Protocol(None, response.append)
290
290
291 self._write_sideband_to_proto(proto, data, capabilities)
291 self._write_sideband_to_proto(proto, data, capabilities)
292
292
293 return response
293 return response
294
294
295 def backend(self, request, environ):
295 def backend(self, request, environ):
296 """
296 """
297 WSGI Response producer for HTTP POST Git Smart HTTP requests.
297 WSGI Response producer for HTTP POST Git Smart HTTP requests.
298 Reads commands and data from HTTP POST's body.
298 Reads commands and data from HTTP POST's body.
299 Returns an iterator object with the contents of the git command's
299 Returns an iterator object with the contents of the git command's
300 response written to stdout.
300 response written to stdout.
301 """
301 """
302 # TODO(skreft): think how we could detect an HTTPLockedException, as
302 # TODO(skreft): think how we could detect an HTTPLockedException, as
303 # we probably want to have the same mechanism used by mercurial and
303 # we probably want to have the same mechanism used by mercurial and
304 # simplevcs.
304 # simplevcs.
305 # For that we would need to parse the output of the command looking for
305 # For that we would need to parse the output of the command looking for
306 # some signs of the HTTPLockedError, parse the data and reraise it in
306 # some signs of the HTTPLockedError, parse the data and reraise it in
307 # pygrack. However, that would interfere with the streaming.
307 # pygrack. However, that would interfere with the streaming.
308 #
308 #
309 # Now the output of a blocked push is:
309 # Now the output of a blocked push is:
310 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
310 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
311 # POST git-receive-pack (1047 bytes)
311 # POST git-receive-pack (1047 bytes)
312 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
312 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
313 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
313 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
314 # ! [remote rejected] master -> master (pre-receive hook declined)
314 # ! [remote rejected] master -> master (pre-receive hook declined)
315 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
315 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
316
316
317 git_command = self._get_fixedpath(request.path_info)
317 git_command = self._get_fixedpath(request.path_info)
318 if git_command not in self.commands:
318 if git_command not in self.commands:
319 log.debug('command %s not allowed', git_command)
319 log.debug('command %s not allowed', git_command)
320 return exc.HTTPForbidden()
320 return exc.HTTPForbidden()
321
321
322 capabilities = None
322 capabilities = None
323 if git_command == 'git-upload-pack':
323 if git_command == 'git-upload-pack':
324 capabilities = self._get_want_capabilities(request)
324 capabilities = self._get_want_capabilities(request)
325
325
326 if 'CONTENT_LENGTH' in environ:
326 if 'CONTENT_LENGTH' in environ:
327 inputstream = FileWrapper(request.body_file_seekable,
327 inputstream = FileWrapper(request.body_file_seekable,
328 request.content_length)
328 request.content_length)
329 else:
329 else:
330 inputstream = request.body_file_seekable
330 inputstream = request.body_file_seekable
331
331
332 resp = Response()
332 resp = Response()
333 resp.content_type = 'application/x-{}-result'.format(git_command)
333 resp.content_type = 'application/x-{}-result'.format(git_command)
334 resp.charset = None
334 resp.charset = None
335
335
336 pre_pull_messages = ''
336 pre_pull_messages = ''
337 # Upload-pack == clone
337 # Upload-pack == clone
338 if git_command == 'git-upload-pack':
338 if git_command == 'git-upload-pack':
339 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
339 hook_response = hooks.git_pre_pull(self.extras)
340 if status != 0:
340 if hook_response.status != 0:
341 pre_pull_messages = hook_response.output
341 resp.app_iter = self._build_failed_pre_pull_response(
342 resp.app_iter = self._build_failed_pre_pull_response(
342 capabilities, pre_pull_messages)
343 capabilities, pre_pull_messages)
343 return resp
344 return resp
344
345
345 gitenv = dict(os.environ)
346 gitenv = dict(os.environ)
346 # forget all configs
347 # forget all configs
347 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
348 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
348 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
349 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
349 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
350 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
350 self.content_path]
351 self.content_path]
351 log.debug('handling cmd %s', cmd)
352 log.debug('handling cmd %s', cmd)
352
353
353 out = subprocessio.SubprocessIOChunker(
354 out = subprocessio.SubprocessIOChunker(
354 cmd,
355 cmd,
355 input_stream=inputstream,
356 input_stream=inputstream,
356 env=gitenv,
357 env=gitenv,
357 cwd=self.content_path,
358 cwd=self.content_path,
358 shell=False,
359 shell=False,
359 fail_on_stderr=False,
360 fail_on_stderr=False,
360 fail_on_return_code=False
361 fail_on_return_code=False
361 )
362 )
362
363
363 if self.update_server_info and git_command == 'git-receive-pack':
364 if self.update_server_info and git_command == 'git-receive-pack':
364 # We need to fully consume the iterator here, as the
365 # We need to fully consume the iterator here, as the
365 # update-server-info command needs to be run after the push.
366 # update-server-info command needs to be run after the push.
366 out = list(out)
367 out = list(out)
367
368
368 # Updating refs manually after each push.
369 # Updating refs manually after each push.
369 # This is required as some clients are exposing Git repos internally
370 # This is required as some clients are exposing Git repos internally
370 # with the dumb protocol.
371 # with the dumb protocol.
371 cmd = [self.git_path, 'update-server-info']
372 cmd = [self.git_path, 'update-server-info']
372 log.debug('handling cmd %s', cmd)
373 log.debug('handling cmd %s', cmd)
373 output = subprocessio.SubprocessIOChunker(
374 output = subprocessio.SubprocessIOChunker(
374 cmd,
375 cmd,
375 input_stream=inputstream,
376 input_stream=inputstream,
376 env=gitenv,
377 env=gitenv,
377 cwd=self.content_path,
378 cwd=self.content_path,
378 shell=False,
379 shell=False,
379 fail_on_stderr=False,
380 fail_on_stderr=False,
380 fail_on_return_code=False
381 fail_on_return_code=False
381 )
382 )
382 # Consume all the output so the subprocess finishes
383 # Consume all the output so the subprocess finishes
383 for _ in output:
384 for _ in output:
384 pass
385 pass
385
386
386 # Upload-pack == clone
387 # Upload-pack == clone
387 if git_command == 'git-upload-pack':
388 if git_command == 'git-upload-pack':
388 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
389 hook_response = hooks.git_post_pull(self.extras)
389
390 post_pull_messages = hook_response.output
390 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
391 resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages)
391 else:
392 else:
392 resp.app_iter = out
393 resp.app_iter = out
393
394
394 return resp
395 return resp
395
396
396 def __call__(self, environ, start_response):
397 def __call__(self, environ, start_response):
397 request = Request(environ)
398 request = Request(environ)
398 _path = self._get_fixedpath(request.path_info)
399 _path = self._get_fixedpath(request.path_info)
399 if _path.startswith('info/refs'):
400 if _path.startswith('info/refs'):
400 app = self.inforefs
401 app = self.inforefs
401 else:
402 else:
402 app = self.backend
403 app = self.backend
403
404
404 try:
405 try:
405 resp = app(request, environ)
406 resp = app(request, environ)
406 except exc.HTTPException as error:
407 except exc.HTTPException as error:
407 log.exception('HTTP Error')
408 log.exception('HTTP Error')
408 resp = error
409 resp = error
409 except Exception:
410 except Exception:
410 log.exception('Unknown error')
411 log.exception('Unknown error')
411 resp = exc.HTTPInternalServerError()
412 resp = exc.HTTPInternalServerError()
412
413
413 return resp(environ, start_response)
414 return resp(environ, start_response)
@@ -1,1374 +1,1375 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib.request, urllib.parse, urllib.error
25 import urllib.request
26 import urllib.request, urllib.error, urllib.parse
26 import urllib.parse
27 import urllib.error
27 from functools import wraps
28 from functools import wraps
28
29
29 import more_itertools
30 import more_itertools
30 import pygit2
31 import pygit2
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
33 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
34 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
36 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
38 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
39 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
41 from dulwich.server import update_server_info
41
42
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_str, ascii_bytes
44 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo, BinaryEnvelope
45 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo, BinaryEnvelope
45 from vcsserver.hgcompat import (
46 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
49 from vcsserver.vcs_base import RemoteBase
49
50
50 DIR_STAT = stat.S_IFDIR
51 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
52 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
53 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = b'^{}'
54 PEELED_REF_MARKER = b'^{}'
54 HEAD_MARKER = b'HEAD'
55 HEAD_MARKER = b'HEAD'
55
56
56 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
57
58
58
59
59 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
60 """Converts Dulwich exceptions to something neutral."""
61 """Converts Dulwich exceptions to something neutral."""
61
62
62 @wraps(func)
63 @wraps(func)
63 def wrapper(*args, **kwargs):
64 def wrapper(*args, **kwargs):
64 try:
65 try:
65 return func(*args, **kwargs)
66 return func(*args, **kwargs)
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 exc = exceptions.LookupException(org_exc=e)
68 exc = exceptions.LookupException(org_exc=e)
68 raise exc(safe_str(e))
69 raise exc(safe_str(e))
69 except (HangupException, UnexpectedCommandError) as e:
70 except (HangupException, UnexpectedCommandError) as e:
70 exc = exceptions.VcsException(org_exc=e)
71 exc = exceptions.VcsException(org_exc=e)
71 raise exc(safe_str(e))
72 raise exc(safe_str(e))
72 except Exception as e:
73 except Exception:
73 # NOTE(marcink): because of how dulwich handles some exceptions
74 # NOTE(marcink): because of how dulwich handles some exceptions
74 # (KeyError on empty repos), we cannot track this and catch all
75 # (KeyError on empty repos), we cannot track this and catch all
75 # exceptions; these are exceptions coming from other handlers
76 # exceptions; these are exceptions coming from other handlers
76 #if not hasattr(e, '_vcs_kind'):
77 #if not hasattr(e, '_vcs_kind'):
77 #log.exception("Unhandled exception in git remote call")
78 #log.exception("Unhandled exception in git remote call")
78 #raise_from_original(exceptions.UnhandledException)
79 #raise_from_original(exceptions.UnhandledException)
79 raise
80 raise
80 return wrapper
81 return wrapper
81
82
82
83
83 class Repo(DulwichRepo):
84 class Repo(DulwichRepo):
84 """
85 """
85 A wrapper for dulwich Repo class.
86 A wrapper for dulwich Repo class.
86
87
87 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
88 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
88 "Too many open files" error. We need to close all opened file descriptors
89 "Too many open files" error. We need to close all opened file descriptors
89 once the repo object is destroyed.
90 once the repo object is destroyed.
90 """
91 """
91 def __del__(self):
92 def __del__(self):
92 if hasattr(self, 'object_store'):
93 if hasattr(self, 'object_store'):
93 self.close()
94 self.close()
94
95
95
96
96 class Repository(LibGit2Repo):
97 class Repository(LibGit2Repo):
97
98
98 def __enter__(self):
99 def __enter__(self):
99 return self
100 return self
100
101
101 def __exit__(self, exc_type, exc_val, exc_tb):
102 def __exit__(self, exc_type, exc_val, exc_tb):
102 self.free()
103 self.free()
103
104
104
105
105 class GitFactory(RepoFactory):
106 class GitFactory(RepoFactory):
106 repo_type = 'git'
107 repo_type = 'git'
107
108
108 def _create_repo(self, wire, create, use_libgit2=False):
109 def _create_repo(self, wire, create, use_libgit2=False):
109 if use_libgit2:
110 if use_libgit2:
110 return Repository(safe_bytes(wire['path']))
111 repo = Repository(safe_bytes(wire['path']))
111 else:
112 else:
112 # dulwich mode
113 # dulwich mode
113 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
114 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
114 repo = Repo(repo_path)
115 repo = Repo(repo_path)
115
116
116 log.debug('repository created: got GIT object: %s', repo)
117 log.debug('repository created: got GIT object: %s', repo)
117 return repo
118 return repo
118
119
119 def repo(self, wire, create=False, use_libgit2=False):
120 def repo(self, wire, create=False, use_libgit2=False):
120 """
121 """
121 Get a repository instance for the given path.
122 Get a repository instance for the given path.
122 """
123 """
123 return self._create_repo(wire, create, use_libgit2)
124 return self._create_repo(wire, create, use_libgit2)
124
125
125 def repo_libgit2(self, wire):
126 def repo_libgit2(self, wire):
126 return self.repo(wire, use_libgit2=True)
127 return self.repo(wire, use_libgit2=True)
127
128
128
129
129 class GitRemote(RemoteBase):
130 class GitRemote(RemoteBase):
130
131
131 def __init__(self, factory):
132 def __init__(self, factory):
132 self._factory = factory
133 self._factory = factory
133 self._bulk_methods = {
134 self._bulk_methods = {
134 "date": self.date,
135 "date": self.date,
135 "author": self.author,
136 "author": self.author,
136 "branch": self.branch,
137 "branch": self.branch,
137 "message": self.message,
138 "message": self.message,
138 "parents": self.parents,
139 "parents": self.parents,
139 "_commit": self.revision,
140 "_commit": self.revision,
140 }
141 }
141
142
142 def _wire_to_config(self, wire):
143 def _wire_to_config(self, wire):
143 if 'config' in wire:
144 if 'config' in wire:
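# e.g. (illustrative) [('vcs_git_lfs', 'store_location', '/store')]
# becomes {'vcs_git_lfs_store_location': '/store'}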
144 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
145 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
145 return {}
146 return {}
146
147
147 def _remote_conf(self, config):
148 def _remote_conf(self, config):
148 params = [
149 params = [
149 '-c', 'core.askpass=""',
150 '-c', 'core.askpass=""',
150 ]
151 ]
151 ssl_cert_dir = config.get('vcs_ssl_dir')
152 ssl_cert_dir = config.get('vcs_ssl_dir')
152 if ssl_cert_dir:
153 if ssl_cert_dir:
153 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
154 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
154 return params
155 return params
155
156
156 @reraise_safe_exceptions
157 @reraise_safe_exceptions
157 def discover_git_version(self):
158 def discover_git_version(self):
158 stdout, _ = self.run_git_command(
159 stdout, _ = self.run_git_command(
159 {}, ['--version'], _bare=True, _safe=True)
160 {}, ['--version'], _bare=True, _safe=True)
160 prefix = b'git version'
161 prefix = b'git version'
161 if stdout.startswith(prefix):
162 if stdout.startswith(prefix):
162 stdout = stdout[len(prefix):]
163 stdout = stdout[len(prefix):]
163 return safe_str(stdout.strip())
164 return safe_str(stdout.strip())
164
165
165 @reraise_safe_exceptions
166 @reraise_safe_exceptions
166 def is_empty(self, wire):
167 def is_empty(self, wire):
167 repo_init = self._factory.repo_libgit2(wire)
168 repo_init = self._factory.repo_libgit2(wire)
168 with repo_init as repo:
169 with repo_init as repo:
169
170
170 try:
171 try:
171 has_head = repo.head.name
172 has_head = repo.head.name
172 if has_head:
173 if has_head:
173 return False
174 return False
174
175
175 # NOTE(marcink): check again using more expensive method
176 # NOTE(marcink): check again using more expensive method
176 return repo.is_empty
177 return repo.is_empty
177 except Exception:
178 except Exception:
178 pass
179 pass
179
180
180 return True
181 return True
181
182
182 @reraise_safe_exceptions
183 @reraise_safe_exceptions
183 def assert_correct_path(self, wire):
184 def assert_correct_path(self, wire):
184 cache_on, context_uid, repo_id = self._cache_on(wire)
185 cache_on, context_uid, repo_id = self._cache_on(wire)
185 region = self._region(wire)
186 region = self._region(wire)
186
187
187 @region.conditional_cache_on_arguments(condition=cache_on)
188 @region.conditional_cache_on_arguments(condition=cache_on)
188 def _assert_correct_path(_context_uid, _repo_id):
189 def _assert_correct_path(_context_uid, _repo_id):
189 try:
190 try:
190 repo_init = self._factory.repo_libgit2(wire)
191 repo_init = self._factory.repo_libgit2(wire)
191 with repo_init as repo:
192 with repo_init as repo:
192 pass
193 pass
193 except pygit2.GitError:
194 except pygit2.GitError:
194 path = wire.get('path')
195 path = wire.get('path')
195 tb = traceback.format_exc()
196 tb = traceback.format_exc()
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 return False
198 return False
198
199
199 return True
200 return True
200 return _assert_correct_path(context_uid, repo_id)
201 return _assert_correct_path(context_uid, repo_id)
201
202
202 @reraise_safe_exceptions
203 @reraise_safe_exceptions
203 def bare(self, wire):
204 def bare(self, wire):
204 repo_init = self._factory.repo_libgit2(wire)
205 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
206 with repo_init as repo:
206 return repo.is_bare
207 return repo.is_bare
207
208
208 @reraise_safe_exceptions
209 @reraise_safe_exceptions
209 def blob_as_pretty_string(self, wire, sha):
210 def blob_as_pretty_string(self, wire, sha):
210 repo_init = self._factory.repo_libgit2(wire)
211 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
212 with repo_init as repo:
212 blob_obj = repo[sha]
213 blob_obj = repo[sha]
213 return BinaryEnvelope(blob_obj.data)
214 return BinaryEnvelope(blob_obj.data)
214
215
215 @reraise_safe_exceptions
216 @reraise_safe_exceptions
216 def blob_raw_length(self, wire, sha):
217 def blob_raw_length(self, wire, sha):
217 cache_on, context_uid, repo_id = self._cache_on(wire)
218 cache_on, context_uid, repo_id = self._cache_on(wire)
218 region = self._region(wire)
219 region = self._region(wire)
219
220
220 @region.conditional_cache_on_arguments(condition=cache_on)
221 @region.conditional_cache_on_arguments(condition=cache_on)
221 def _blob_raw_length(_repo_id, _sha):
222 def _blob_raw_length(_repo_id, _sha):
222
223
223 repo_init = self._factory.repo_libgit2(wire)
224 repo_init = self._factory.repo_libgit2(wire)
224 with repo_init as repo:
225 with repo_init as repo:
225 blob = repo[sha]
226 blob = repo[sha]
226 return blob.size
227 return blob.size
227
228
228 return _blob_raw_length(repo_id, sha)
229 return _blob_raw_length(repo_id, sha)
229
230
230 def _parse_lfs_pointer(self, raw_content):
231 def _parse_lfs_pointer(self, raw_content):
231 spec_string = b'version https://git-lfs.github.com/spec'
232 spec_string = b'version https://git-lfs.github.com/spec'
232 if raw_content and raw_content.startswith(spec_string):
233 if raw_content and raw_content.startswith(spec_string):
233
234
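# An LFS pointer file has this shape (illustrative; <...> are placeholders):
#   version https://git-lfs.github.com/spec/v1
#   oid sha256:<64 hex digits>
#   size <object size in bytes>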
234 pattern = re.compile(rb"""
235 pattern = re.compile(rb"""
235 (?:\n)?
236 (?:\n)?
236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
237 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
238 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
238 ^size[ ](?P<oid_size>[0-9]+)\n
239 ^size[ ](?P<oid_size>[0-9]+)\n
239 (?:\n)?
240 (?:\n)?
240 """, re.VERBOSE | re.MULTILINE)
241 """, re.VERBOSE | re.MULTILINE)
241 match = pattern.match(raw_content)
242 match = pattern.match(raw_content)
242 if match:
243 if match:
243 return match.groupdict()
244 return match.groupdict()
244
245
245 return {}
246 return {}
246
247
247 @reraise_safe_exceptions
248 @reraise_safe_exceptions
248 def is_large_file(self, wire, commit_id):
249 def is_large_file(self, wire, commit_id):
249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 cache_on, context_uid, repo_id = self._cache_on(wire)
250 region = self._region(wire)
251 region = self._region(wire)
251
252
252 @region.conditional_cache_on_arguments(condition=cache_on)
253 @region.conditional_cache_on_arguments(condition=cache_on)
253 def _is_large_file(_repo_id, _sha):
254 def _is_large_file(_repo_id, _sha):
254 repo_init = self._factory.repo_libgit2(wire)
255 repo_init = self._factory.repo_libgit2(wire)
255 with repo_init as repo:
256 with repo_init as repo:
256 blob = repo[commit_id]
257 blob = repo[commit_id]
257 if blob.is_binary:
258 if blob.is_binary:
258 return {}
259 return {}
259
260
260 return self._parse_lfs_pointer(blob.data)
261 return self._parse_lfs_pointer(blob.data)
261
262
262 return _is_large_file(repo_id, commit_id)
263 return _is_large_file(repo_id, commit_id)
263
264
264 @reraise_safe_exceptions
265 @reraise_safe_exceptions
265 def is_binary(self, wire, tree_id):
266 def is_binary(self, wire, tree_id):
266 cache_on, context_uid, repo_id = self._cache_on(wire)
267 cache_on, context_uid, repo_id = self._cache_on(wire)
267 region = self._region(wire)
268 region = self._region(wire)
268
269
269 @region.conditional_cache_on_arguments(condition=cache_on)
270 @region.conditional_cache_on_arguments(condition=cache_on)
270 def _is_binary(_repo_id, _tree_id):
271 def _is_binary(_repo_id, _tree_id):
271 repo_init = self._factory.repo_libgit2(wire)
272 repo_init = self._factory.repo_libgit2(wire)
272 with repo_init as repo:
273 with repo_init as repo:
273 blob_obj = repo[tree_id]
274 blob_obj = repo[tree_id]
274 return blob_obj.is_binary
275 return blob_obj.is_binary
275
276
276 return _is_binary(repo_id, tree_id)
277 return _is_binary(repo_id, tree_id)
277
278
278 @reraise_safe_exceptions
279 @reraise_safe_exceptions
279 def md5_hash(self, wire, tree_id):
280 def md5_hash(self, wire, tree_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 cache_on, context_uid, repo_id = self._cache_on(wire)
281 region = self._region(wire)
282 region = self._region(wire)
282
283
283 @region.conditional_cache_on_arguments(condition=cache_on)
284 @region.conditional_cache_on_arguments(condition=cache_on)
284 def _md5_hash(_repo_id, _tree_id):
285 def _md5_hash(_repo_id, _tree_id):
285 return ''
286 return ''
286
287
287 return _md5_hash(repo_id, tree_id)
288 return _md5_hash(repo_id, tree_id)
288
289
289 @reraise_safe_exceptions
290 @reraise_safe_exceptions
290 def in_largefiles_store(self, wire, oid):
291 def in_largefiles_store(self, wire, oid):
291 conf = self._wire_to_config(wire)
292 conf = self._wire_to_config(wire)
292 repo_init = self._factory.repo_libgit2(wire)
293 repo_init = self._factory.repo_libgit2(wire)
293 with repo_init as repo:
294 with repo_init as repo:
294 repo_name = repo.path
295 repo_name = repo.path
295
296
296 store_location = conf.get('vcs_git_lfs_store_location')
297 store_location = conf.get('vcs_git_lfs_store_location')
297 if store_location:
298 if store_location:
298
299
299 store = LFSOidStore(
300 store = LFSOidStore(
300 oid=oid, repo=repo_name, store_location=store_location)
301 oid=oid, repo=repo_name, store_location=store_location)
301 return store.has_oid()
302 return store.has_oid()
302
303
303 return False
304 return False
304
305
305 @reraise_safe_exceptions
306 @reraise_safe_exceptions
306 def store_path(self, wire, oid):
307 def store_path(self, wire, oid):
307 conf = self._wire_to_config(wire)
308 conf = self._wire_to_config(wire)
308 repo_init = self._factory.repo_libgit2(wire)
309 repo_init = self._factory.repo_libgit2(wire)
309 with repo_init as repo:
310 with repo_init as repo:
310 repo_name = repo.path
311 repo_name = repo.path
311
312
312 store_location = conf.get('vcs_git_lfs_store_location')
313 store_location = conf.get('vcs_git_lfs_store_location')
313 if store_location:
314 if store_location:
314 store = LFSOidStore(
315 store = LFSOidStore(
315 oid=oid, repo=repo_name, store_location=store_location)
316 oid=oid, repo=repo_name, store_location=store_location)
316 return store.oid_path
317 return store.oid_path
317 raise ValueError('Unable to fetch oid with path {}'.format(oid))
318 raise ValueError('Unable to fetch oid with path {}'.format(oid))
318
319
319 @reraise_safe_exceptions
320 @reraise_safe_exceptions
320 def bulk_request(self, wire, rev, pre_load):
321 def bulk_request(self, wire, rev, pre_load):
321 cache_on, context_uid, repo_id = self._cache_on(wire)
322 cache_on, context_uid, repo_id = self._cache_on(wire)
322 region = self._region(wire)
323 region = self._region(wire)
323
324
324 @region.conditional_cache_on_arguments(condition=cache_on)
325 @region.conditional_cache_on_arguments(condition=cache_on)
325 def _bulk_request(_repo_id, _rev, _pre_load):
326 def _bulk_request(_repo_id, _rev, _pre_load):
326 result = {}
327 result = {}
327 for attr in pre_load:
328 for attr in pre_load:
328 try:
329 try:
329 method = self._bulk_methods[attr]
330 method = self._bulk_methods[attr]
330 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
331 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
331 args = [wire, rev]
332 args = [wire, rev]
332 result[attr] = method(*args)
333 result[attr] = method(*args)
333 except KeyError as e:
334 except KeyError as e:
334 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
335 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
335 return result
336 return result
336
337
337 return _bulk_request(repo_id, rev, sorted(pre_load))
338 return _bulk_request(repo_id, rev, sorted(pre_load))
338
339
339 def _build_opener(self, url):
340 def _build_opener(self, url):
340 handlers = []
341 handlers = []
341 url_obj = url_parser(url)
342 url_obj = url_parser(url)
342 _, authinfo = url_obj.authinfo()
343 _, authinfo = url_obj.authinfo()
343
344
344 if authinfo:
345 if authinfo:
345 # create a password manager
346 # create a password manager
346 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
347 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
347 passmgr.add_password(*authinfo)
348 passmgr.add_password(*authinfo)
348
349
349 handlers.extend((httpbasicauthhandler(passmgr),
350 handlers.extend((httpbasicauthhandler(passmgr),
350 httpdigestauthhandler(passmgr)))
351 httpdigestauthhandler(passmgr)))
351
352
352 return urllib.request.build_opener(*handlers)
353 return urllib.request.build_opener(*handlers)
353
354
354 def _type_id_to_name(self, type_id: int):
355 def _type_id_to_name(self, type_id: int):
355 return {
356 return {
356 1: 'commit',
357 1: 'commit',
357 2: 'tree',
358 2: 'tree',
358 3: 'blob',
359 3: 'blob',
359 4: 'tag'
360 4: 'tag'
360 }[type_id]
361 }[type_id]
361
362
362 @reraise_safe_exceptions
363 @reraise_safe_exceptions
363 def check_url(self, url, config):
364 def check_url(self, url, config):
364 url_obj = url_parser(safe_bytes(url))
365 url_obj = url_parser(safe_bytes(url))
365 test_uri, _ = url_obj.authinfo()
366 test_uri, _ = url_obj.authinfo()
366 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
367 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
367 url_obj.query = obfuscate_qs(url_obj.query)
368 url_obj.query = obfuscate_qs(url_obj.query)
368 cleaned_uri = str(url_obj)
369 cleaned_uri = str(url_obj)
369 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
370 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
370
371
371 if not test_uri.endswith('info/refs'):
372 if not test_uri.endswith('info/refs'):
372 test_uri = test_uri.rstrip('/') + '/info/refs'
373 test_uri = test_uri.rstrip('/') + '/info/refs'
373
374
374 o = self._build_opener(url)
375 o = self._build_opener(url)
375 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
376 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
376
377
377 q = {"service": 'git-upload-pack'}
378 q = {"service": 'git-upload-pack'}
378 qs = '?%s' % urllib.parse.urlencode(q)
379 qs = '?%s' % urllib.parse.urlencode(q)
379 cu = "%s%s" % (test_uri, qs)
380 cu = "%s%s" % (test_uri, qs)
380 req = urllib.request.Request(cu, None, {})
381 req = urllib.request.Request(cu, None, {})
381
382
382 try:
383 try:
383 log.debug("Trying to open URL %s", cleaned_uri)
384 log.debug("Trying to open URL %s", cleaned_uri)
384 resp = o.open(req)
385 resp = o.open(req)
385 if resp.code != 200:
386 if resp.code != 200:
386 raise exceptions.URLError()('Return Code is not 200')
387 raise exceptions.URLError()('Return Code is not 200')
387 except Exception as e:
388 except Exception as e:
388 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
389 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
389 # means it cannot be cloned
390 # means it cannot be cloned
390 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
391 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
391
392
392 # now detect if it's proper git repo
393 # now detect if it's proper git repo
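# A smart-HTTP server replies with a pkt-line like
# b'001e# service=git-upload-pack\n', while an old dumb server returns plain
# '<40-hex sha>\trefs/...' lines (illustrative examples).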
393 gitdata = resp.read()
394 gitdata = resp.read()
394 if b'service=git-upload-pack' in gitdata:  # gitdata is bytes on py3
395 if b'service=git-upload-pack' in gitdata:  # gitdata is bytes on py3
395 pass
396 pass
396 elif re.findall(rb'[0-9a-fA-F]{40}\s+refs', gitdata):
397 elif re.findall(rb'[0-9a-fA-F]{40}\s+refs', gitdata):
397 # old style git can return some other format !
398 # old style git can return some other format !
398 pass
399 pass
399 else:
400 else:
400 raise exceptions.URLError()(
401 raise exceptions.URLError()(
401 "url [%s] does not look like an git" % (cleaned_uri,))
402 "url [%s] does not look like an git" % (cleaned_uri,))
402
403
403 return True
404 return True
404
405
405 @reraise_safe_exceptions
406 @reraise_safe_exceptions
406 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
407 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
407 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
408 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
408 remote_refs = self.pull(wire, url, apply_refs=False)
409 remote_refs = self.pull(wire, url, apply_refs=False)
409 repo = self._factory.repo(wire)
410 repo = self._factory.repo(wire)
410 if isinstance(valid_refs, list):
411 if isinstance(valid_refs, list):
411 valid_refs = tuple(valid_refs)
412 valid_refs = tuple(valid_refs)
412
413
413 for k in remote_refs:
414 for k in remote_refs:
414 # only parse heads/tags and skip so called deferred tags
415 # only parse heads/tags and skip so called deferred tags
415 if k.startswith(valid_refs) and not k.endswith(deferred):
416 if k.startswith(valid_refs) and not k.endswith(deferred):
416 repo[k] = remote_refs[k]
417 repo[k] = remote_refs[k]
417
418
418 if update_after_clone:
419 if update_after_clone:
419 # we want to checkout HEAD
420 # we want to checkout HEAD
420 repo["HEAD"] = remote_refs["HEAD"]
421 repo["HEAD"] = remote_refs["HEAD"]
421 index.build_index_from_tree(repo.path, repo.index_path(),
422 index.build_index_from_tree(repo.path, repo.index_path(),
422 repo.object_store, repo["HEAD"].tree)
423 repo.object_store, repo["HEAD"].tree)
423
424
424 @reraise_safe_exceptions
425 @reraise_safe_exceptions
425 def branch(self, wire, commit_id):
426 def branch(self, wire, commit_id):
426 cache_on, context_uid, repo_id = self._cache_on(wire)
427 cache_on, context_uid, repo_id = self._cache_on(wire)
427 region = self._region(wire)
428 region = self._region(wire)
428
429
429 @region.conditional_cache_on_arguments(condition=cache_on)
430 @region.conditional_cache_on_arguments(condition=cache_on)
430 def _branch(_context_uid, _repo_id, _commit_id):
431 def _branch(_context_uid, _repo_id, _commit_id):
431 regex = re.compile('^refs/heads')
432 regex = re.compile('^refs/heads')
432
433
433 def filter_with(ref):
434 def filter_with(ref):
434 return regex.match(ref[0]) and ref[1] == _commit_id
435 return regex.match(ref[0]) and ref[1] == _commit_id
435
436
436 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
437 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
437 return [x[0].split('refs/heads/')[-1] for x in branches]
438 return [x[0].split('refs/heads/')[-1] for x in branches]
438
439
439 return _branch(context_uid, repo_id, commit_id)
440 return _branch(context_uid, repo_id, commit_id)
440
441
441 @reraise_safe_exceptions
442 @reraise_safe_exceptions
442 def commit_branches(self, wire, commit_id):
443 def commit_branches(self, wire, commit_id):
443 cache_on, context_uid, repo_id = self._cache_on(wire)
444 cache_on, context_uid, repo_id = self._cache_on(wire)
444 region = self._region(wire)
445 region = self._region(wire)
445
446
446 @region.conditional_cache_on_arguments(condition=cache_on)
447 @region.conditional_cache_on_arguments(condition=cache_on)
447 def _commit_branches(_context_uid, _repo_id, _commit_id):
448 def _commit_branches(_context_uid, _repo_id, _commit_id):
448 repo_init = self._factory.repo_libgit2(wire)
449 repo_init = self._factory.repo_libgit2(wire)
449 with repo_init as repo:
450 with repo_init as repo:
450 branches = [x for x in repo.branches.with_commit(_commit_id)]
451 branches = [x for x in repo.branches.with_commit(_commit_id)]
451 return branches
452 return branches
452
453
453 return _commit_branches(context_uid, repo_id, commit_id)
454 return _commit_branches(context_uid, repo_id, commit_id)
454
455
455 @reraise_safe_exceptions
456 @reraise_safe_exceptions
456 def add_object(self, wire, content):
457 def add_object(self, wire, content):
457 repo_init = self._factory.repo_libgit2(wire)
458 repo_init = self._factory.repo_libgit2(wire)
458 with repo_init as repo:
459 with repo_init as repo:
459 blob = objects.Blob()
460 blob = objects.Blob()
460 blob.set_raw_string(content)
461 blob.set_raw_string(content)
461 repo.object_store.add_object(blob)
462 repo.object_store.add_object(blob)
462 return blob.id
463 return blob.id
463
464
464 # TODO: this is quite complex, check if that can be simplified
465 # TODO: this is quite complex, check if that can be simplified
465 @reraise_safe_exceptions
466 @reraise_safe_exceptions
466 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
467 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
467 # Defines the root tree
468 # Defines the root tree
468 class _Root(object):
469 class _Root(object):
469 def __repr__(self):
470 def __repr__(self):
470 return 'ROOT TREE'
471 return 'ROOT TREE'
471 ROOT = _Root()
472 ROOT = _Root()
472
473
473 repo = self._factory.repo(wire)
474 repo = self._factory.repo(wire)
474 object_store = repo.object_store
475 object_store = repo.object_store
475
476
476 # Create tree and populates it with blobs
477 # Create tree and populates it with blobs
477 if commit_tree:
478 if commit_tree:
478 commit_tree = safe_bytes(commit_tree)
479 commit_tree = safe_bytes(commit_tree)
479
480
480 if commit_tree and repo[commit_tree]:
481 if commit_tree and repo[commit_tree]:
481 git_commit = repo[safe_bytes(commit_data['parents'][0])]
482 git_commit = repo[safe_bytes(commit_data['parents'][0])]
482 commit_tree = repo[git_commit.tree] # root tree
483 commit_tree = repo[git_commit.tree] # root tree
483 else:
484 else:
484 commit_tree = objects.Tree()
485 commit_tree = objects.Tree()
485
486
486 for node in updated:
487 for node in updated:
487 # Compute subdirs if needed
488 # Compute subdirs if needed
488 dirpath, nodename = vcspath.split(node['path'])
489 dirpath, nodename = vcspath.split(node['path'])
489 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
490 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
490 parent = commit_tree
491 parent = commit_tree
491 ancestors = [('', parent)]
492 ancestors = [('', parent)]
492
493
493 # Tries to dig for the deepest existing tree
494 # Tries to dig for the deepest existing tree
494 while dirnames:
495 while dirnames:
495 curdir = dirnames.pop(0)
496 curdir = dirnames.pop(0)
496 try:
497 try:
497 dir_id = parent[curdir][1]
498 dir_id = parent[curdir][1]
498 except KeyError:
499 except KeyError:
499 # put curdir back into dirnames and stops
500 # put curdir back into dirnames and stops
500 dirnames.insert(0, curdir)
501 dirnames.insert(0, curdir)
501 break
502 break
502 else:
503 else:
503 # If found, updates parent
504 # If found, updates parent
504 parent = repo[dir_id]
505 parent = repo[dir_id]
505 ancestors.append((curdir, parent))
506 ancestors.append((curdir, parent))
506 # Now parent is deepest existing tree and we need to create
507 # Now parent is deepest existing tree and we need to create
507 # subtrees for dirnames (in reverse order)
508 # subtrees for dirnames (in reverse order)
508 # [this only applies for nodes from added]
509 # [this only applies for nodes from added]
509 new_trees = []
510 new_trees = []
510
511
511 blob = objects.Blob.from_string(node['content'])
512 blob = objects.Blob.from_string(node['content'])
512
513
513 node_path = safe_bytes(node['node_path'])
514 node_path = safe_bytes(node['node_path'])
514
515
515 if dirnames:
516 if dirnames:
516 # If there are trees which should be created we need to build
517 # If there are trees which should be created we need to build
517 # them now (in reverse order)
518 # them now (in reverse order)
518 reversed_dirnames = list(reversed(dirnames))
519 reversed_dirnames = list(reversed(dirnames))
519 curtree = objects.Tree()
520 curtree = objects.Tree()
520 curtree[node_path] = node['mode'], blob.id
521 curtree[node_path] = node['mode'], blob.id
521 new_trees.append(curtree)
522 new_trees.append(curtree)
522 for dirname in reversed_dirnames[:-1]:
523 for dirname in reversed_dirnames[:-1]:
523 newtree = objects.Tree()
524 newtree = objects.Tree()
524 newtree[dirname] = (DIR_STAT, curtree.id)
525 newtree[dirname] = (DIR_STAT, curtree.id)
525 new_trees.append(newtree)
526 new_trees.append(newtree)
526 curtree = newtree
527 curtree = newtree
527 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
528 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
528 else:
529 else:
529 parent.add(name=node_path, mode=node['mode'], hexsha=blob.id)
530 parent.add(name=node_path, mode=node['mode'], hexsha=blob.id)
530
531
531 new_trees.append(parent)
532 new_trees.append(parent)
532 # Update ancestors
533 # Update ancestors
533 reversed_ancestors = reversed(
534 reversed_ancestors = reversed(
534 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
535 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
535 for parent, tree, path in reversed_ancestors:
536 for parent, tree, path in reversed_ancestors:
536 parent[path] = (DIR_STAT, tree.id)
537 parent[path] = (DIR_STAT, tree.id)
537 object_store.add_object(tree)
538 object_store.add_object(tree)
538
539
539 object_store.add_object(blob)
540 object_store.add_object(blob)
540 for tree in new_trees:
541 for tree in new_trees:
541 object_store.add_object(tree)
542 object_store.add_object(tree)
542
543
543 for node_path in removed:
544 for node_path in removed:
544 paths = node_path.split('/')
545 paths = node_path.split('/')
545 tree = commit_tree # start with top-level
546 tree = commit_tree # start with top-level
546 trees = [{'tree': tree, 'path': ROOT}]
547 trees = [{'tree': tree, 'path': ROOT}]
547 # Traverse deep into the forest...
548 # Traverse deep into the forest...
548 # resolve final tree by iterating the path.
549 # resolve final tree by iterating the path.
549 # e.g a/b/c.txt will get
550 # e.g a/b/c.txt will get
550 # - root as tree then
551 # - root as tree then
551 # - 'a' as tree,
552 # - 'a' as tree,
552 # - 'b' as tree,
553 # - 'b' as tree,
553 # - stop at c as blob.
554 # - stop at c as blob.
554 for path in paths:
555 for path in paths:
555 try:
556 try:
556 obj = repo[tree[path][1]]
557 obj = repo[tree[path][1]]
557 if isinstance(obj, objects.Tree):
558 if isinstance(obj, objects.Tree):
558 trees.append({'tree': obj, 'path': path})
559 trees.append({'tree': obj, 'path': path})
559 tree = obj
560 tree = obj
560 except KeyError:
561 except KeyError:
561 break
562 break
562 #PROBLEM:
563 #PROBLEM:
563 """
564 """
564 We're not editing the same reference tree object
565 We're not editing the same reference tree object
565 """
566 """
566 # Cut down the blob and all rotten trees on the way back...
567 # Cut down the blob and all rotten trees on the way back...
567 for path, tree_data in reversed(list(zip(paths, trees))):
568 for path, tree_data in reversed(list(zip(paths, trees))):
568 tree = tree_data['tree']
569 tree = tree_data['tree']
569 tree.__delitem__(path)
570 tree.__delitem__(path)
570 # This operation edits the tree, we need to mark new commit back
571 # This operation edits the tree, we need to mark new commit back
571
572
572 if len(tree) > 0:
573 if len(tree) > 0:
573 # This tree still has elements - don't remove it or any
574 # This tree still has elements - don't remove it or any
574 # of its parents
575 # of its parents
575 break
576 break
576
577
577 object_store.add_object(commit_tree)
578 object_store.add_object(commit_tree)
578
579
579 # Create commit
580 # Create commit
580 commit = objects.Commit()
581 commit = objects.Commit()
581 commit.tree = commit_tree.id
582 commit.tree = commit_tree.id
582 bytes_keys = [
583 bytes_keys = [
583 'author',
584 'author',
584 'committer',
585 'committer',
585 'message',
586 'message',
586 'encoding',
587 'encoding',
587 'parents'
588 'parents'
588 ]
589 ]
589
590
590 for k, v in commit_data.items():
591 for k, v in commit_data.items():
591 if k in bytes_keys:
592 if k in bytes_keys:
592 if k == 'parents':
593 if k == 'parents':
593 v = [safe_bytes(x) for x in v]
594 v = [safe_bytes(x) for x in v]
594 else:
595 else:
595 v = safe_bytes(v)
596 v = safe_bytes(v)
596 setattr(commit, k, v)
597 setattr(commit, k, v)
597
598
598 object_store.add_object(commit)
599 object_store.add_object(commit)
599
600
600 self.create_branch(wire, branch, safe_str(commit.id))
601 self.create_branch(wire, branch, safe_str(commit.id))
601
602
602 # dulwich set-ref
603 # dulwich set-ref
603 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
604 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
604
605
605 return commit.id
606 return commit.id
606
607
607 @reraise_safe_exceptions
608 @reraise_safe_exceptions
608 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
609 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
609 if url != 'default' and '://' not in url:
610 if url != 'default' and '://' not in url:
610 client = LocalGitClient(url)
611 client = LocalGitClient(url)
611 else:
612 else:
612 url_obj = url_parser(url)
613 url_obj = url_parser(url)
613 o = self._build_opener(url)
614 o = self._build_opener(url)
614 url, _ = url_obj.authinfo()
615 url, _ = url_obj.authinfo()
615 client = HttpGitClient(base_url=url, opener=o)
616 client = HttpGitClient(base_url=url, opener=o)
616 repo = self._factory.repo(wire)
617 repo = self._factory.repo(wire)
617
618
618 determine_wants = repo.object_store.determine_wants_all
619 determine_wants = repo.object_store.determine_wants_all
619 if refs:
620 if refs:
620 refs = [ascii_bytes(x) for x in refs]
621 refs = [ascii_bytes(x) for x in refs]
621
622
622 def determine_wants_requested(remote_refs):
623 def determine_wants_requested(remote_refs):
623 determined = []
624 determined = []
624 for ref_name, ref_hash in remote_refs.items():
625 for ref_name, ref_hash in remote_refs.items():
625 bytes_ref_name = safe_bytes(ref_name)
626 bytes_ref_name = safe_bytes(ref_name)
626
627
627 if bytes_ref_name in refs:
628 if bytes_ref_name in refs:
628 bytes_ref_hash = safe_bytes(ref_hash)
629 bytes_ref_hash = safe_bytes(ref_hash)
629 determined.append(bytes_ref_hash)
630 determined.append(bytes_ref_hash)
630 return determined
631 return determined
631
632
632 # swap with our custom requested wants
633 # swap with our custom requested wants
633 determine_wants = determine_wants_requested
634 determine_wants = determine_wants_requested
634
635
635 try:
636 try:
636 remote_refs = client.fetch(
637 remote_refs = client.fetch(
637 path=url, target=repo, determine_wants=determine_wants)
638 path=url, target=repo, determine_wants=determine_wants)
638
639
639 except NotGitRepository as e:
640 except NotGitRepository as e:
640 log.warning(
641 log.warning(
641 'Trying to fetch from "%s" failed, not a Git repository.', url)
642 'Trying to fetch from "%s" failed, not a Git repository.', url)
642 # Exception can contain unicode which we convert
643 # Exception can contain unicode which we convert
643 raise exceptions.AbortException(e)(repr(e))
644 raise exceptions.AbortException(e)(repr(e))
644
645
645 # mikhail: client.fetch() returns all the remote refs, but fetches only
646 # mikhail: client.fetch() returns all the remote refs, but fetches only
646 # refs filtered by `determine_wants` function. We need to filter result
647 # refs filtered by `determine_wants` function. We need to filter result
647 # as well
648 # as well
648 if refs:
649 if refs:
649 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
650 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
650
651
651 if apply_refs:
652 if apply_refs:
652 # TODO: johbo: Needs proper test coverage with a git repository
653 # TODO: johbo: Needs proper test coverage with a git repository
653 # that contains a tag object, so that we would end up with
654 # that contains a tag object, so that we would end up with
654 # a peeled ref at this point.
655 # a peeled ref at this point.
655 for k in remote_refs:
656 for k in remote_refs:
656 if k.endswith(PEELED_REF_MARKER):
657 if k.endswith(PEELED_REF_MARKER):
657 log.debug("Skipping peeled reference %s", k)
658 log.debug("Skipping peeled reference %s", k)
658 continue
659 continue
659 repo[k] = remote_refs[k]
660 repo[k] = remote_refs[k]
660
661
661 if refs and not update_after:
662 if refs and not update_after:
662 # mikhail: explicitly set the head to the last ref.
663 # mikhail: explicitly set the head to the last ref.
663 repo[HEAD_MARKER] = remote_refs[refs[-1]]
664 repo[HEAD_MARKER] = remote_refs[refs[-1]]
664
665
665 if update_after:
666 if update_after:
666 # we want to check out HEAD
667 # we want to check out HEAD
667 repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
668 repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
668 index.build_index_from_tree(repo.path, repo.index_path(),
669 index.build_index_from_tree(repo.path, repo.index_path(),
669 repo.object_store, repo[HEAD_MARKER].tree)
670 repo.object_store, repo[HEAD_MARKER].tree)
670 return remote_refs
671 return remote_refs
671
672
672 @reraise_safe_exceptions
673 @reraise_safe_exceptions
673 def sync_fetch(self, wire, url, refs=None, all_refs=False):
674 def sync_fetch(self, wire, url, refs=None, all_refs=False):
674 repo = self._factory.repo(wire)
675 repo = self._factory.repo(wire)
675 if refs and not isinstance(refs, (list, tuple)):
676 if refs and not isinstance(refs, (list, tuple)):
676 refs = [refs]
677 refs = [refs]
677
678
678 config = self._wire_to_config(wire)
679 config = self._wire_to_config(wire)
679 # get all remote refs we'll use to fetch later
680 # get all remote refs we'll use to fetch later
680 cmd = ['ls-remote']
681 cmd = ['ls-remote']
681 if not all_refs:
682 if not all_refs:
682 cmd += ['--heads', '--tags']
683 cmd += ['--heads', '--tags']
683 cmd += [url]
684 cmd += [url]
684 output, __ = self.run_git_command(
685 output, __ = self.run_git_command(
685 wire, cmd, fail_on_stderr=False,
686 wire, cmd, fail_on_stderr=False,
686 _copts=self._remote_conf(config),
687 _copts=self._remote_conf(config),
687 extra_env={'GIT_TERMINAL_PROMPT': '0'})
688 extra_env={'GIT_TERMINAL_PROMPT': '0'})
688
689
689 remote_refs = collections.OrderedDict()
690 remote_refs = collections.OrderedDict()
690 fetch_refs = []
691 fetch_refs = []
691
692
692 for ref_line in output.splitlines():
693 for ref_line in output.splitlines():
693 sha, ref = ref_line.split(b'\t')
694 sha, ref = ref_line.split(b'\t')
694 sha = sha.strip()
695 sha = sha.strip()
695 if ref in remote_refs:
696 if ref in remote_refs:
696 # duplicate, skip
697 # duplicate, skip
697 continue
698 continue
698 if ref.endswith(PEELED_REF_MARKER):
699 if ref.endswith(PEELED_REF_MARKER):
699 log.debug("Skipping peeled reference %s", ref)
700 log.debug("Skipping peeled reference %s", ref)
700 continue
701 continue
701 # don't sync HEAD
702 # don't sync HEAD
702 if ref in [HEAD_MARKER]:
703 if ref in [HEAD_MARKER]:
703 continue
704 continue
704
705
705 remote_refs[ref] = sha
706 remote_refs[ref] = sha
706
707
707 if refs and sha in refs:
708 if refs and sha in refs:
708 # restrict the fetch to the refs we were explicitly given
709 # restrict the fetch to the refs we were explicitly given
709 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
710 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
710 elif not refs:
711 elif not refs:
711 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
712 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
712 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
713 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
713
714
714 if fetch_refs:
715 if fetch_refs:
715 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
716 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
716 fetch_refs_chunks = list(chunk)
717 fetch_refs_chunks = list(chunk)
717 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
718 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
718 self.run_git_command(
719 self.run_git_command(
719 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
720 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
720 fail_on_stderr=False,
721 fail_on_stderr=False,
721 _copts=self._remote_conf(config),
722 _copts=self._remote_conf(config),
722 extra_env={'GIT_TERMINAL_PROMPT': '0'})
723 extra_env={'GIT_TERMINAL_PROMPT': '0'})
723
724
724 return remote_refs
725 return remote_refs
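Outside of run_git_command, the same ls-remote parsing can be sketched with a plain subprocess call. This is a rough illustration only; `ls_remote_refs` and its environment handling are simplified stand-ins for the method above.

import os
import subprocess

def ls_remote_refs(url):
    # map of ref name -> sha for heads and tags, skipping peeled tag entries
    env = dict(os.environ, GIT_TERMINAL_PROMPT='0')
    output = subprocess.check_output(
        ['git', 'ls-remote', '--heads', '--tags', url], env=env)
    refs = {}
    for line in output.splitlines():
        sha, ref = line.split(b'\t')
        if ref.endswith(b'^{}'):      # peeled reference marker
            continue
        refs.setdefault(ref, sha.strip())
    return refs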
725
726
726 @reraise_safe_exceptions
727 @reraise_safe_exceptions
727 def sync_push(self, wire, url, refs=None):
728 def sync_push(self, wire, url, refs=None):
728 if not self.check_url(url, wire):
729 if not self.check_url(url, wire):
729 return
730 return
730 config = self._wire_to_config(wire)
731 config = self._wire_to_config(wire)
731 self._factory.repo(wire)
732 self._factory.repo(wire)
732 self.run_git_command(
733 self.run_git_command(
733 wire, ['push', url, '--mirror'], fail_on_stderr=False,
734 wire, ['push', url, '--mirror'], fail_on_stderr=False,
734 _copts=self._remote_conf(config),
735 _copts=self._remote_conf(config),
735 extra_env={'GIT_TERMINAL_PROMPT': '0'})
736 extra_env={'GIT_TERMINAL_PROMPT': '0'})
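sync_push boils down to a mirror push. A hedged, standalone equivalent follows; `repo_path` and `url` are placeholders and the config options passed via _copts in the real method are omitted.

import os
import subprocess

def mirror_push(repo_path, url):
    # --mirror pushes every local ref and deletes remote refs that no longer exist locally
    env = dict(os.environ, GIT_TERMINAL_PROMPT='0')
    subprocess.check_call(['git', 'push', url, '--mirror'], cwd=repo_path, env=env)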
736
737
737 @reraise_safe_exceptions
738 @reraise_safe_exceptions
738 def get_remote_refs(self, wire, url):
739 def get_remote_refs(self, wire, url):
739 repo = Repo(url)
740 repo = Repo(url)
740 return repo.get_refs()
741 return repo.get_refs()
741
742
742 @reraise_safe_exceptions
743 @reraise_safe_exceptions
743 def get_description(self, wire):
744 def get_description(self, wire):
744 repo = self._factory.repo(wire)
745 repo = self._factory.repo(wire)
745 return repo.get_description()
746 return repo.get_description()
746
747
747 @reraise_safe_exceptions
748 @reraise_safe_exceptions
748 def get_missing_revs(self, wire, rev1, rev2, path2):
749 def get_missing_revs(self, wire, rev1, rev2, path2):
749 repo = self._factory.repo(wire)
750 repo = self._factory.repo(wire)
750 LocalGitClient(thin_packs=False).fetch(path2, repo)
751 LocalGitClient(thin_packs=False).fetch(path2, repo)
751
752
752 wire_remote = wire.copy()
753 wire_remote = wire.copy()
753 wire_remote['path'] = path2
754 wire_remote['path'] = path2
754 repo_remote = self._factory.repo(wire_remote)
755 repo_remote = self._factory.repo(wire_remote)
755 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
756 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
756
757
757 revs = [
758 revs = [
758 x.commit.id
759 x.commit.id
759 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
760 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
760 return revs
761 return revs
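The missing-revisions lookup relies on dulwich's commit walker. A small sketch of that pattern, assuming both revisions already exist in the repository at `repo_path` (all names here are illustrative):

from dulwich.repo import Repo

def missing_revs(repo_path, rev1, rev2):
    # commits reachable from rev2 but not from rev1, as byte shas
    repo = Repo(repo_path)
    walker = repo.get_walker(include=[rev2], exclude=[rev1])
    return [entry.commit.id for entry in walker]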
761
762
762 @reraise_safe_exceptions
763 @reraise_safe_exceptions
763 def get_object(self, wire, sha, maybe_unreachable=False):
764 def get_object(self, wire, sha, maybe_unreachable=False):
764 cache_on, context_uid, repo_id = self._cache_on(wire)
765 cache_on, context_uid, repo_id = self._cache_on(wire)
765 region = self._region(wire)
766 region = self._region(wire)
766
767
767 @region.conditional_cache_on_arguments(condition=cache_on)
768 @region.conditional_cache_on_arguments(condition=cache_on)
768 def _get_object(_context_uid, _repo_id, _sha):
769 def _get_object(_context_uid, _repo_id, _sha):
769 repo_init = self._factory.repo_libgit2(wire)
770 repo_init = self._factory.repo_libgit2(wire)
770 with repo_init as repo:
771 with repo_init as repo:
771
772
772 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
773 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
773 try:
774 try:
774 commit = repo.revparse_single(sha)
775 commit = repo.revparse_single(sha)
775 except KeyError:
776 except KeyError:
776 # NOTE(marcink): KeyError doesn't give us any meaningful information
777 # NOTE(marcink): KeyError doesn't give us any meaningful information
777 # here, we instead give something more explicit
778 # here, we instead give something more explicit
778 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
779 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
779 raise exceptions.LookupException(e)(missing_commit_err)
780 raise exceptions.LookupException(e)(missing_commit_err)
780 except ValueError as e:
781 except ValueError as e:
781 raise exceptions.LookupException(e)(missing_commit_err)
782 raise exceptions.LookupException(e)(missing_commit_err)
782
783
783 is_tag = False
784 is_tag = False
784 if isinstance(commit, pygit2.Tag):
785 if isinstance(commit, pygit2.Tag):
785 commit = repo.get(commit.target)
786 commit = repo.get(commit.target)
786 is_tag = True
787 is_tag = True
787
788
788 check_dangling = True
789 check_dangling = True
789 if is_tag:
790 if is_tag:
790 check_dangling = False
791 check_dangling = False
791
792
792 if check_dangling and maybe_unreachable:
793 if check_dangling and maybe_unreachable:
793 check_dangling = False
794 check_dangling = False
794
795
795 # the sha was given as a reference and it resolved, so this cannot be a dangling commit
796 # the sha was given as a reference and it resolved, so this cannot be a dangling commit
796 if sha != commit.hex:
797 if sha != commit.hex:
797 check_dangling = False
798 check_dangling = False
798
799
799 if check_dangling:
800 if check_dangling:
800 # check for dangling commit
801 # check for dangling commit
801 for branch in repo.branches.with_commit(commit.hex):
802 for branch in repo.branches.with_commit(commit.hex):
802 if branch:
803 if branch:
803 break
804 break
804 else:
805 else:
805 # NOTE(marcink): Empty error doesn't give us any meaningful information
806 # NOTE(marcink): Empty error doesn't give us any meaningful information
806 # here, we instead give something more explicit
807 # here, we instead give something more explicit
807 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
808 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
808 raise exceptions.LookupException(e)(missing_commit_err)
809 raise exceptions.LookupException(e)(missing_commit_err)
809
810
810 commit_id = commit.hex
811 commit_id = commit.hex
811 type_id = commit.type
812 type_id = commit.type
812
813
813 return {
814 return {
814 'id': commit_id,
815 'id': commit_id,
815 'type': self._type_id_to_name(type_id),
816 'type': self._type_id_to_name(type_id),
816 'commit_id': commit_id,
817 'commit_id': commit_id,
817 'idx': 0
818 'idx': 0
818 }
819 }
819
820
820 return _get_object(context_uid, repo_id, sha)
821 return _get_object(context_uid, repo_id, sha)
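In bare pygit2 the lookup above roughly amounts to rev-parsing the input and, for annotated tags, peeling to the target commit. A sketch with a placeholder path; `obj.hex` mirrors the attribute used in the method (newer pygit2 prefers `str(obj.id)`).

import pygit2

def resolve_commit(repo_path, sha_or_ref):
    repo = pygit2.Repository(repo_path)
    obj = repo.revparse_single(sha_or_ref)   # Commit, Tag, ... or raises KeyError
    if isinstance(obj, pygit2.Tag):
        obj = repo.get(obj.target)           # peel an annotated tag to its commit
    return obj.hex, obj.type                 # hex sha and numeric object type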
821
822
822 @reraise_safe_exceptions
823 @reraise_safe_exceptions
823 def get_refs(self, wire):
824 def get_refs(self, wire):
824 cache_on, context_uid, repo_id = self._cache_on(wire)
825 cache_on, context_uid, repo_id = self._cache_on(wire)
825 region = self._region(wire)
826 region = self._region(wire)
826
827
827 @region.conditional_cache_on_arguments(condition=cache_on)
828 @region.conditional_cache_on_arguments(condition=cache_on)
828 def _get_refs(_context_uid, _repo_id):
829 def _get_refs(_context_uid, _repo_id):
829
830
830 repo_init = self._factory.repo_libgit2(wire)
831 repo_init = self._factory.repo_libgit2(wire)
831 with repo_init as repo:
832 with repo_init as repo:
832 regex = re.compile('^refs/(heads|tags)/')
833 regex = re.compile('^refs/(heads|tags)/')
833 return {x.name: x.target.hex for x in
834 return {x.name: x.target.hex for x in
834 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
835 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
835
836
836 return _get_refs(context_uid, repo_id)
837 return _get_refs(context_uid, repo_id)
837
838
838 @reraise_safe_exceptions
839 @reraise_safe_exceptions
839 def get_branch_pointers(self, wire):
840 def get_branch_pointers(self, wire):
840 cache_on, context_uid, repo_id = self._cache_on(wire)
841 cache_on, context_uid, repo_id = self._cache_on(wire)
841 region = self._region(wire)
842 region = self._region(wire)
842
843
843 @region.conditional_cache_on_arguments(condition=cache_on)
844 @region.conditional_cache_on_arguments(condition=cache_on)
844 def _get_branch_pointers(_context_uid, _repo_id):
845 def _get_branch_pointers(_context_uid, _repo_id):
845
846
846 repo_init = self._factory.repo_libgit2(wire)
847 repo_init = self._factory.repo_libgit2(wire)
847 regex = re.compile('^refs/heads')
848 regex = re.compile('^refs/heads')
848 with repo_init as repo:
849 with repo_init as repo:
849 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
850 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
850 return {x.target.hex: x.shorthand for x in branches}
851 return {x.target.hex: x.shorthand for x in branches}
851
852
852 return _get_branch_pointers(context_uid, repo_id)
853 return _get_branch_pointers(context_uid, repo_id)
853
854
854 @reraise_safe_exceptions
855 @reraise_safe_exceptions
855 def head(self, wire, show_exc=True):
856 def head(self, wire, show_exc=True):
856 cache_on, context_uid, repo_id = self._cache_on(wire)
857 cache_on, context_uid, repo_id = self._cache_on(wire)
857 region = self._region(wire)
858 region = self._region(wire)
858
859
859 @region.conditional_cache_on_arguments(condition=cache_on)
860 @region.conditional_cache_on_arguments(condition=cache_on)
860 def _head(_context_uid, _repo_id, _show_exc):
861 def _head(_context_uid, _repo_id, _show_exc):
861 repo_init = self._factory.repo_libgit2(wire)
862 repo_init = self._factory.repo_libgit2(wire)
862 with repo_init as repo:
863 with repo_init as repo:
863 try:
864 try:
864 return repo.head.peel().hex
865 return repo.head.peel().hex
865 except Exception:
866 except Exception:
866 if show_exc:
867 if show_exc:
867 raise
868 raise
868 return _head(context_uid, repo_id, show_exc)
869 return _head(context_uid, repo_id, show_exc)
869
870
870 @reraise_safe_exceptions
871 @reraise_safe_exceptions
871 def init(self, wire):
872 def init(self, wire):
872 repo_path = safe_str(wire['path'])
873 repo_path = safe_str(wire['path'])
873 self.repo = Repo.init(repo_path)
874 self.repo = Repo.init(repo_path)
874
875
875 @reraise_safe_exceptions
876 @reraise_safe_exceptions
876 def init_bare(self, wire):
877 def init_bare(self, wire):
877 repo_path = safe_str(wire['path'])
878 repo_path = safe_str(wire['path'])
878 self.repo = Repo.init_bare(repo_path)
879 self.repo = Repo.init_bare(repo_path)
879
880
880 @reraise_safe_exceptions
881 @reraise_safe_exceptions
881 def revision(self, wire, rev):
882 def revision(self, wire, rev):
882
883
883 cache_on, context_uid, repo_id = self._cache_on(wire)
884 cache_on, context_uid, repo_id = self._cache_on(wire)
884 region = self._region(wire)
885 region = self._region(wire)
885
886
886 @region.conditional_cache_on_arguments(condition=cache_on)
887 @region.conditional_cache_on_arguments(condition=cache_on)
887 def _revision(_context_uid, _repo_id, _rev):
888 def _revision(_context_uid, _repo_id, _rev):
888 repo_init = self._factory.repo_libgit2(wire)
889 repo_init = self._factory.repo_libgit2(wire)
889 with repo_init as repo:
890 with repo_init as repo:
890 commit = repo[rev]
891 commit = repo[rev]
891 obj_data = {
892 obj_data = {
892 'id': commit.id.hex,
893 'id': commit.id.hex,
893 }
894 }
894 # tree objects themselves don't have a tree_id attribute
895 # tree objects themselves don't have a tree_id attribute
895 if hasattr(commit, 'tree_id'):
896 if hasattr(commit, 'tree_id'):
896 obj_data['tree'] = commit.tree_id.hex
897 obj_data['tree'] = commit.tree_id.hex
897
898
898 return obj_data
899 return obj_data
899 return _revision(context_uid, repo_id, rev)
900 return _revision(context_uid, repo_id, rev)
900
901
901 @reraise_safe_exceptions
902 @reraise_safe_exceptions
902 def date(self, wire, commit_id):
903 def date(self, wire, commit_id):
903 cache_on, context_uid, repo_id = self._cache_on(wire)
904 cache_on, context_uid, repo_id = self._cache_on(wire)
904 region = self._region(wire)
905 region = self._region(wire)
905
906
906 @region.conditional_cache_on_arguments(condition=cache_on)
907 @region.conditional_cache_on_arguments(condition=cache_on)
907 def _date(_repo_id, _commit_id):
908 def _date(_repo_id, _commit_id):
908 repo_init = self._factory.repo_libgit2(wire)
909 repo_init = self._factory.repo_libgit2(wire)
909 with repo_init as repo:
910 with repo_init as repo:
910 commit = repo[commit_id]
911 commit = repo[commit_id]
911
912
912 if hasattr(commit, 'commit_time'):
913 if hasattr(commit, 'commit_time'):
913 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
914 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
914 else:
915 else:
915 commit = commit.get_object()
916 commit = commit.get_object()
916 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
917 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
917
918
918 # TODO(marcink): check dulwich difference of offset vs timezone
919 # TODO(marcink): check dulwich difference of offset vs timezone
919 return [commit_time, commit_time_offset]
920 return [commit_time, commit_time_offset]
920 return _date(repo_id, commit_id)
921 return _date(repo_id, commit_id)
921
922
922 @reraise_safe_exceptions
923 @reraise_safe_exceptions
923 def author(self, wire, commit_id):
924 def author(self, wire, commit_id):
924 cache_on, context_uid, repo_id = self._cache_on(wire)
925 cache_on, context_uid, repo_id = self._cache_on(wire)
925 region = self._region(wire)
926 region = self._region(wire)
926
927
927 @region.conditional_cache_on_arguments(condition=cache_on)
928 @region.conditional_cache_on_arguments(condition=cache_on)
928 def _author(_repo_id, _commit_id):
929 def _author(_repo_id, _commit_id):
929 repo_init = self._factory.repo_libgit2(wire)
930 repo_init = self._factory.repo_libgit2(wire)
930 with repo_init as repo:
931 with repo_init as repo:
931 commit = repo[commit_id]
932 commit = repo[commit_id]
932
933
933 if hasattr(commit, 'author'):
934 if hasattr(commit, 'author'):
934 author = commit.author
935 author = commit.author
935 else:
936 else:
936 author = commit.get_object().author
937 author = commit.get_object().author
937
938
938 if author.email:
939 if author.email:
939 return "{} <{}>".format(author.name, author.email)
940 return "{} <{}>".format(author.name, author.email)
940
941
941 try:
942 try:
942 return "{}".format(author.name)
943 return "{}".format(author.name)
943 except Exception:
944 except Exception:
944 return "{}".format(safe_str(author.raw_name))
945 return "{}".format(safe_str(author.raw_name))
945
946
946 return _author(repo_id, commit_id)
947 return _author(repo_id, commit_id)
947
948
948 @reraise_safe_exceptions
949 @reraise_safe_exceptions
949 def message(self, wire, commit_id):
950 def message(self, wire, commit_id):
950 cache_on, context_uid, repo_id = self._cache_on(wire)
951 cache_on, context_uid, repo_id = self._cache_on(wire)
951 region = self._region(wire)
952 region = self._region(wire)
952
953
953 @region.conditional_cache_on_arguments(condition=cache_on)
954 @region.conditional_cache_on_arguments(condition=cache_on)
954 def _message(_repo_id, _commit_id):
955 def _message(_repo_id, _commit_id):
955 repo_init = self._factory.repo_libgit2(wire)
956 repo_init = self._factory.repo_libgit2(wire)
956 with repo_init as repo:
957 with repo_init as repo:
957 commit = repo[commit_id]
958 commit = repo[commit_id]
958 return commit.message
959 return commit.message
959 return _message(repo_id, commit_id)
960 return _message(repo_id, commit_id)
960
961
961 @reraise_safe_exceptions
962 @reraise_safe_exceptions
962 def parents(self, wire, commit_id):
963 def parents(self, wire, commit_id):
963 cache_on, context_uid, repo_id = self._cache_on(wire)
964 cache_on, context_uid, repo_id = self._cache_on(wire)
964 region = self._region(wire)
965 region = self._region(wire)
965
966
966 @region.conditional_cache_on_arguments(condition=cache_on)
967 @region.conditional_cache_on_arguments(condition=cache_on)
967 def _parents(_repo_id, _commit_id):
968 def _parents(_repo_id, _commit_id):
968 repo_init = self._factory.repo_libgit2(wire)
969 repo_init = self._factory.repo_libgit2(wire)
969 with repo_init as repo:
970 with repo_init as repo:
970 commit = repo[commit_id]
971 commit = repo[commit_id]
971 if hasattr(commit, 'parent_ids'):
972 if hasattr(commit, 'parent_ids'):
972 parent_ids = commit.parent_ids
973 parent_ids = commit.parent_ids
973 else:
974 else:
974 parent_ids = commit.get_object().parent_ids
975 parent_ids = commit.get_object().parent_ids
975
976
976 return [x.hex for x in parent_ids]
977 return [x.hex for x in parent_ids]
977 return _parents(repo_id, commit_id)
978 return _parents(repo_id, commit_id)
978
979
979 @reraise_safe_exceptions
980 @reraise_safe_exceptions
980 def children(self, wire, commit_id):
981 def children(self, wire, commit_id):
981 cache_on, context_uid, repo_id = self._cache_on(wire)
982 cache_on, context_uid, repo_id = self._cache_on(wire)
982 region = self._region(wire)
983 region = self._region(wire)
983
984
984 head = self.head(wire)
985 head = self.head(wire)
985
986
986 @region.conditional_cache_on_arguments(condition=cache_on)
987 @region.conditional_cache_on_arguments(condition=cache_on)
987 def _children(_repo_id, _commit_id):
988 def _children(_repo_id, _commit_id):
988
989
989 output, __ = self.run_git_command(
990 output, __ = self.run_git_command(
990 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
991 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
991
992
992 child_ids = []
993 child_ids = []
993 pat = re.compile(r'^{}'.format(commit_id))
994 pat = re.compile(r'^{}'.format(commit_id))
994 for line in output.splitlines():
995 for line in output.splitlines():
995 line = safe_str(line)
996 line = safe_str(line)
996 if pat.match(line):
997 if pat.match(line):
997 found_ids = line.split(' ')[1:]
998 found_ids = line.split(' ')[1:]
998 child_ids.extend(found_ids)
999 child_ids.extend(found_ids)
999 break
1000 break
1000
1001
1001 return child_ids
1002 return child_ids
1002 return _children(repo_id, commit_id)
1003 return _children(repo_id, commit_id)
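The children lookup depends on the line format of `git rev-list --children`, where each line lists a commit followed by its children. A simplified sketch without the commit range used above; `children_of` is a made-up helper name.

import subprocess

def children_of(repo_path, commit_id):
    output = subprocess.check_output(
        ['git', 'rev-list', '--all', '--children'], cwd=repo_path)
    needle = commit_id.encode()
    for line in output.splitlines():
        # line format: <commit-sha> <child-sha> <child-sha> ...
        parts = line.split(b' ')
        if parts[0] == needle:
            return parts[1:]
    return []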
1003
1004
1004 @reraise_safe_exceptions
1005 @reraise_safe_exceptions
1005 def set_refs(self, wire, key, value):
1006 def set_refs(self, wire, key, value):
1006 repo_init = self._factory.repo_libgit2(wire)
1007 repo_init = self._factory.repo_libgit2(wire)
1007 with repo_init as repo:
1008 with repo_init as repo:
1008 repo.references.create(key, value, force=True)
1009 repo.references.create(key, value, force=True)
1009
1010
1010 @reraise_safe_exceptions
1011 @reraise_safe_exceptions
1011 def create_branch(self, wire, branch_name, commit_id, force=False):
1012 def create_branch(self, wire, branch_name, commit_id, force=False):
1012 repo_init = self._factory.repo_libgit2(wire)
1013 repo_init = self._factory.repo_libgit2(wire)
1013 with repo_init as repo:
1014 with repo_init as repo:
1014 commit = repo[commit_id]
1015 commit = repo[commit_id]
1015
1016
1016 if force:
1017 if force:
1017 repo.branches.local.create(branch_name, commit, force=force)
1018 repo.branches.local.create(branch_name, commit, force=force)
1018 elif not repo.branches.get(branch_name):
1019 elif not repo.branches.get(branch_name):
1019 # create the branch only if it does not exist yet
1020 # create the branch only if it does not exist yet
1020 repo.branches.local.create(branch_name, commit, force=force)
1021 repo.branches.local.create(branch_name, commit, force=force)
1021
1022
1022 @reraise_safe_exceptions
1023 @reraise_safe_exceptions
1023 def remove_ref(self, wire, key):
1024 def remove_ref(self, wire, key):
1024 repo_init = self._factory.repo_libgit2(wire)
1025 repo_init = self._factory.repo_libgit2(wire)
1025 with repo_init as repo:
1026 with repo_init as repo:
1026 repo.references.delete(key)
1027 repo.references.delete(key)
1027
1028
1028 @reraise_safe_exceptions
1029 @reraise_safe_exceptions
1029 def tag_remove(self, wire, tag_name):
1030 def tag_remove(self, wire, tag_name):
1030 repo_init = self._factory.repo_libgit2(wire)
1031 repo_init = self._factory.repo_libgit2(wire)
1031 with repo_init as repo:
1032 with repo_init as repo:
1032 key = 'refs/tags/{}'.format(tag_name)
1033 key = 'refs/tags/{}'.format(tag_name)
1033 repo.references.delete(key)
1034 repo.references.delete(key)
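Tags and other refs are plain entries under repo.references in pygit2, which is all set_refs, remove_ref and tag_remove use. A compact sketch, assuming `commit_oid` is a valid oid in the repository and `retag` is an illustrative helper:

import pygit2

def retag(repo_path, tag_name, commit_oid):
    repo = pygit2.Repository(repo_path)
    key = 'refs/tags/{}'.format(tag_name)
    repo.references.create(key, commit_oid, force=True)   # overwrite if present
    # removing the lightweight tag later is just deleting the ref again
    repo.references.delete(key)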
1034
1035
1035 @reraise_safe_exceptions
1036 @reraise_safe_exceptions
1036 def tree_changes(self, wire, source_id, target_id):
1037 def tree_changes(self, wire, source_id, target_id):
1037 # TODO(marcink): remove this, it seems to be used only by tests
1038 # TODO(marcink): remove this, it seems to be used only by tests
1038 repo = self._factory.repo(wire)
1039 repo = self._factory.repo(wire)
1039 source = repo[source_id].tree if source_id else None
1040 source = repo[source_id].tree if source_id else None
1040 target = repo[target_id].tree
1041 target = repo[target_id].tree
1041 result = repo.object_store.tree_changes(source, target)
1042 result = repo.object_store.tree_changes(source, target)
1042 return list(result)
1043 return list(result)
1043
1044
1044 @reraise_safe_exceptions
1045 @reraise_safe_exceptions
1045 def tree_and_type_for_path(self, wire, commit_id, path):
1046 def tree_and_type_for_path(self, wire, commit_id, path):
1046
1047
1047 cache_on, context_uid, repo_id = self._cache_on(wire)
1048 cache_on, context_uid, repo_id = self._cache_on(wire)
1048 region = self._region(wire)
1049 region = self._region(wire)
1049
1050
1050 @region.conditional_cache_on_arguments(condition=cache_on)
1051 @region.conditional_cache_on_arguments(condition=cache_on)
1051 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1052 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1052 repo_init = self._factory.repo_libgit2(wire)
1053 repo_init = self._factory.repo_libgit2(wire)
1053
1054
1054 with repo_init as repo:
1055 with repo_init as repo:
1055 commit = repo[commit_id]
1056 commit = repo[commit_id]
1056 try:
1057 try:
1057 tree = commit.tree[path]
1058 tree = commit.tree[path]
1058 except KeyError:
1059 except KeyError:
1059 return None, None, None
1060 return None, None, None
1060
1061
1061 return tree.id.hex, tree.type_str, tree.filemode
1062 return tree.id.hex, tree.type_str, tree.filemode
1062 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1063 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1063
1064
1064 @reraise_safe_exceptions
1065 @reraise_safe_exceptions
1065 def tree_items(self, wire, tree_id):
1066 def tree_items(self, wire, tree_id):
1066 cache_on, context_uid, repo_id = self._cache_on(wire)
1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1067 region = self._region(wire)
1068 region = self._region(wire)
1068
1069
1069 @region.conditional_cache_on_arguments(condition=cache_on)
1070 @region.conditional_cache_on_arguments(condition=cache_on)
1070 def _tree_items(_repo_id, _tree_id):
1071 def _tree_items(_repo_id, _tree_id):
1071
1072
1072 repo_init = self._factory.repo_libgit2(wire)
1073 repo_init = self._factory.repo_libgit2(wire)
1073 with repo_init as repo:
1074 with repo_init as repo:
1074 try:
1075 try:
1075 tree = repo[tree_id]
1076 tree = repo[tree_id]
1076 except KeyError:
1077 except KeyError:
1077 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1078 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1078
1079
1079 result = []
1080 result = []
1080 for item in tree:
1081 for item in tree:
1081 item_sha = item.hex
1082 item_sha = item.hex
1082 item_mode = item.filemode
1083 item_mode = item.filemode
1083 item_type = item.type_str
1084 item_type = item.type_str
1084
1085
1085 if item_type == 'commit':
1086 if item_type == 'commit':
1086 # NOTE(marcink): we translate submodules to 'link' for backward compat
1087 # NOTE(marcink): we translate submodules to 'link' for backward compat
1087 item_type = 'link'
1088 item_type = 'link'
1088
1089
1089 result.append((item.name, item_mode, item_sha, item_type))
1090 result.append((item.name, item_mode, item_sha, item_type))
1090 return result
1091 return result
1091 return _tree_items(repo_id, tree_id)
1092 return _tree_items(repo_id, tree_id)
1092
1093
1093 @reraise_safe_exceptions
1094 @reraise_safe_exceptions
1094 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1095 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1095 """
1096 """
1096 Old version that uses subprocess to call diff
1097 Old version that uses subprocess to call diff
1097 """
1098 """
1098
1099
1099 flags = [
1100 flags = [
1100 '-U%s' % context, '--patch',
1101 '-U%s' % context, '--patch',
1101 '--binary',
1102 '--binary',
1102 '--find-renames',
1103 '--find-renames',
1103 '--no-indent-heuristic',
1104 '--no-indent-heuristic',
1104 # '--indent-heuristic',
1105 # '--indent-heuristic',
1105 #'--full-index',
1106 #'--full-index',
1106 #'--abbrev=40'
1107 #'--abbrev=40'
1107 ]
1108 ]
1108
1109
1109 if opt_ignorews:
1110 if opt_ignorews:
1110 flags.append('--ignore-all-space')
1111 flags.append('--ignore-all-space')
1111
1112
1112 if commit_id_1 == self.EMPTY_COMMIT:
1113 if commit_id_1 == self.EMPTY_COMMIT:
1113 cmd = ['show'] + flags + [commit_id_2]
1114 cmd = ['show'] + flags + [commit_id_2]
1114 else:
1115 else:
1115 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1116 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1116
1117
1117 if file_filter:
1118 if file_filter:
1118 cmd.extend(['--', file_filter])
1119 cmd.extend(['--', file_filter])
1119
1120
1120 diff, __ = self.run_git_command(wire, cmd)
1121 diff, __ = self.run_git_command(wire, cmd)
1121 # If we used the 'show' command, strip the first few lines (until the
1122 # If we used the 'show' command, strip the first few lines (until the
1122 # actual diff starts)
1123 # actual diff starts)
1123 if commit_id_1 == self.EMPTY_COMMIT:
1124 if commit_id_1 == self.EMPTY_COMMIT:
1124 lines = diff.splitlines()
1125 lines = diff.splitlines()
1125 x = 0
1126 x = 0
1126 for line in lines:
1127 for line in lines:
1127 if line.startswith(b'diff'):
1128 if line.startswith(b'diff'):
1128 break
1129 break
1129 x += 1
1130 x += 1
1130 # Append a trailing newline, just like the 'diff' command does
1131 # Append a trailing newline, just like the 'diff' command does
1131 diff = b'\n'.join(lines[x:]) + b'\n'
1132 diff = b'\n'.join(lines[x:]) + b'\n'
1132 return diff
1133 return diff
1133
1134
1134 @reraise_safe_exceptions
1135 @reraise_safe_exceptions
1135 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1136 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1136 repo_init = self._factory.repo_libgit2(wire)
1137 repo_init = self._factory.repo_libgit2(wire)
1137
1138
1138 with repo_init as repo:
1139 with repo_init as repo:
1139 swap = True
1140 swap = True
1140 flags = 0
1141 flags = 0
1141 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1142 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1142
1143
1143 if opt_ignorews:
1144 if opt_ignorews:
1144 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1145 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1145
1146
1146 if commit_id_1 == self.EMPTY_COMMIT:
1147 if commit_id_1 == self.EMPTY_COMMIT:
1147 comm1 = repo[commit_id_2]
1148 comm1 = repo[commit_id_2]
1148 diff_obj = comm1.tree.diff_to_tree(
1149 diff_obj = comm1.tree.diff_to_tree(
1149 flags=flags, context_lines=context, swap=swap)
1150 flags=flags, context_lines=context, swap=swap)
1150
1151
1151 else:
1152 else:
1152 comm1 = repo[commit_id_2]
1153 comm1 = repo[commit_id_2]
1153 comm2 = repo[commit_id_1]
1154 comm2 = repo[commit_id_1]
1154 diff_obj = comm1.tree.diff_to_tree(
1155 diff_obj = comm1.tree.diff_to_tree(
1155 comm2.tree, flags=flags, context_lines=context, swap=swap)
1156 comm2.tree, flags=flags, context_lines=context, swap=swap)
1156 similar_flags = 0
1157 similar_flags = 0
1157 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1158 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1158 diff_obj.find_similar(flags=similar_flags)
1159 diff_obj.find_similar(flags=similar_flags)
1159
1160
1160 if file_filter:
1161 if file_filter:
1161 for p in diff_obj:
1162 for p in diff_obj:
1162 if p.delta.old_file.path == file_filter:
1163 if p.delta.old_file.path == file_filter:
1163 return BinaryEnvelope(p.data) or BinaryEnvelope(b'')
1164 return BinaryEnvelope(p.data) or BinaryEnvelope(b'')
1164 # no matching path == no diff
1165 # no matching path == no diff
1165 return BinaryEnvelope(b'')
1166 return BinaryEnvelope(b'')
1166 return BinaryEnvelope(diff_obj.patch) or BinaryEnvelope(b'')
1167 return BinaryEnvelope(diff_obj.patch) or BinaryEnvelope(b'')
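The libgit2-based diff is built from tree-to-tree diffs. A hedged sketch of the core calls, with the flags, swap handling and per-file filtering simplified away; `patch_between` is an illustrative name.

import pygit2

def patch_between(repo_path, old_id, new_id, context=3):
    repo = pygit2.Repository(repo_path)
    old_tree = repo[old_id].tree
    new_tree = repo[new_id].tree
    diff = old_tree.diff_to_tree(new_tree, context_lines=context)
    diff.find_similar()          # detect renames, like --find-renames
    return diff.patch or ''      # unified diff text, may be None for an empty diff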
1167
1168
1168 @reraise_safe_exceptions
1169 @reraise_safe_exceptions
1169 def node_history(self, wire, commit_id, path, limit):
1170 def node_history(self, wire, commit_id, path, limit):
1170 cache_on, context_uid, repo_id = self._cache_on(wire)
1171 cache_on, context_uid, repo_id = self._cache_on(wire)
1171 region = self._region(wire)
1172 region = self._region(wire)
1172
1173
1173 @region.conditional_cache_on_arguments(condition=cache_on)
1174 @region.conditional_cache_on_arguments(condition=cache_on)
1174 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1175 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1175 # optimize for n==1, rev-list is much faster for that use-case
1176 # optimize for n==1, rev-list is much faster for that use-case
1176 if limit == 1:
1177 if limit == 1:
1177 cmd = ['rev-list', '-1', commit_id, '--', path]
1178 cmd = ['rev-list', '-1', commit_id, '--', path]
1178 else:
1179 else:
1179 cmd = ['log']
1180 cmd = ['log']
1180 if limit:
1181 if limit:
1181 cmd.extend(['-n', str(safe_int(limit, 0))])
1182 cmd.extend(['-n', str(safe_int(limit, 0))])
1182 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1183 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1183
1184
1184 output, __ = self.run_git_command(wire, cmd)
1185 output, __ = self.run_git_command(wire, cmd)
1185 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1186 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1186
1187
1187 return [x for x in commit_ids]
1188 return [x for x in commit_ids]
1188 return _node_history(context_uid, repo_id, commit_id, path, limit)
1189 return _node_history(context_uid, repo_id, commit_id, path, limit)
1189
1190
1190 @reraise_safe_exceptions
1191 @reraise_safe_exceptions
1191 def node_annotate_legacy(self, wire, commit_id, path):
1192 def node_annotate_legacy(self, wire, commit_id, path):
1192 # note: replaced by pygit2 implementation
1193 # note: replaced by pygit2 implementation
1193 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1194 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1194 # -l ==> outputs long shas (and we need all 40 characters)
1195 # -l ==> outputs long shas (and we need all 40 characters)
1195 # --root ==> doesn't put '^' character for boundaries
1196 # --root ==> doesn't put '^' character for boundaries
1196 # -r commit_id ==> blames for the given commit
1197 # -r commit_id ==> blames for the given commit
1197 output, __ = self.run_git_command(wire, cmd)
1198 output, __ = self.run_git_command(wire, cmd)
1198
1199
1199 result = []
1200 result = []
1200 for i, blame_line in enumerate(output.splitlines()[:-1]):
1201 for i, blame_line in enumerate(output.splitlines()[:-1]):
1201 line_no = i + 1
1202 line_no = i + 1
1202 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1203 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1203 result.append((line_no, blame_commit_id, line))
1204 result.append((line_no, blame_commit_id, line))
1204
1205
1205 return result
1206 return result
1206
1207
1207 @reraise_safe_exceptions
1208 @reraise_safe_exceptions
1208 def node_annotate(self, wire, commit_id, path):
1209 def node_annotate(self, wire, commit_id, path):
1209
1210
1210 result_libgit = []
1211 result_libgit = []
1211 repo_init = self._factory.repo_libgit2(wire)
1212 repo_init = self._factory.repo_libgit2(wire)
1212 with repo_init as repo:
1213 with repo_init as repo:
1213 commit = repo[commit_id]
1214 commit = repo[commit_id]
1214 blame_obj = repo.blame(path, newest_commit=commit_id)
1215 blame_obj = repo.blame(path, newest_commit=commit_id)
1215 for i, line in enumerate(commit.tree[path].data.splitlines()):
1216 for i, line in enumerate(commit.tree[path].data.splitlines()):
1216 line_no = i + 1
1217 line_no = i + 1
1217 hunk = blame_obj.for_line(line_no)
1218 hunk = blame_obj.for_line(line_no)
1218 blame_commit_id = hunk.final_commit_id.hex
1219 blame_commit_id = hunk.final_commit_id.hex
1219
1220
1220 result_libgit.append((line_no, blame_commit_id, line))
1221 result_libgit.append((line_no, blame_commit_id, line))
1221
1222
1222 return result_libgit
1223 return result_libgit
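Line annotation goes through pygit2's blame API: one blame object per file, then a per-line lookup of the hunk that last touched that line. A compact sketch assuming `path` exists at the given commit; `annotate` is an illustrative helper.

import pygit2

def annotate(repo_path, commit_id, path):
    repo = pygit2.Repository(repo_path)
    commit = repo[commit_id]
    blame = repo.blame(path, newest_commit=commit_id)
    result = []
    for line_no, line in enumerate(commit.tree[path].data.splitlines(), start=1):
        hunk = blame.for_line(line_no)              # hunk covering this line
        result.append((line_no, hunk.final_commit_id.hex, line))
    return result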
1223
1224
1224 @reraise_safe_exceptions
1225 @reraise_safe_exceptions
1225 def update_server_info(self, wire):
1226 def update_server_info(self, wire):
1226 repo = self._factory.repo(wire)
1227 repo = self._factory.repo(wire)
1227 update_server_info(repo)
1228 update_server_info(repo)
1228
1229
1229 @reraise_safe_exceptions
1230 @reraise_safe_exceptions
1230 def get_all_commit_ids(self, wire):
1231 def get_all_commit_ids(self, wire):
1231
1232
1232 cache_on, context_uid, repo_id = self._cache_on(wire)
1233 cache_on, context_uid, repo_id = self._cache_on(wire)
1233 region = self._region(wire)
1234 region = self._region(wire)
1234
1235
1235 @region.conditional_cache_on_arguments(condition=cache_on)
1236 @region.conditional_cache_on_arguments(condition=cache_on)
1236 def _get_all_commit_ids(_context_uid, _repo_id):
1237 def _get_all_commit_ids(_context_uid, _repo_id):
1237
1238
1238 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1239 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1239 try:
1240 try:
1240 output, __ = self.run_git_command(wire, cmd)
1241 output, __ = self.run_git_command(wire, cmd)
1241 return output.splitlines()
1242 return output.splitlines()
1242 except Exception:
1243 except Exception:
1243 # Can be raised for empty repositories
1244 # Can be raised for empty repositories
1244 return []
1245 return []
1245
1246
1246 @region.conditional_cache_on_arguments(condition=cache_on)
1247 @region.conditional_cache_on_arguments(condition=cache_on)
1247 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1248 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1248 repo_init = self._factory.repo_libgit2(wire)
1249 repo_init = self._factory.repo_libgit2(wire)
1249 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1250 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1250 results = []
1251 results = []
1251 with repo_init as repo:
1252 with repo_init as repo:
1252 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1253 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1253 results.append(commit.id.hex)
1254 results.append(commit.id.hex)
1254
1255
1255 return _get_all_commit_ids(context_uid, repo_id)
1256 return _get_all_commit_ids(context_uid, repo_id)
1256
1257
1257 @reraise_safe_exceptions
1258 @reraise_safe_exceptions
1258 def run_git_command(self, wire, cmd, **opts):
1259 def run_git_command(self, wire, cmd, **opts):
1259 path = wire.get('path', None)
1260 path = wire.get('path', None)
1260
1261
1261 if path and os.path.isdir(path):
1262 if path and os.path.isdir(path):
1262 opts['cwd'] = path
1263 opts['cwd'] = path
1263
1264
1264 if '_bare' in opts:
1265 if '_bare' in opts:
1265 _copts = []
1266 _copts = []
1266 del opts['_bare']
1267 del opts['_bare']
1267 else:
1268 else:
1268 _copts = ['-c', 'core.quotepath=false', ]
1269 _copts = ['-c', 'core.quotepath=false', ]
1269 safe_call = False
1270 safe_call = False
1270 if '_safe' in opts:
1271 if '_safe' in opts:
1271 # no exc on failure
1272 # no exc on failure
1272 del opts['_safe']
1273 del opts['_safe']
1273 safe_call = True
1274 safe_call = True
1274
1275
1275 if '_copts' in opts:
1276 if '_copts' in opts:
1276 _copts.extend(opts['_copts'] or [])
1277 _copts.extend(opts['_copts'] or [])
1277 del opts['_copts']
1278 del opts['_copts']
1278
1279
1279 gitenv = os.environ.copy()
1280 gitenv = os.environ.copy()
1280 gitenv.update(opts.pop('extra_env', {}))
1281 gitenv.update(opts.pop('extra_env', {}))
1281 # GIT_DIR must be removed from the environment, otherwise git would use the repository it points to instead of cwd
1282 # GIT_DIR must be removed from the environment, otherwise git would use the repository it points to instead of cwd
1282 if 'GIT_DIR' in gitenv:
1283 if 'GIT_DIR' in gitenv:
1283 del gitenv['GIT_DIR']
1284 del gitenv['GIT_DIR']
1284 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1285 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1285 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1286 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1286
1287
1287 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1288 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1288 _opts = {'env': gitenv, 'shell': False}
1289 _opts = {'env': gitenv, 'shell': False}
1289
1290
1290 proc = None
1291 proc = None
1291 try:
1292 try:
1292 _opts.update(opts)
1293 _opts.update(opts)
1293 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1294 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1294
1295
1295 return b''.join(proc), b''.join(proc.stderr)
1296 return b''.join(proc), b''.join(proc.stderr)
1296 except OSError as err:
1297 except OSError as err:
1297 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1298 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1298 tb_err = ("Couldn't run git command (%s).\n"
1299 tb_err = ("Couldn't run git command (%s).\n"
1299 "Original error was:%s\n"
1300 "Original error was:%s\n"
1300 "Call options:%s\n"
1301 "Call options:%s\n"
1301 % (cmd, err, _opts))
1302 % (cmd, err, _opts))
1302 log.exception(tb_err)
1303 log.exception(tb_err)
1303 if safe_call:
1304 if safe_call:
1304 return '', err
1305 return '', err
1305 else:
1306 else:
1306 raise exceptions.VcsException()(tb_err)
1307 raise exceptions.VcsException()(tb_err)
1307 finally:
1308 finally:
1308 if proc:
1309 if proc:
1309 proc.close()
1310 proc.close()
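The environment scrubbing above matters because the VCS server process can carry GIT_DIR and other git variables that would redirect every command to the wrong repository. A hedged sketch of the same preparation with plain subprocess; `run_git` is an illustrative stand-in, not the SubprocessIOChunker-based path used here.

import os
import subprocess

def run_git(repo_path, args):
    env = os.environ.copy()
    env.pop('GIT_DIR', None)                        # never let an inherited GIT_DIR win
    env['GIT_CONFIG_NOGLOBAL'] = '1'                # ignore the user's global config
    env['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
    cmd = ['git', '-c', 'core.quotepath=false'] + list(args)
    return subprocess.run(cmd, cwd=repo_path, env=env,
                          capture_output=True, check=False)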
1310
1311
1311 @reraise_safe_exceptions
1312 @reraise_safe_exceptions
1312 def install_hooks(self, wire, force=False):
1313 def install_hooks(self, wire, force=False):
1313 from vcsserver.hook_utils import install_git_hooks
1314 from vcsserver.hook_utils import install_git_hooks
1314 bare = self.bare(wire)
1315 bare = self.bare(wire)
1315 path = wire['path']
1316 path = wire['path']
1316 binary_dir = settings.BINARY_DIR
1317 binary_dir = settings.BINARY_DIR
1317 executable = None
1318 executable = None
1318 if binary_dir:
1319 if binary_dir:
1319 executable = os.path.join(binary_dir, 'python3')
1320 executable = os.path.join(binary_dir, 'python3')
1320 return install_git_hooks(path, bare, force_create=force)
1321 return install_git_hooks(path, bare, force_create=force)
1321
1322
1322 @reraise_safe_exceptions
1323 @reraise_safe_exceptions
1323 def get_hooks_info(self, wire):
1324 def get_hooks_info(self, wire):
1324 from vcsserver.hook_utils import (
1325 from vcsserver.hook_utils import (
1325 get_git_pre_hook_version, get_git_post_hook_version)
1326 get_git_pre_hook_version, get_git_post_hook_version)
1326 bare = self.bare(wire)
1327 bare = self.bare(wire)
1327 path = wire['path']
1328 path = wire['path']
1328 return {
1329 return {
1329 'pre_version': get_git_pre_hook_version(path, bare),
1330 'pre_version': get_git_pre_hook_version(path, bare),
1330 'post_version': get_git_post_hook_version(path, bare),
1331 'post_version': get_git_post_hook_version(path, bare),
1331 }
1332 }
1332
1333
1333 @reraise_safe_exceptions
1334 @reraise_safe_exceptions
1334 def set_head_ref(self, wire, head_name):
1335 def set_head_ref(self, wire, head_name):
1335 log.debug('Setting refs/head to `%s`', head_name)
1336 log.debug('Setting refs/head to `%s`', head_name)
1336 cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
1337 cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
1337 output, __ = self.run_git_command(wire, cmd)
1338 output, __ = self.run_git_command(wire, cmd)
1338 return [head_name] + output.splitlines()
1339 return [head_name] + output.splitlines()
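Setting the default branch is just a symbolic-ref update; since the argument list is passed without a shell, the ref names must not carry literal quotes. A small sketch of the equivalent call, assuming the branch already exists; `set_default_branch` is an illustrative name.

import subprocess

def set_default_branch(repo_path, branch):
    # points HEAD at refs/heads/<branch>; the branch itself is not created here
    subprocess.check_call(
        ['git', 'symbolic-ref', 'HEAD', 'refs/heads/{}'.format(branch)],
        cwd=repo_path)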
1339
1340
1340 @reraise_safe_exceptions
1341 @reraise_safe_exceptions
1341 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1342 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1342 archive_dir_name, commit_id):
1343 archive_dir_name, commit_id):
1343
1344
1344 def file_walker(_commit_id, path):
1345 def file_walker(_commit_id, path):
1345 repo_init = self._factory.repo_libgit2(wire)
1346 repo_init = self._factory.repo_libgit2(wire)
1346
1347
1347 with repo_init as repo:
1348 with repo_init as repo:
1348 commit = repo[commit_id]
1349 commit = repo[commit_id]
1349
1350
1350 if path in ['', '/']:
1351 if path in ['', '/']:
1351 tree = commit.tree
1352 tree = commit.tree
1352 else:
1353 else:
1353 tree = commit.tree[path.rstrip('/')]
1354 tree = commit.tree[path.rstrip('/')]
1354 tree_id = tree.id.hex
1355 tree_id = tree.id.hex
1355 try:
1356 try:
1356 tree = repo[tree_id]
1357 tree = repo[tree_id]
1357 except KeyError:
1358 except KeyError:
1358 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1359 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1359
1360
1360 index = LibGit2Index.Index()
1361 index = LibGit2Index.Index()
1361 index.read_tree(tree)
1362 index.read_tree(tree)
1362 file_iter = index
1363 file_iter = index
1363
1364
1364 for fn in file_iter:
1365 for fn in file_iter:
1365 file_path = fn.path
1366 file_path = fn.path
1366 mode = fn.mode
1367 mode = fn.mode
1367 is_link = stat.S_ISLNK(mode)
1368 is_link = stat.S_ISLNK(mode)
1368 if mode == pygit2.GIT_FILEMODE_COMMIT:
1369 if mode == pygit2.GIT_FILEMODE_COMMIT:
1369 log.debug('Skipping path %s as a commit node', file_path)
1370 log.debug('Skipping path %s as a commit node', file_path)
1370 continue
1371 continue
1371 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1372 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1372
1373
1373 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1374 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1374 archive_dir_name, commit_id)
1375 archive_dir_name, commit_id)
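The archive walker flattens a commit tree into an in-memory index so nested paths come back as flat entries. A sketch of that trick in isolation; `iter_tree_files` is an illustrative helper, and recent pygit2 exposes the entry oid as entry.id rather than the .hex used above.

import stat
import pygit2

def iter_tree_files(repo_path, commit_id):
    repo = pygit2.Repository(repo_path)
    tree = repo[commit_id].tree
    index = pygit2.Index()        # in-memory index, not the repo's on-disk one
    index.read_tree(tree)
    for entry in index:
        yield entry.path, entry.mode, stat.S_ISLNK(entry.mode)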