##// END OF EJS Templates
core: multiple changes for python3 found during test runs of rhodecode-ce release
super-admin -
r1080:fe9c3296 python3
parent child Browse files
Show More
@@ -1,738 +1,738 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27 import msgpack
27 import msgpack
28
28
29 from http.client import HTTPConnection
29 from http.client import HTTPConnection
30
30
31
31
32 import mercurial.scmutil
32 import mercurial.scmutil
33 import mercurial.node
33 import mercurial.node
34
34
35 from vcsserver.lib.rc_json import json
35 from vcsserver.lib.rc_json import json
36 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.str_utils import safe_bytes
37 from vcsserver.str_utils import safe_bytes
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
class HooksHttpClient(object):
    """Hook client that POSTs msgpack-encoded hook calls to the hooks HTTP daemon."""

    proto = 'msgpack.v1'
    connection = None

    def __init__(self, hooks_uri):
        # host:port of the RhodeCode hooks daemon
        self.hooks_uri = hooks_uri

    def __call__(self, method, extras):
        """Call remote hook `method` with `extras`; return the unpacked response.

        :raises: the original connection error on request failure, or the
            decode error when the response is not valid msgpack.
        """
        connection = HTTPConnection(self.hooks_uri)
        # binary msgpack body
        headers, body = self._serialize(method, extras)
        try:
            try:
                connection.request('POST', '/', body, headers)
            except Exception as error:
                log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
                raise
            response = connection.getresponse()
            try:
                return msgpack.load(response)
            except Exception:
                response_data = response.read()
                log.exception('Failed to decode hook response json data. '
                              'response_code:%s, raw_data:%s',
                              response.status, response_data)
                raise
        finally:
            # previously the connection was leaked on every call; always
            # release it so sockets don't accumulate under load
            connection.close()

    @classmethod
    def _serialize(cls, hook_name, extras):
        """Build (headers, body) for a hook call; body is binary msgpack."""
        data = {
            'method': hook_name,
            'extras': extras
        }
        headers = {
            'rc-hooks-protocol': cls.proto
        }
        return headers, msgpack.packb(data)
78
78
79
79
class HooksDummyClient(object):
    """Hook client that dispatches calls to an in-process python hooks module."""

    def __init__(self, hooks_module):
        # import once; every call reuses the resolved module
        self._hooks_module = importlib.import_module(hooks_module)

    def __call__(self, hook_name, extras):
        """Run `hook_name` on a fresh Hooks context manager instance."""
        with self._hooks_module.Hooks() as hooks:
            hook = getattr(hooks, hook_name)
            return hook(extras)
87
87
88
88
class HooksShadowRepoClient(object):
    """No-op hook client used for shadow (merge-preview) repositories."""

    def __call__(self, hook_name, extras):
        # shadow repos never run real hooks; always report success, no output
        return {'output': '', 'status': 0}
93
93
94
94
class RemoteMessageWriter(object):
    """Writer base class: relays hook output back to the VCS client."""

    def write(self, message):
        # transport is client-specific; subclasses must override
        raise NotImplementedError()
99
99
100
100
class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message):
        """Emit `message` through the mercurial ui, temporarily un-quieting it."""
        # TODO: Check why the quiet flag is set by default.
        quiet_before = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = quiet_before
113
113
114
114
class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        # fall back to process stdout when no stream is supplied
        self.stdout = stdout or sys.stdout

    def write(self, message):
        """Write `message` to the git client stream as bytes."""
        self.stdout.write(safe_bytes(message))
123
123
124
124
class SvnMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        """Write `message` utf-8 encoded to the svn client's stderr channel."""
        self.stderr.write(message.encode('utf-8'))
134
134
135
135
136 def _handle_exception(result):
136 def _handle_exception(result):
137 exception_class = result.get('exception')
137 exception_class = result.get('exception')
138 exception_traceback = result.get('exception_traceback')
138 exception_traceback = result.get('exception_traceback')
139
139
140 if exception_traceback:
140 if exception_traceback:
141 log.error('Got traceback from remote call:%s', exception_traceback)
141 log.error('Got traceback from remote call:%s', exception_traceback)
142
142
143 if exception_class == 'HTTPLockedRC':
143 if exception_class == 'HTTPLockedRC':
144 raise exceptions.RepositoryLockedException()(*result['exception_args'])
144 raise exceptions.RepositoryLockedException()(*result['exception_args'])
145 elif exception_class == 'HTTPBranchProtected':
145 elif exception_class == 'HTTPBranchProtected':
146 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
146 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
147 elif exception_class == 'RepositoryError':
147 elif exception_class == 'RepositoryError':
148 raise exceptions.VcsException()(*result['exception_args'])
148 raise exceptions.VcsException()(*result['exception_args'])
149 elif exception_class:
149 elif exception_class:
150 raise Exception('Got remote exception "%s" with args "%s"' %
150 raise Exception('Got remote exception "%s" with args "%s"' %
151 (exception_class, result['exception_args']))
151 (exception_class, result['exception_args']))
152
152
153
153
def _get_hooks_client(extras):
    """Pick the hook client implementation based on the `extras` dict."""
    hooks_uri = extras.get('hooks_uri')
    is_shadow_repo = extras.get('is_shadow_repo')

    if hooks_uri:
        # normal operation: talk to the hooks HTTP daemon
        return HooksHttpClient(extras['hooks_uri'])
    if is_shadow_repo:
        # shadow repositories get a no-op client
        return HooksShadowRepoClient()
    # last resort: import and call a python hooks module in-process
    return HooksDummyClient(extras['hooks_module'])
163
163
164
164
def _call_hook(hook_name, extras, writer):
    """Invoke `hook_name` through the configured hooks client.

    Hook output is relayed through `writer`; remote exceptions are re-raised
    locally. Returns the hook's integer status code.
    """
    client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', client)

    result = client(hook_name, extras)
    log.debug('Hooks got result: %s', result)
    _handle_exception(result)

    writer.write(result['output'])
    return result['status']
175
175
176
176
177 def _extras_from_ui(ui):
177 def _extras_from_ui(ui):
178 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
178 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
179 if not hook_data:
179 if not hook_data:
180 # maybe it's inside environ ?
180 # maybe it's inside environ ?
181 env_hook_data = os.environ.get('RC_SCM_DATA')
181 env_hook_data = os.environ.get('RC_SCM_DATA')
182 if env_hook_data:
182 if env_hook_data:
183 hook_data = env_hook_data
183 hook_data = env_hook_data
184
184
185 extras = {}
185 extras = {}
186 if hook_data:
186 if hook_data:
187 extras = json.loads(hook_data)
187 extras = json.loads(hook_data)
188 return extras
188 return extras
189
189
190
190
def _rev_range_hash(repo, node, check_heads=False):
    """Collect (commit_id, branch) pairs for `node` up to the repo tip.

    When `check_heads` is set, also computes potential multiple-head
    revisions via _check_heads. Returns (commits, parent_heads).
    """
    from vcsserver.hgcompat import get_ctx

    commits = []
    revs = []
    start = get_ctx(repo, node).rev()
    end = len(repo)
    for rev in range(start, end):
        revs.append(rev)
        ctx = get_ctx(repo, rev)
        hex_id = mercurial.node.hex(ctx.node())
        commits.append((hex_id, ctx.branch()))

    parent_heads = _check_heads(repo, start, end, revs) if check_heads else []
    return commits, parent_heads
209
209
210
210
def _check_heads(repo, start, end, commits):
    """Detect whether the pushed revisions create multiple heads on a branch.

    Returns the list of head revisions when more than one head descends from
    a pre-existing parent on the same branch; otherwise an empty list.
    """
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    # collect parents of the new revs that live outside the pushed range
    for new_rev in commits:
        for parent_rev in changelog.parentrevs(new_rev):
            if parent_rev == mercurial.node.nullrev:
                continue
            if parent_rev < start:
                parents.add(parent_rev)

    for parent_rev in parents:
        branch = get_ctx(repo, parent_rev).branch()
        # The heads descending from that parent, on the same branch
        parent_heads = {parent_rev}
        reachable = {parent_rev}
        for rev in range(parent_rev + 1, end):
            if get_ctx(repo, rev).branch() != branch:
                continue
            for pp in changelog.parentrevs(rev):
                if pp in reachable:
                    reachable.add(rev)
                    parent_heads.discard(pp)
                    parent_heads.add(rev)
        # More than one head? Suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []
241
241
242
242
243 def _get_git_env():
243 def _get_git_env():
244 env = {}
244 env = {}
245 for k, v in os.environ.items():
245 for k, v in os.environ.items():
246 if k.startswith('GIT'):
246 if k.startswith('GIT'):
247 env[k] = v
247 env[k] = v
248
248
249 # serialized version
249 # serialized version
250 return [(k, v) for k, v in env.items()]
250 return [(k, v) for k, v in env.items()]
251
251
252
252
253 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
253 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
254 env = {}
254 env = {}
255 for k, v in os.environ.items():
255 for k, v in os.environ.items():
256 if k.startswith('HG'):
256 if k.startswith('HG'):
257 env[k] = v
257 env[k] = v
258
258
259 env['HG_NODE'] = old_rev
259 env['HG_NODE'] = old_rev
260 env['HG_NODE_LAST'] = new_rev
260 env['HG_NODE_LAST'] = new_rev
261 env['HG_TXNID'] = txnid
261 env['HG_TXNID'] = txnid
262 env['HG_PENDING'] = repo_path
262 env['HG_PENDING'] = repo_path
263
263
264 return [(k, v) for k, v in env.items()]
264 return [(k, v) for k, v in env.items()]
265
265
266
266
def repo_size(ui, repo, **kwargs):
    """Mercurial repo_size hook entry point."""
    return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
270
270
271
271
def pre_pull(ui, repo, **kwargs):
    """Mercurial pre_pull hook entry point."""
    return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
275
275
276
276
def pre_pull_ssh(ui, repo, **kwargs):
    """SSH variant of pre_pull; a no-op unless the session came over SSH."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return pre_pull(ui, repo, **kwargs)
282
282
283
283
def post_pull(ui, repo, **kwargs):
    """Mercurial post_pull hook entry point."""
    return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
287
287
288
288
def post_pull_ssh(ui, repo, **kwargs):
    """SSH variant of post_pull; a no-op unless the session came over SSH."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return post_pull(ui, repo, **kwargs)
294
294
295
295
def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook.

    Groups the incoming changegroup's commits per branch and forwards the
    per-branch revision ranges to the RhodeCode pre_push hook.
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    if node and kwargs.get('hooktype') == 'pretxnchangegroup':
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, commits in branches.items():
            # prefer explicit last-node info; fall back to the range start
            old_rev = kwargs.get('node_last') or commits[0]
            rev_data.append({
                'total_commits': len(commits),
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        repo_path = os.path.join(
            extras.get('repo_store', ''), extras.get('repository', ''))
        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
                repo_path=repo_path)

    extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))
335
335
336
336
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """SSH variant of pre_push; a no-op unless the session came over SSH."""
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0
    return pre_push(ui, repo, node, **kwargs)
343
343
344
344
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH.

    Blocks the push (returns 1) unless the SSH session carries write or
    admin permissions on the repository.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0
    # non-zero ret code rejects the push
    return 1
360
360
361
361
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook.

    Reports pushed commit ids and the touched branches/bookmarks/tags to the
    RhodeCode post_push hook.
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    tags = []

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # bookmark updates are stashed on the ui object by the key_push hook
    bookmarks = getattr(ui, '_rc_pushkey_branches', [])

    extras['hook_type'] = kwargs.get('hooktype', 'post_push')
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags,
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
391
391
392
392
def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH.

    A no-op unless the session came over SSH.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0
    return post_push(ui, repo, node, **kwargs)
400
400
401
401
def key_push(ui, repo, **kwargs):
    """Mercurial pushkey hook; records bookmark moves for the post_push hook."""
    from vcsserver.hgcompat import get_ctx

    if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
    return
408
408
409
409
# backward compat: old hook names kept as aliases so previously-installed
# mercurial hook configurations keep working
log_pull_action = post_pull
log_push_action = post_push
415
415
416
416
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    Intentionally does nothing.
    """
    return None
424
424
425
425
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    Intentionally does nothing.
    """
    return None
433
433
434
434
# (status, output) pair returned by the git pull/push hook wrappers below
HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
436
436
437
437
def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    output = io.BytesIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(output))
    except Exception as error:
        log.exception('Failed to call pre_pull hook')
        # 128 mirrors git's fatal-error exit code convention
        status = 128
        output.write(safe_bytes(f'ERROR: {error}\n'))
    return HookResponse(status, output.getvalue())
462
462
463
463
def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    stdout = io.BytesIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        # log the traceback, consistent with git_pre_pull (was silently
        # swallowed before, leaving only the client-facing message)
        log.exception('Failed to call post_pull hook')
        status = 128
        stdout.write(safe_bytes(f'ERROR: {error}\n'))

    return HookResponse(status, stdout.getvalue())
485
485
486
486
487 def _parse_git_ref_lines(revision_lines):
487 def _parse_git_ref_lines(revision_lines):
488 rev_data = []
488 rev_data = []
489 for revision_line in revision_lines or []:
489 for revision_line in revision_lines or []:
490 old_rev, new_rev, ref = revision_line.strip().split(' ')
490 old_rev, new_rev, ref = revision_line.strip().split(' ')
491 ref_data = ref.split('/', 2)
491 ref_data = ref.split('/', 2)
492 if ref_data[1] in ('tags', 'heads'):
492 if ref_data[1] in ('tags', 'heads'):
493 rev_data.append({
493 rev_data.append({
494 # NOTE(marcink):
494 # NOTE(marcink):
495 # we're unable to tell total_commits for git at this point
495 # we're unable to tell total_commits for git at this point
496 # but we set the variable for consistency with GIT
496 # but we set the variable for consistency with GIT
497 'total_commits': -1,
497 'total_commits': -1,
498 'old_rev': old_rev,
498 'old_rev': old_rev,
499 'new_rev': new_rev,
499 'new_rev': new_rev,
500 'ref': ref,
500 'ref': ref,
501 'type': ref_data[1],
501 'type': ref_data[1],
502 'name': ref_data[2],
502 'name': ref_data[2],
503 })
503 })
504 return rev_data
504 return rev_data
505
505
506
506
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0

    empty_commit_id = '0' * 40
    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        is_new_branch = push_ref['old_rev'] == empty_commit_id
        is_deleted_branch = push_ref['new_rev'] == empty_commit_id
        if push_ref['type'] != 'heads' or is_new_branch or is_deleted_branch:
            continue

        # commits reachable from old_rev but not from new_rev: any output
        # means unreachable objects exist, i.e. a forced (non-ff) push
        old_rev = push_ref['old_rev']
        new_rev = push_ref['new_rev']
        cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
        stdout, stderr = subprocessio.run_command(cmd, env=os.environ.copy())
        if stdout:
            push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())
549
549
550
550
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    :param unused_repo_path: repository path (unused, kept for hook signature)
    :param revision_lines: raw "<old-rev> <new-rev> <ref>" lines from git
    :param env: process environment; ``RC_SCM_DATA`` carries the simplevcs extras

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            push_ref_name = push_ref['name']

            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                # Fix up head revision if needed
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
                    # BUG FIX: argv items are passed to the process directly
                    # (no shell), so the previous '"HEAD"' / '"refs/heads/..."'
                    # literals sent the quote characters to git as part of the
                    # ref name and the command failed.
                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
                           f'refs/heads/{push_ref_name}']
                    print(f"Setting default branch to {push_ref_name}")
                    subprocessio.run_command(cmd, env=os.environ.copy())

                # list all other branch tips so we can exclude their history
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref_name)
            else:
                # update of an existing branch
                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter())
        except Exception:
            # best-effort: never fail the push because of repo_size, but do not
            # use a bare except (it would swallow SystemExit/KeyboardInterrupt)
            pass

    return _call_hook('post_push', extras, GitMessageWriter())
641
641
642
642
def _get_extras_from_txn_id(path, txn_id):
    """
    Read the 'rc-scm-extras' revprop of an open SVN transaction via svnlook.

    :return: decoded extras dict, or {} if the property cannot be read.
    """
    try:
        cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
               '-t', txn_id,
               '--revprop', path, 'rc-scm-extras']
        stdout, _ = subprocessio.run_command(cmd, env=os.environ.copy())
        return json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')
        return {}
656
656
657
657
def _get_extras_from_commit_id(commit_id, path):
    """
    Read the 'rc-scm-extras' revprop of a committed revision via svnlook.

    :return: decoded extras dict, or {} if the property cannot be read.
    """
    try:
        cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
               '-r', commit_id,
               '--revprop', path, 'rc-scm-extras']
        stdout, _ = subprocessio.run_command(cmd, env=os.environ.copy())
        return json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')
        return {}
671
671
672
672
def svn_pre_commit(repo_path, commit_data, env):
    """
    Pre-commit SVN hook; dispatches the 'pre_push' RhodeCode hook.

    :param commit_data: (path, txn_id) tuple for the open transaction
    :param env: process environment; ``RC_SCM_DATA`` carries the extras when set
    :return: status code of the hook. 0 for success.
    """
    path, txn_id = commit_data

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
        if not extras:
            return 0

    extras.update({
        'hook_type': 'pre_commit',
        'commit_ids': [txn_id],
        'txn_id': txn_id,
        'new_refs': {
            'total_commits': 1,
            'branches': [],
            'bookmarks': [],
            'tags': [],
        },
    })

    return _call_hook('pre_push', extras, SvnMessageWriter())
697
697
698
698
def svn_post_commit(repo_path, commit_data, env):
    """
    Post-commit SVN hook; dispatches the 'post_push' RhodeCode hook.

    :param commit_data: (path, rev, txn_id), or legacy (path, rev)
    :param env: process environment; ``RC_SCM_DATA`` carries the extras when set
    :return: status code of the hook. 0 for success.
    :raises ValueError: if commit_data does not have 2 or 3 elements
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        # BUG FIX: previously an unexpected shape fell through both branches
        # and crashed later with UnboundLocalError on `path`; fail early with
        # an explicit error instead.
        raise ValueError(
            'Unexpected commit_data shape, expected 2 or 3 elements, '
            'got: %r' % (commit_data,))

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
        if not extras:
            return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            # best-effort hook; never fail the commit because of repo_size
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,1349 +1,1366 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib.request, urllib.parse, urllib.error
25 import urllib.request, urllib.parse, urllib.error
26 import urllib.request, urllib.error, urllib.parse
26 import urllib.request, urllib.error, urllib.parse
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_str, ascii_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
# file-mode / object-type constants used when classifying tree entries
DIR_STAT = stat.S_IFDIR
FILE_MODE = stat.S_IFMT
GIT_LINK = objects.S_IFGITLINK
# byte markers used when parsing git ref listings
# (b'^{}' is the suffix on peeled refs — confirm at use sites)
PEELED_REF_MARKER = b'^{}'
HEAD_MARKER = b'HEAD'

log = logging.getLogger(__name__)
58
58
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    lookup_errors = (
        ChecksumMismatch, WrongObjectException, MissingCommitError,
        ObjectMissing)
    protocol_errors = (HangupException, UnexpectedCommandError)

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except protocol_errors as e:
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): becuase of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            raise
    return wrapper
81
81
82
82
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """

    def __del__(self):
        # guard with hasattr: __init__ may have failed before the
        # object_store attribute was ever set
        if hasattr(self, 'object_store'):
            self.close()
94
94
95
95
class Repository(LibGit2Repo):
    """Context-manager wrapper over pygit2's Repository: frees the underlying
    libgit2 resources when the `with` block exits."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
103
103
104
104
class GitFactory(RepoFactory):
    """Factory producing git repository objects, either pygit2 (libgit2) or
    dulwich based."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        if use_libgit2:
            return Repository(safe_bytes(wire['path']))
        # dulwich mode
        repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
        return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        # convenience shortcut for the pygit2 backend
        return self.repo(wire, use_libgit2=True)
123
124
124
125
125 class GitRemote(RemoteBase):
126 class GitRemote(RemoteBase):
126
127
127 def __init__(self, factory):
128 def __init__(self, factory):
128 self._factory = factory
129 self._factory = factory
129 self._bulk_methods = {
130 self._bulk_methods = {
130 "date": self.date,
131 "date": self.date,
131 "author": self.author,
132 "author": self.author,
132 "branch": self.branch,
133 "branch": self.branch,
133 "message": self.message,
134 "message": self.message,
134 "parents": self.parents,
135 "parents": self.parents,
135 "_commit": self.revision,
136 "_commit": self.revision,
136 }
137 }
137
138
138 def _wire_to_config(self, wire):
139 def _wire_to_config(self, wire):
139 if 'config' in wire:
140 if 'config' in wire:
140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
141 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
141 return {}
142 return {}
142
143
143 def _remote_conf(self, config):
144 def _remote_conf(self, config):
144 params = [
145 params = [
145 '-c', 'core.askpass=""',
146 '-c', 'core.askpass=""',
146 ]
147 ]
147 ssl_cert_dir = config.get('vcs_ssl_dir')
148 ssl_cert_dir = config.get('vcs_ssl_dir')
148 if ssl_cert_dir:
149 if ssl_cert_dir:
149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
150 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
150 return params
151 return params
151
152
152 @reraise_safe_exceptions
153 @reraise_safe_exceptions
153 def discover_git_version(self):
154 def discover_git_version(self):
154 stdout, _ = self.run_git_command(
155 stdout, _ = self.run_git_command(
155 {}, ['--version'], _bare=True, _safe=True)
156 {}, ['--version'], _bare=True, _safe=True)
156 prefix = b'git version'
157 prefix = b'git version'
157 if stdout.startswith(prefix):
158 if stdout.startswith(prefix):
158 stdout = stdout[len(prefix):]
159 stdout = stdout[len(prefix):]
159 return safe_str(stdout.strip())
160 return safe_str(stdout.strip())
160
161
161 @reraise_safe_exceptions
162 @reraise_safe_exceptions
162 def is_empty(self, wire):
163 def is_empty(self, wire):
163 repo_init = self._factory.repo_libgit2(wire)
164 repo_init = self._factory.repo_libgit2(wire)
164 with repo_init as repo:
165 with repo_init as repo:
165
166
166 try:
167 try:
167 has_head = repo.head.name
168 has_head = repo.head.name
168 if has_head:
169 if has_head:
169 return False
170 return False
170
171
171 # NOTE(marcink): check again using more expensive method
172 # NOTE(marcink): check again using more expensive method
172 return repo.is_empty
173 return repo.is_empty
173 except Exception:
174 except Exception:
174 pass
175 pass
175
176
176 return True
177 return True
177
178
178 @reraise_safe_exceptions
179 @reraise_safe_exceptions
179 def assert_correct_path(self, wire):
180 def assert_correct_path(self, wire):
180 cache_on, context_uid, repo_id = self._cache_on(wire)
181 cache_on, context_uid, repo_id = self._cache_on(wire)
181 region = self._region(wire)
182 region = self._region(wire)
182
183
183 @region.conditional_cache_on_arguments(condition=cache_on)
184 @region.conditional_cache_on_arguments(condition=cache_on)
184 def _assert_correct_path(_context_uid, _repo_id):
185 def _assert_correct_path(_context_uid, _repo_id):
185 try:
186 try:
186 repo_init = self._factory.repo_libgit2(wire)
187 repo_init = self._factory.repo_libgit2(wire)
187 with repo_init as repo:
188 with repo_init as repo:
188 pass
189 pass
189 except pygit2.GitError:
190 except pygit2.GitError:
190 path = wire.get('path')
191 path = wire.get('path')
191 tb = traceback.format_exc()
192 tb = traceback.format_exc()
192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
193 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
193 return False
194 return False
194
195
195 return True
196 return True
196 return _assert_correct_path(context_uid, repo_id)
197 return _assert_correct_path(context_uid, repo_id)
197
198
198 @reraise_safe_exceptions
199 @reraise_safe_exceptions
199 def bare(self, wire):
200 def bare(self, wire):
200 repo_init = self._factory.repo_libgit2(wire)
201 repo_init = self._factory.repo_libgit2(wire)
201 with repo_init as repo:
202 with repo_init as repo:
202 return repo.is_bare
203 return repo.is_bare
203
204
204 @reraise_safe_exceptions
205 @reraise_safe_exceptions
205 def blob_as_pretty_string(self, wire, sha):
206 def blob_as_pretty_string(self, wire, sha):
206 repo_init = self._factory.repo_libgit2(wire)
207 repo_init = self._factory.repo_libgit2(wire)
207 with repo_init as repo:
208 with repo_init as repo:
208 blob_obj = repo[sha]
209 blob_obj = repo[sha]
209 blob = blob_obj.data
210 blob = blob_obj.data
210 return blob
211 return blob
211
212
212 @reraise_safe_exceptions
213 @reraise_safe_exceptions
213 def blob_raw_length(self, wire, sha):
214 def blob_raw_length(self, wire, sha):
214 cache_on, context_uid, repo_id = self._cache_on(wire)
215 cache_on, context_uid, repo_id = self._cache_on(wire)
215 region = self._region(wire)
216 region = self._region(wire)
216
217
217 @region.conditional_cache_on_arguments(condition=cache_on)
218 @region.conditional_cache_on_arguments(condition=cache_on)
218 def _blob_raw_length(_repo_id, _sha):
219 def _blob_raw_length(_repo_id, _sha):
219
220
220 repo_init = self._factory.repo_libgit2(wire)
221 repo_init = self._factory.repo_libgit2(wire)
221 with repo_init as repo:
222 with repo_init as repo:
222 blob = repo[sha]
223 blob = repo[sha]
223 return blob.size
224 return blob.size
224
225
225 return _blob_raw_length(repo_id, sha)
226 return _blob_raw_length(repo_id, sha)
226
227
227 def _parse_lfs_pointer(self, raw_content):
228 def _parse_lfs_pointer(self, raw_content):
228 spec_string = b'version https://git-lfs.github.com/spec'
229 spec_string = b'version https://git-lfs.github.com/spec'
229 if raw_content and raw_content.startswith(spec_string):
230 if raw_content and raw_content.startswith(spec_string):
230
231
231 pattern = re.compile(rb"""
232 pattern = re.compile(rb"""
232 (?:\n)?
233 (?:\n)?
233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 ^size[ ](?P<oid_size>[0-9]+)\n
236 ^size[ ](?P<oid_size>[0-9]+)\n
236 (?:\n)?
237 (?:\n)?
237 """, re.VERBOSE | re.MULTILINE)
238 """, re.VERBOSE | re.MULTILINE)
238 match = pattern.match(raw_content)
239 match = pattern.match(raw_content)
239 if match:
240 if match:
240 return match.groupdict()
241 return match.groupdict()
241
242
242 return {}
243 return {}
243
244
244 @reraise_safe_exceptions
245 @reraise_safe_exceptions
245 def is_large_file(self, wire, commit_id):
246 def is_large_file(self, wire, commit_id):
246 cache_on, context_uid, repo_id = self._cache_on(wire)
247 cache_on, context_uid, repo_id = self._cache_on(wire)
247 region = self._region(wire)
248 region = self._region(wire)
248
249
249 @region.conditional_cache_on_arguments(condition=cache_on)
250 @region.conditional_cache_on_arguments(condition=cache_on)
250 def _is_large_file(_repo_id, _sha):
251 def _is_large_file(_repo_id, _sha):
251 repo_init = self._factory.repo_libgit2(wire)
252 repo_init = self._factory.repo_libgit2(wire)
252 with repo_init as repo:
253 with repo_init as repo:
253 blob = repo[commit_id]
254 blob = repo[commit_id]
254 if blob.is_binary:
255 if blob.is_binary:
255 return {}
256 return {}
256
257
257 return self._parse_lfs_pointer(blob.data)
258 return self._parse_lfs_pointer(blob.data)
258
259
259 return _is_large_file(repo_id, commit_id)
260 return _is_large_file(repo_id, commit_id)
260
261
261 @reraise_safe_exceptions
262 @reraise_safe_exceptions
262 def is_binary(self, wire, tree_id):
263 def is_binary(self, wire, tree_id):
263 cache_on, context_uid, repo_id = self._cache_on(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
264 region = self._region(wire)
265 region = self._region(wire)
265
266
266 @region.conditional_cache_on_arguments(condition=cache_on)
267 @region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
268 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
269 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
270 with repo_init as repo:
270 blob_obj = repo[tree_id]
271 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
272 return blob_obj.is_binary
272
273
273 return _is_binary(repo_id, tree_id)
274 return _is_binary(repo_id, tree_id)
274
275
275 @reraise_safe_exceptions
276 @reraise_safe_exceptions
276 def md5_hash(self, wire, tree_id):
277 def md5_hash(self, wire, tree_id):
277 cache_on, context_uid, repo_id = self._cache_on(wire)
278 cache_on, context_uid, repo_id = self._cache_on(wire)
278 region = self._region(wire)
279 region = self._region(wire)
279
280
280 @region.conditional_cache_on_arguments(condition=cache_on)
281 @region.conditional_cache_on_arguments(condition=cache_on)
281 def _md5_hash(_repo_id, _tree_id):
282 def _md5_hash(_repo_id, _tree_id):
282 return ''
283 return ''
283
284
284 return _md5_hash(repo_id, tree_id)
285 return _md5_hash(repo_id, tree_id)
285
286
286 @reraise_safe_exceptions
287 @reraise_safe_exceptions
287 def in_largefiles_store(self, wire, oid):
288 def in_largefiles_store(self, wire, oid):
288 conf = self._wire_to_config(wire)
289 conf = self._wire_to_config(wire)
289 repo_init = self._factory.repo_libgit2(wire)
290 repo_init = self._factory.repo_libgit2(wire)
290 with repo_init as repo:
291 with repo_init as repo:
291 repo_name = repo.path
292 repo_name = repo.path
292
293
293 store_location = conf.get('vcs_git_lfs_store_location')
294 store_location = conf.get('vcs_git_lfs_store_location')
294 if store_location:
295 if store_location:
295
296
296 store = LFSOidStore(
297 store = LFSOidStore(
297 oid=oid, repo=repo_name, store_location=store_location)
298 oid=oid, repo=repo_name, store_location=store_location)
298 return store.has_oid()
299 return store.has_oid()
299
300
300 return False
301 return False
301
302
302 @reraise_safe_exceptions
303 @reraise_safe_exceptions
303 def store_path(self, wire, oid):
304 def store_path(self, wire, oid):
304 conf = self._wire_to_config(wire)
305 conf = self._wire_to_config(wire)
305 repo_init = self._factory.repo_libgit2(wire)
306 repo_init = self._factory.repo_libgit2(wire)
306 with repo_init as repo:
307 with repo_init as repo:
307 repo_name = repo.path
308 repo_name = repo.path
308
309
309 store_location = conf.get('vcs_git_lfs_store_location')
310 store_location = conf.get('vcs_git_lfs_store_location')
310 if store_location:
311 if store_location:
311 store = LFSOidStore(
312 store = LFSOidStore(
312 oid=oid, repo=repo_name, store_location=store_location)
313 oid=oid, repo=repo_name, store_location=store_location)
313 return store.oid_path
314 return store.oid_path
314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
315 raise ValueError('Unable to fetch oid with path {}'.format(oid))
315
316
316 @reraise_safe_exceptions
317 @reraise_safe_exceptions
317 def bulk_request(self, wire, rev, pre_load):
318 def bulk_request(self, wire, rev, pre_load):
318 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 region = self._region(wire)
320 region = self._region(wire)
320
321
321 @region.conditional_cache_on_arguments(condition=cache_on)
322 @region.conditional_cache_on_arguments(condition=cache_on)
322 def _bulk_request(_repo_id, _rev, _pre_load):
323 def _bulk_request(_repo_id, _rev, _pre_load):
323 result = {}
324 result = {}
324 for attr in pre_load:
325 for attr in pre_load:
325 try:
326 try:
326 method = self._bulk_methods[attr]
327 method = self._bulk_methods[attr]
327 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
328 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
328 args = [wire, rev]
329 args = [wire, rev]
329 result[attr] = method(*args)
330 result[attr] = method(*args)
330 except KeyError as e:
331 except KeyError as e:
331 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
332 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
332 return result
333 return result
333
334
334 return _bulk_request(repo_id, rev, sorted(pre_load))
335 return _bulk_request(repo_id, rev, sorted(pre_load))
335
336
336 def _build_opener(self, url):
337 def _build_opener(self, url):
337 handlers = []
338 handlers = []
338 url_obj = url_parser(url)
339 url_obj = url_parser(url)
339 _, authinfo = url_obj.authinfo()
340 _, authinfo = url_obj.authinfo()
340
341
341 if authinfo:
342 if authinfo:
342 # create a password manager
343 # create a password manager
343 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
344 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
344 passmgr.add_password(*authinfo)
345 passmgr.add_password(*authinfo)
345
346
346 handlers.extend((httpbasicauthhandler(passmgr),
347 handlers.extend((httpbasicauthhandler(passmgr),
347 httpdigestauthhandler(passmgr)))
348 httpdigestauthhandler(passmgr)))
348
349
349 return urllib.request.build_opener(*handlers)
350 return urllib.request.build_opener(*handlers)
350
351
351 def _type_id_to_name(self, type_id: int):
352 def _type_id_to_name(self, type_id: int):
352 return {
353 return {
353 1: 'commit',
354 1: 'commit',
354 2: 'tree',
355 2: 'tree',
355 3: 'blob',
356 3: 'blob',
356 4: 'tag'
357 4: 'tag'
357 }[type_id]
358 }[type_id]
358
359
@reraise_safe_exceptions
def check_url(self, url, config):
    """Check that *url* points at a reachable git smart-HTTP repository.

    Builds the ``info/refs?service=git-upload-pack`` probe URL, opens it
    with an opener carrying any credentials embedded in *url*, and
    verifies the response looks like git protocol data.

    :param url: remote URL to validate (may contain user:pass).
    :param config: passed by callers; not used by the probe itself.
    :raises exceptions.URLError: when the URL cannot be opened or does
        not look like a git repository.
    :return: True on success.
    """
    url_obj = url_parser(safe_bytes(url))
    test_uri, _ = url_obj.authinfo()
    # never log/raise with the real password
    url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
    url_obj.query = obfuscate_qs(url_obj.query)
    cleaned_uri = str(url_obj)
    log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

    # NOTE(review): url_parser() is fed bytes here, so test_uri may be
    # bytes as well — confirm endswith/rstrip operate on matching types.
    if not test_uri.endswith('info/refs'):
        test_uri = test_uri.rstrip('/') + '/info/refs'

    o = self._build_opener(url)
    o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

    q = {"service": 'git-upload-pack'}
    qs = '?%s' % urllib.parse.urlencode(q)
    cu = "%s%s" % (test_uri, qs)
    req = urllib.request.Request(cu, None, {})

    try:
        log.debug("Trying to open URL %s", cleaned_uri)
        resp = o.open(req)
        if resp.code != 200:
            raise exceptions.URLError()('Return Code is not 200')
    except Exception as e:
        log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
        # means it cannot be cloned
        raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

    # now detect if it's proper git repo.
    # BUGFIX(py3): resp.read() returns bytes, so the membership test and
    # the regex must use bytes patterns — comparing against str would
    # raise TypeError / never match under python3.
    gitdata = resp.read()
    if b'service=git-upload-pack' in gitdata:
        pass
    elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
        # old style git can return some other format !
        pass
    else:
        raise exceptions.URLError()(
            "url [%s] does not look like an git" % (cleaned_uri,))

    return True
401
402
@reraise_safe_exceptions
def clone(self, wire, url, deferred, valid_refs, update_after_clone):
    """Clone *url* into the wire repository by pulling all refs and
    copying the wanted ones into the local ref store.

    :param deferred: ref-name suffix(es) to skip (peeled-tag markers).
    :param valid_refs: prefix or tuple of prefixes of refs to keep.
    :param update_after_clone: when True, also check out HEAD into the
        working index.
    """
    # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
    remote_refs = self.pull(wire, url, apply_refs=False)
    repo = self._factory.repo(wire)
    # str.startswith below needs a str/tuple, not a list
    if isinstance(valid_refs, list):
        valid_refs = tuple(valid_refs)

    # NOTE(review): dulwich returns ref names as bytes under python3;
    # verify valid_refs/deferred and the "HEAD" keys below are of the
    # matching type, otherwise no ref would ever be copied.
    for k in remote_refs:
        # only parse heads/tags and skip so called deferred tags
        if k.startswith(valid_refs) and not k.endswith(deferred):
            repo[k] = remote_refs[k]

    if update_after_clone:
        # we want to checkout HEAD
        repo["HEAD"] = remote_refs["HEAD"]
        index.build_index_from_tree(repo.path, repo.index_path(),
                                    repo.object_store, repo["HEAD"].tree)
420
421
@reraise_safe_exceptions
def branch(self, wire, commit_id):
    """Return short branch names whose head ref points exactly at
    *commit_id* (result is cached per repo context)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _branch(_context_uid, _repo_id, _commit_id):
        heads_re = re.compile('^refs/heads')

        matching = [
            ref_name
            for ref_name, target in self.get_refs(wire).items()
            if heads_re.match(ref_name) and target == _commit_id
        ]
        # strip the refs/heads/ prefix, keep only the short name
        return [ref_name.split('refs/heads/')[-1] for ref_name in matching]

    return _branch(context_uid, repo_id, commit_id)
436
437
@reraise_safe_exceptions
def commit_branches(self, wire, commit_id):
    """Return the branches that contain *commit_id* (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _commit_branches(_context_uid, _repo_id, _commit_id):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            # materialize before the repo handle is closed
            return list(repo.branches.with_commit(_commit_id))

    return _commit_branches(context_uid, repo_id, commit_id)
449
450
@reraise_safe_exceptions
def add_object(self, wire, content):
    """Store *content* as a blob object in the repo and return its id."""
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        new_blob = objects.Blob()
        new_blob.set_raw_string(content)
        repo.object_store.add_object(new_blob)
        return new_blob.id
458
459
# TODO: this is quite complex, check if that can be simplified
@reraise_safe_exceptions
def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
    """Create a commit on *branch* from an in-memory description.

    :param commit_data: dict with commit attributes ('author',
        'committer', 'message', 'encoding', 'parents', ...).
    :param commit_tree: sha of the parent commit's tree, or falsy for a
        fresh root tree.
    :param updated: list of node dicts ('path', 'node_path', 'content',
        'mode') to add/update.
    :param removed: list of node paths to delete.
    :return: the new commit id.
    """
    # Defines the root tree
    class _Root(object):
        def __repr__(self):
            return 'ROOT TREE'
    ROOT = _Root()

    repo = self._factory.repo(wire)
    object_store = repo.object_store

    # Create tree and populates it with blobs
    # dulwich object-store keys are bytes under python3
    if commit_tree:
        commit_tree = safe_bytes(commit_tree)

    if commit_tree and repo[commit_tree]:
        # reuse the first parent's root tree as the starting point
        git_commit = repo[safe_bytes(commit_data['parents'][0])]
        commit_tree = repo[git_commit.tree]  # root tree
    else:
        commit_tree = objects.Tree()

    for node in updated:
        # Compute subdirs if needed
        dirpath, nodename = vcspath.split(node['path'])
        dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
        parent = commit_tree
        ancestors = [('', parent)]

        # Tries to dig for the deepest existing tree
        while dirnames:
            curdir = dirnames.pop(0)
            try:
                dir_id = parent[curdir][1]
            except KeyError:
                # put curdir back into dirnames and stops
                dirnames.insert(0, curdir)
                break
            else:
                # If found, updates parent
                parent = repo[dir_id]
                ancestors.append((curdir, parent))
        # Now parent is deepest existing tree and we need to create
        # subtrees for dirnames (in reverse order)
        # [this only applies for nodes from added]
        new_trees = []

        blob = objects.Blob.from_string(node['content'])

        if dirnames:
            # If there are trees which should be created we need to build
            # them now (in reverse order)
            reversed_dirnames = list(reversed(dirnames))
            curtree = objects.Tree()
            curtree[node['node_path']] = node['mode'], blob.id
            new_trees.append(curtree)
            for dirname in reversed_dirnames[:-1]:
                newtree = objects.Tree()
                newtree[dirname] = (DIR_STAT, curtree.id)
                new_trees.append(newtree)
                curtree = newtree
            parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
        else:
            parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

        new_trees.append(parent)
        # Update ancestors
        reversed_ancestors = reversed(
            [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
        for parent, tree, path in reversed_ancestors:
            parent[path] = (DIR_STAT, tree.id)
            object_store.add_object(tree)

        object_store.add_object(blob)
        for tree in new_trees:
            object_store.add_object(tree)

    for node_path in removed:
        paths = node_path.split('/')
        tree = commit_tree  # start with top-level
        trees = [{'tree': tree, 'path': ROOT}]
        # Traverse deep into the forest...
        # resolve final tree by iterating the path.
        # e.g a/b/c.txt will get
        # - root as tree then
        # - 'a' as tree,
        # - 'b' as tree,
        # - stop at c as blob.
        for path in paths:
            try:
                obj = repo[tree[path][1]]
                if isinstance(obj, objects.Tree):
                    trees.append({'tree': obj, 'path': path})
                    tree = obj
            except KeyError:
                break
        #PROBLEM:
        """
        We're not editing same reference tree object
        """
        # Cut down the blob and all rotten trees on the way back...
        for path, tree_data in reversed(list(zip(paths, trees))):
            tree = tree_data['tree']
            tree.__delitem__(path)
            # This operation edits the tree, we need to mark new commit back

            if len(tree) > 0:
                # This tree still has elements - don't remove it or any
                # of it's parents
                break

    object_store.add_object(commit_tree)

    # Create commit
    commit = objects.Commit()
    commit.tree = commit_tree.id
    # attributes that dulwich requires as bytes under python3
    bytes_keys = [
        'author',
        'committer',
        'message',
        'encoding',
        'parents'
    ]

    for k, v in commit_data.items():
        if k in bytes_keys:
            if k == 'parents':
                v = [safe_bytes(x) for x in v]
            else:
                v = safe_bytes(v)
            setattr(commit, k, v)

    object_store.add_object(commit)

    self.create_branch(wire, branch, safe_str(commit.id))

    # dulwich set-ref
    repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id

    return commit.id
599
600
@reraise_safe_exceptions
def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
    """Fetch from *url* into the wire repository.

    :param apply_refs: when True, write the fetched refs into the local
        ref store (skipping peeled-tag markers).
    :param refs: optional list of ref names; restricts both what is
        fetched (via determine_wants) and what is returned.
    :param update_after: when True, set HEAD and rebuild the working
        index from it after the fetch.
    :return: dict of remote refs (filtered to *refs* when given).
    :raises exceptions.AbortException: when *url* is not a git repo.
    """
    # local path (no scheme) vs remote http(s) URL
    if url != 'default' and '://' not in url:
        client = LocalGitClient(url)
    else:
        url_obj = url_parser(url)
        o = self._build_opener(url)
        url, _ = url_obj.authinfo()
        client = HttpGitClient(base_url=url, opener=o)
    repo = self._factory.repo(wire)

    determine_wants = repo.object_store.determine_wants_all
    if refs:
        # normalize requested refs to bytes for comparison below
        refs = [ascii_bytes(x) for x in refs]

        def determine_wants_requested(remote_refs):
            # return only the hashes of the refs the caller asked for
            determined = []
            for ref_name, ref_hash in remote_refs.items():
                bytes_ref_name = safe_bytes(ref_name)

                if bytes_ref_name in refs:
                    bytes_ref_hash = safe_bytes(ref_hash)
                    determined.append(bytes_ref_hash)
            return determined

        # swap with our custom requested wants
        determine_wants = determine_wants_requested

    try:
        remote_refs = client.fetch(
            path=url, target=repo, determine_wants=determine_wants)

    except NotGitRepository as e:
        log.warning(
            'Trying to fetch from "%s" failed, not a Git repository.', url)
        # Exception can contain unicode which we convert
        raise exceptions.AbortException(e)(repr(e))

    # mikhail: client.fetch() returns all the remote refs, but fetches only
    # refs filtered by `determine_wants` function. We need to filter result
    # as well
    if refs:
        remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

    if apply_refs:
        # TODO: johbo: Needs proper test coverage with a git repository
        # that contains a tag object, so that we would end up with
        # a peeled ref at this point.
        for k in remote_refs:
            if k.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", k)
                continue
            repo[k] = remote_refs[k]

        if refs and not update_after:
            # mikhail: explicitly set the head to the last ref.
            repo[HEAD_MARKER] = remote_refs[refs[-1]]

    if update_after:
        # we want to checkout HEAD
        repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
        index.build_index_from_tree(repo.path, repo.index_path(),
                                    repo.object_store, repo[HEAD_MARKER].tree)
    return remote_refs
652
665
@reraise_safe_exceptions
def sync_fetch(self, wire, url, refs=None, all_refs=False):
    """Fetch refs from remote *url* using the git CLI.

    Lists remote refs with ``git ls-remote`` (heads/tags only unless
    *all_refs*), then fetches them in chunks with ``git fetch``.

    :param refs: optional list of commit shas; when given, only refs
        pointing at those shas are fetched.
    :return: OrderedDict mapping ref name (bytes) -> sha (bytes).
    """
    repo = self._factory.repo(wire)
    if refs and not isinstance(refs, (list, tuple)):
        refs = [refs]

    config = self._wire_to_config(wire)
    # get all remote refs we'll use to fetch later
    cmd = ['ls-remote']
    if not all_refs:
        cmd += ['--heads', '--tags']
    cmd += [url]
    output, __ = self.run_git_command(
        wire, cmd, fail_on_stderr=False,
        _copts=self._remote_conf(config),
        extra_env={'GIT_TERMINAL_PROMPT': '0'})

    remote_refs = collections.OrderedDict()
    fetch_refs = []

    # ls-remote output is bytes: "<sha>\t<ref>" per line
    for ref_line in output.splitlines():
        sha, ref = ref_line.split(b'\t')
        sha = sha.strip()
        if ref in remote_refs:
            # duplicate, skip
            continue
        if ref.endswith(PEELED_REF_MARKER):
            log.debug("Skipping peeled reference %s", ref)
            continue
        # don't sync HEAD
        if ref in [HEAD_MARKER]:
            continue

        remote_refs[ref] = sha

        if refs and sha in refs:
            # we filter fetch using our specified refs
            fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        elif not refs:
            fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
    log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

    if fetch_refs:
        # chunk to keep the command line below OS argv limits
        for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
            fetch_refs_chunks = list(chunk)
            log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
            self.run_git_command(
                wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                fail_on_stderr=False,
                _copts=self._remote_conf(config),
                extra_env={'GIT_TERMINAL_PROMPT': '0'})

    return remote_refs
706
719
@reraise_safe_exceptions
def sync_push(self, wire, url, refs=None):
    """Mirror-push the wire repository to *url*; silently returns when
    the remote URL does not validate."""
    if not self.check_url(url, wire):
        return

    config = self._wire_to_config(wire)
    self._factory.repo(wire)
    push_cmd = ['push', url, '--mirror']
    self.run_git_command(
        wire, push_cmd,
        fail_on_stderr=False,
        _copts=self._remote_conf(config),
        extra_env={'GIT_TERMINAL_PROMPT': '0'})
717
730
@reraise_safe_exceptions
def get_remote_refs(self, wire, url):
    """Return the refs dict of the repository located at *url*."""
    return Repo(url).get_refs()
722
735
@reraise_safe_exceptions
def get_description(self, wire):
    """Return the description of the wire repository."""
    return self._factory.repo(wire).get_description()
727
740
@reraise_safe_exceptions
def get_missing_revs(self, wire, rev1, rev2, path2):
    """Return commit ids reachable from *rev2* but not from *rev1*,
    after cross-fetching between the wire repo and the repo at *path2*."""
    local_repo = self._factory.repo(wire)
    LocalGitClient(thin_packs=False).fetch(path2, local_repo)

    remote_wire = wire.copy()
    remote_wire['path'] = path2
    other_repo = self._factory.repo(remote_wire)
    LocalGitClient(thin_packs=False).fetch(wire["path"], other_repo)

    walker = other_repo.get_walker(include=[rev2], exclude=[rev1])
    return [entry.commit.id for entry in walker]
742
755
@reraise_safe_exceptions
def get_object(self, wire, sha, maybe_unreachable=False):
    """Resolve *sha* (or a revspec) to a commit descriptor dict.

    :param maybe_unreachable: when True, skip the dangling-commit check
        (accept commits not reachable from any branch).
    :return: dict with 'id', 'type', 'commit_id', 'idx'.
    :raises exceptions.LookupException: when the sha does not resolve or
        is a dangling commit.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_object(_context_uid, _repo_id, _sha):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
            try:
                commit = repo.revparse_single(sha)
            except KeyError:
                # NOTE(marcink): KeyError doesn't give us any meaningful information
                # here, we instead give something more explicit
                e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                raise exceptions.LookupException(e)(missing_commit_err)
            except ValueError as e:
                raise exceptions.LookupException(e)(missing_commit_err)

            is_tag = False
            if isinstance(commit, pygit2.Tag):
                # resolve annotated tag to its target commit
                commit = repo.get(commit.target)
                is_tag = True

            check_dangling = True
            if is_tag:
                check_dangling = False

            if check_dangling and maybe_unreachable:
                check_dangling = False

            # we used a reference and it parsed means we're not having a dangling commit
            if sha != commit.hex:
                check_dangling = False

            if check_dangling:
                # check for dangling commit
                for branch in repo.branches.with_commit(commit.hex):
                    if branch:
                        break
                else:
                    # NOTE(marcink): Empty error doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)

            commit_id = commit.hex
            type_id = commit.type

            return {
                'id': commit_id,
                'type': self._type_id_to_name(type_id),
                'commit_id': commit_id,
                'idx': 0
            }

    return _get_object(context_uid, repo_id, sha)
802
815
@reraise_safe_exceptions
def get_refs(self, wire):
    """Return a mapping of branch/tag ref names to target shas (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_refs(_context_uid, _repo_id):
        heads_or_tags = re.compile('^refs/(heads|tags)/')

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            result = {}
            for ref in repo.listall_reference_objects():
                # keep only branch and tag references
                if heads_or_tags.match(ref.name):
                    result[ref.name] = ref.target.hex
            return result

    return _get_refs(context_uid, repo_id)
818
831
@reraise_safe_exceptions
def get_branch_pointers(self, wire):
    """Return a mapping of head-commit sha to short branch name (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_branch_pointers(_context_uid, _repo_id):
        heads_re = re.compile('^refs/heads')

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            pointers = {}
            for ref in repo.listall_reference_objects():
                if heads_re.match(ref.name):
                    pointers[ref.target.hex] = ref.shorthand
            return pointers

    return _get_branch_pointers(context_uid, repo_id)
834
847
@reraise_safe_exceptions
def head(self, wire, show_exc=True):
    """Return the sha HEAD points at (cached); when *show_exc* is False,
    swallow resolution errors and return None instead of raising."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _head(_context_uid, _repo_id, _show_exc):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            try:
                return repo.head.peel().hex
            except Exception:
                if show_exc:
                    raise

    return _head(context_uid, repo_id, show_exc)
850
863
851 @reraise_safe_exceptions
864 @reraise_safe_exceptions
852 def init(self, wire):
865 def init(self, wire):
853 repo_path = safe_str(wire['path'])
866 repo_path = safe_str(wire['path'])
854 self.repo = Repo.init(repo_path)
867 self.repo = Repo.init(repo_path)
855
868
856 @reraise_safe_exceptions
869 @reraise_safe_exceptions
857 def init_bare(self, wire):
870 def init_bare(self, wire):
858 repo_path = safe_str(wire['path'])
871 repo_path = safe_str(wire['path'])
859 self.repo = Repo.init_bare(repo_path)
872 self.repo = Repo.init_bare(repo_path)
860
873
861 @reraise_safe_exceptions
874 @reraise_safe_exceptions
862 def revision(self, wire, rev):
875 def revision(self, wire, rev):
863
876
864 cache_on, context_uid, repo_id = self._cache_on(wire)
877 cache_on, context_uid, repo_id = self._cache_on(wire)
865 region = self._region(wire)
878 region = self._region(wire)
866
879
867 @region.conditional_cache_on_arguments(condition=cache_on)
880 @region.conditional_cache_on_arguments(condition=cache_on)
868 def _revision(_context_uid, _repo_id, _rev):
881 def _revision(_context_uid, _repo_id, _rev):
869 repo_init = self._factory.repo_libgit2(wire)
882 repo_init = self._factory.repo_libgit2(wire)
870 with repo_init as repo:
883 with repo_init as repo:
871 commit = repo[rev]
884 commit = repo[rev]
872 obj_data = {
885 obj_data = {
873 'id': commit.id.hex,
886 'id': commit.id.hex,
874 }
887 }
875 # tree objects itself don't have tree_id attribute
888 # tree objects itself don't have tree_id attribute
876 if hasattr(commit, 'tree_id'):
889 if hasattr(commit, 'tree_id'):
877 obj_data['tree'] = commit.tree_id.hex
890 obj_data['tree'] = commit.tree_id.hex
878
891
879 return obj_data
892 return obj_data
880 return _revision(context_uid, repo_id, rev)
893 return _revision(context_uid, repo_id, rev)
881
894
882 @reraise_safe_exceptions
895 @reraise_safe_exceptions
883 def date(self, wire, commit_id):
896 def date(self, wire, commit_id):
884 cache_on, context_uid, repo_id = self._cache_on(wire)
897 cache_on, context_uid, repo_id = self._cache_on(wire)
885 region = self._region(wire)
898 region = self._region(wire)
886
899
887 @region.conditional_cache_on_arguments(condition=cache_on)
900 @region.conditional_cache_on_arguments(condition=cache_on)
888 def _date(_repo_id, _commit_id):
901 def _date(_repo_id, _commit_id):
889 repo_init = self._factory.repo_libgit2(wire)
902 repo_init = self._factory.repo_libgit2(wire)
890 with repo_init as repo:
903 with repo_init as repo:
891 commit = repo[commit_id]
904 commit = repo[commit_id]
892
905
893 if hasattr(commit, 'commit_time'):
906 if hasattr(commit, 'commit_time'):
894 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
907 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
895 else:
908 else:
896 commit = commit.get_object()
909 commit = commit.get_object()
897 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
910 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
898
911
899 # TODO(marcink): check dulwich difference of offset vs timezone
912 # TODO(marcink): check dulwich difference of offset vs timezone
900 return [commit_time, commit_time_offset]
913 return [commit_time, commit_time_offset]
901 return _date(repo_id, commit_id)
914 return _date(repo_id, commit_id)
902
915
903 @reraise_safe_exceptions
916 @reraise_safe_exceptions
904 def author(self, wire, commit_id):
917 def author(self, wire, commit_id):
905 cache_on, context_uid, repo_id = self._cache_on(wire)
918 cache_on, context_uid, repo_id = self._cache_on(wire)
906 region = self._region(wire)
919 region = self._region(wire)
907
920
908 @region.conditional_cache_on_arguments(condition=cache_on)
921 @region.conditional_cache_on_arguments(condition=cache_on)
909 def _author(_repo_id, _commit_id):
922 def _author(_repo_id, _commit_id):
910 repo_init = self._factory.repo_libgit2(wire)
923 repo_init = self._factory.repo_libgit2(wire)
911 with repo_init as repo:
924 with repo_init as repo:
912 commit = repo[commit_id]
925 commit = repo[commit_id]
913
926
914 if hasattr(commit, 'author'):
927 if hasattr(commit, 'author'):
915 author = commit.author
928 author = commit.author
916 else:
929 else:
917 author = commit.get_object().author
930 author = commit.get_object().author
918
931
919 if author.email:
932 if author.email:
920 return "{} <{}>".format(author.name, author.email)
933 return "{} <{}>".format(author.name, author.email)
921
934
922 try:
935 try:
923 return "{}".format(author.name)
936 return "{}".format(author.name)
924 except Exception:
937 except Exception:
925 return "{}".format(safe_str(author.raw_name))
938 return "{}".format(safe_str(author.raw_name))
926
939
927 return _author(repo_id, commit_id)
940 return _author(repo_id, commit_id)
928
941
929 @reraise_safe_exceptions
942 @reraise_safe_exceptions
930 def message(self, wire, commit_id):
943 def message(self, wire, commit_id):
931 cache_on, context_uid, repo_id = self._cache_on(wire)
944 cache_on, context_uid, repo_id = self._cache_on(wire)
932 region = self._region(wire)
945 region = self._region(wire)
933 @region.conditional_cache_on_arguments(condition=cache_on)
946 @region.conditional_cache_on_arguments(condition=cache_on)
934 def _message(_repo_id, _commit_id):
947 def _message(_repo_id, _commit_id):
935 repo_init = self._factory.repo_libgit2(wire)
948 repo_init = self._factory.repo_libgit2(wire)
936 with repo_init as repo:
949 with repo_init as repo:
937 commit = repo[commit_id]
950 commit = repo[commit_id]
938 return commit.message
951 return commit.message
939 return _message(repo_id, commit_id)
952 return _message(repo_id, commit_id)
940
953
941 @reraise_safe_exceptions
954 @reraise_safe_exceptions
942 def parents(self, wire, commit_id):
955 def parents(self, wire, commit_id):
943 cache_on, context_uid, repo_id = self._cache_on(wire)
956 cache_on, context_uid, repo_id = self._cache_on(wire)
944 region = self._region(wire)
957 region = self._region(wire)
945
958
946 @region.conditional_cache_on_arguments(condition=cache_on)
959 @region.conditional_cache_on_arguments(condition=cache_on)
947 def _parents(_repo_id, _commit_id):
960 def _parents(_repo_id, _commit_id):
948 repo_init = self._factory.repo_libgit2(wire)
961 repo_init = self._factory.repo_libgit2(wire)
949 with repo_init as repo:
962 with repo_init as repo:
950 commit = repo[commit_id]
963 commit = repo[commit_id]
951 if hasattr(commit, 'parent_ids'):
964 if hasattr(commit, 'parent_ids'):
952 parent_ids = commit.parent_ids
965 parent_ids = commit.parent_ids
953 else:
966 else:
954 parent_ids = commit.get_object().parent_ids
967 parent_ids = commit.get_object().parent_ids
955
968
956 return [x.hex for x in parent_ids]
969 return [x.hex for x in parent_ids]
957 return _parents(repo_id, commit_id)
970 return _parents(repo_id, commit_id)
958
971
959 @reraise_safe_exceptions
972 @reraise_safe_exceptions
960 def children(self, wire, commit_id):
973 def children(self, wire, commit_id):
961 cache_on, context_uid, repo_id = self._cache_on(wire)
974 cache_on, context_uid, repo_id = self._cache_on(wire)
962 region = self._region(wire)
975 region = self._region(wire)
963
976
964 head = self.head(wire)
977 head = self.head(wire)
965
978
966 @region.conditional_cache_on_arguments(condition=cache_on)
979 @region.conditional_cache_on_arguments(condition=cache_on)
967 def _children(_repo_id, _commit_id):
980 def _children(_repo_id, _commit_id):
968
981
969 output, __ = self.run_git_command(
982 output, __ = self.run_git_command(
970 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
983 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
971
984
972 child_ids = []
985 child_ids = []
973 pat = re.compile(r'^{}'.format(commit_id))
986 pat = re.compile(r'^{}'.format(commit_id))
974 for line in output.splitlines():
987 for line in output.splitlines():
975 line = safe_str(line)
988 line = safe_str(line)
976 if pat.match(line):
989 if pat.match(line):
977 found_ids = line.split(' ')[1:]
990 found_ids = line.split(' ')[1:]
978 child_ids.extend(found_ids)
991 child_ids.extend(found_ids)
979 break
992 break
980
993
981 return child_ids
994 return child_ids
982 return _children(repo_id, commit_id)
995 return _children(repo_id, commit_id)
983
996
984 @reraise_safe_exceptions
997 @reraise_safe_exceptions
985 def set_refs(self, wire, key, value):
998 def set_refs(self, wire, key, value):
986 repo_init = self._factory.repo_libgit2(wire)
999 repo_init = self._factory.repo_libgit2(wire)
987 with repo_init as repo:
1000 with repo_init as repo:
988 repo.references.create(key, value, force=True)
1001 repo.references.create(key, value, force=True)
989
1002
990 @reraise_safe_exceptions
1003 @reraise_safe_exceptions
991 def create_branch(self, wire, branch_name, commit_id, force=False):
1004 def create_branch(self, wire, branch_name, commit_id, force=False):
992 repo_init = self._factory.repo_libgit2(wire)
1005 repo_init = self._factory.repo_libgit2(wire)
993 with repo_init as repo:
1006 with repo_init as repo:
994 commit = repo[commit_id]
1007 commit = repo[commit_id]
995
1008
996 if force:
1009 if force:
997 repo.branches.local.create(branch_name, commit, force=force)
1010 repo.branches.local.create(branch_name, commit, force=force)
998 elif not repo.branches.get(branch_name):
1011 elif not repo.branches.get(branch_name):
999 # create only if that branch isn't existing
1012 # create only if that branch isn't existing
1000 repo.branches.local.create(branch_name, commit, force=force)
1013 repo.branches.local.create(branch_name, commit, force=force)
1001
1014
1002 @reraise_safe_exceptions
1015 @reraise_safe_exceptions
1003 def remove_ref(self, wire, key):
1016 def remove_ref(self, wire, key):
1004 repo_init = self._factory.repo_libgit2(wire)
1017 repo_init = self._factory.repo_libgit2(wire)
1005 with repo_init as repo:
1018 with repo_init as repo:
1006 repo.references.delete(key)
1019 repo.references.delete(key)
1007
1020
1008 @reraise_safe_exceptions
1021 @reraise_safe_exceptions
1009 def tag_remove(self, wire, tag_name):
1022 def tag_remove(self, wire, tag_name):
1010 repo_init = self._factory.repo_libgit2(wire)
1023 repo_init = self._factory.repo_libgit2(wire)
1011 with repo_init as repo:
1024 with repo_init as repo:
1012 key = 'refs/tags/{}'.format(tag_name)
1025 key = 'refs/tags/{}'.format(tag_name)
1013 repo.references.delete(key)
1026 repo.references.delete(key)
1014
1027
1015 @reraise_safe_exceptions
1028 @reraise_safe_exceptions
1016 def tree_changes(self, wire, source_id, target_id):
1029 def tree_changes(self, wire, source_id, target_id):
1017 # TODO(marcink): remove this seems it's only used by tests
1030 # TODO(marcink): remove this seems it's only used by tests
1018 repo = self._factory.repo(wire)
1031 repo = self._factory.repo(wire)
1019 source = repo[source_id].tree if source_id else None
1032 source = repo[source_id].tree if source_id else None
1020 target = repo[target_id].tree
1033 target = repo[target_id].tree
1021 result = repo.object_store.tree_changes(source, target)
1034 result = repo.object_store.tree_changes(source, target)
1022 return list(result)
1035 return list(result)
1023
1036
1024 @reraise_safe_exceptions
1037 @reraise_safe_exceptions
1025 def tree_and_type_for_path(self, wire, commit_id, path):
1038 def tree_and_type_for_path(self, wire, commit_id, path):
1026
1039
1027 cache_on, context_uid, repo_id = self._cache_on(wire)
1040 cache_on, context_uid, repo_id = self._cache_on(wire)
1028 region = self._region(wire)
1041 region = self._region(wire)
1029
1042
1030 @region.conditional_cache_on_arguments(condition=cache_on)
1043 @region.conditional_cache_on_arguments(condition=cache_on)
1031 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1044 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1032 repo_init = self._factory.repo_libgit2(wire)
1045 repo_init = self._factory.repo_libgit2(wire)
1033
1046
1034 with repo_init as repo:
1047 with repo_init as repo:
1035 commit = repo[commit_id]
1048 commit = repo[commit_id]
1036 try:
1049 try:
1037 tree = commit.tree[path]
1050 tree = commit.tree[path]
1038 except KeyError:
1051 except KeyError:
1039 return None, None, None
1052 return None, None, None
1040
1053
1041 return tree.id.hex, tree.type_str, tree.filemode
1054 return tree.id.hex, tree.type_str, tree.filemode
1042 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1055 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1043
1056
1044 @reraise_safe_exceptions
1057 @reraise_safe_exceptions
1045 def tree_items(self, wire, tree_id):
1058 def tree_items(self, wire, tree_id):
1046 cache_on, context_uid, repo_id = self._cache_on(wire)
1059 cache_on, context_uid, repo_id = self._cache_on(wire)
1047 region = self._region(wire)
1060 region = self._region(wire)
1048
1061
1049 @region.conditional_cache_on_arguments(condition=cache_on)
1062 @region.conditional_cache_on_arguments(condition=cache_on)
1050 def _tree_items(_repo_id, _tree_id):
1063 def _tree_items(_repo_id, _tree_id):
1051
1064
1052 repo_init = self._factory.repo_libgit2(wire)
1065 repo_init = self._factory.repo_libgit2(wire)
1053 with repo_init as repo:
1066 with repo_init as repo:
1054 try:
1067 try:
1055 tree = repo[tree_id]
1068 tree = repo[tree_id]
1056 except KeyError:
1069 except KeyError:
1057 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1070 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1058
1071
1059 result = []
1072 result = []
1060 for item in tree:
1073 for item in tree:
1061 item_sha = item.hex
1074 item_sha = item.hex
1062 item_mode = item.filemode
1075 item_mode = item.filemode
1063 item_type = item.type_str
1076 item_type = item.type_str
1064
1077
1065 if item_type == 'commit':
1078 if item_type == 'commit':
1066 # NOTE(marcink): submodules we translate to 'link' for backward compat
1079 # NOTE(marcink): submodules we translate to 'link' for backward compat
1067 item_type = 'link'
1080 item_type = 'link'
1068
1081
1069 result.append((item.name, item_mode, item_sha, item_type))
1082 result.append((item.name, item_mode, item_sha, item_type))
1070 return result
1083 return result
1071 return _tree_items(repo_id, tree_id)
1084 return _tree_items(repo_id, tree_id)
1072
1085
1073 @reraise_safe_exceptions
1086 @reraise_safe_exceptions
1074 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1087 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1075 """
1088 """
1076 Old version that uses subprocess to call diff
1089 Old version that uses subprocess to call diff
1077 """
1090 """
1078
1091
1079 flags = [
1092 flags = [
1080 '-U%s' % context, '--patch',
1093 '-U%s' % context, '--patch',
1081 '--binary',
1094 '--binary',
1082 '--find-renames',
1095 '--find-renames',
1083 '--no-indent-heuristic',
1096 '--no-indent-heuristic',
1084 # '--indent-heuristic',
1097 # '--indent-heuristic',
1085 #'--full-index',
1098 #'--full-index',
1086 #'--abbrev=40'
1099 #'--abbrev=40'
1087 ]
1100 ]
1088
1101
1089 if opt_ignorews:
1102 if opt_ignorews:
1090 flags.append('--ignore-all-space')
1103 flags.append('--ignore-all-space')
1091
1104
1092 if commit_id_1 == self.EMPTY_COMMIT:
1105 if commit_id_1 == self.EMPTY_COMMIT:
1093 cmd = ['show'] + flags + [commit_id_2]
1106 cmd = ['show'] + flags + [commit_id_2]
1094 else:
1107 else:
1095 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1108 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1096
1109
1097 if file_filter:
1110 if file_filter:
1098 cmd.extend(['--', file_filter])
1111 cmd.extend(['--', file_filter])
1099
1112
1100 diff, __ = self.run_git_command(wire, cmd)
1113 diff, __ = self.run_git_command(wire, cmd)
1101 # If we used 'show' command, strip first few lines (until actual diff
1114 # If we used 'show' command, strip first few lines (until actual diff
1102 # starts)
1115 # starts)
1103 if commit_id_1 == self.EMPTY_COMMIT:
1116 if commit_id_1 == self.EMPTY_COMMIT:
1104 lines = diff.splitlines()
1117 lines = diff.splitlines()
1105 x = 0
1118 x = 0
1106 for line in lines:
1119 for line in lines:
1107 if line.startswith(b'diff'):
1120 if line.startswith(b'diff'):
1108 break
1121 break
1109 x += 1
1122 x += 1
1110 # Append new line just like 'diff' command do
1123 # Append new line just like 'diff' command do
1111 diff = '\n'.join(lines[x:]) + '\n'
1124 diff = '\n'.join(lines[x:]) + '\n'
1112 return diff
1125 return diff
1113
1126
1114 @reraise_safe_exceptions
1127 @reraise_safe_exceptions
1115 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1128 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1116 repo_init = self._factory.repo_libgit2(wire)
1129 repo_init = self._factory.repo_libgit2(wire)
1117 with repo_init as repo:
1130 with repo_init as repo:
1118 swap = True
1131 swap = True
1119 flags = 0
1132 flags = 0
1120 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1133 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1121
1134
1122 if opt_ignorews:
1135 if opt_ignorews:
1123 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1136 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1124
1137
1125 if commit_id_1 == self.EMPTY_COMMIT:
1138 if commit_id_1 == self.EMPTY_COMMIT:
1126 comm1 = repo[commit_id_2]
1139 comm1 = repo[commit_id_2]
1127 diff_obj = comm1.tree.diff_to_tree(
1140 diff_obj = comm1.tree.diff_to_tree(
1128 flags=flags, context_lines=context, swap=swap)
1141 flags=flags, context_lines=context, swap=swap)
1129
1142
1130 else:
1143 else:
1131 comm1 = repo[commit_id_2]
1144 comm1 = repo[commit_id_2]
1132 comm2 = repo[commit_id_1]
1145 comm2 = repo[commit_id_1]
1133 diff_obj = comm1.tree.diff_to_tree(
1146 diff_obj = comm1.tree.diff_to_tree(
1134 comm2.tree, flags=flags, context_lines=context, swap=swap)
1147 comm2.tree, flags=flags, context_lines=context, swap=swap)
1135 similar_flags = 0
1148 similar_flags = 0
1136 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1149 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1137 diff_obj.find_similar(flags=similar_flags)
1150 diff_obj.find_similar(flags=similar_flags)
1138
1151
1139 if file_filter:
1152 if file_filter:
1140 for p in diff_obj:
1153 for p in diff_obj:
1141 if p.delta.old_file.path == file_filter:
1154 if p.delta.old_file.path == file_filter:
1142 return p.patch or ''
1155 return p.patch or ''
1143 # fo matching path == no diff
1156 # fo matching path == no diff
1144 return ''
1157 return ''
1145 return diff_obj.patch or ''
1158 return diff_obj.patch or ''
1146
1159
1147 @reraise_safe_exceptions
1160 @reraise_safe_exceptions
1148 def node_history(self, wire, commit_id, path, limit):
1161 def node_history(self, wire, commit_id, path, limit):
1149 cache_on, context_uid, repo_id = self._cache_on(wire)
1162 cache_on, context_uid, repo_id = self._cache_on(wire)
1150 region = self._region(wire)
1163 region = self._region(wire)
1151
1164
1152 @region.conditional_cache_on_arguments(condition=cache_on)
1165 @region.conditional_cache_on_arguments(condition=cache_on)
1153 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1166 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1154 # optimize for n==1, rev-list is much faster for that use-case
1167 # optimize for n==1, rev-list is much faster for that use-case
1155 if limit == 1:
1168 if limit == 1:
1156 cmd = ['rev-list', '-1', commit_id, '--', path]
1169 cmd = ['rev-list', '-1', commit_id, '--', path]
1157 else:
1170 else:
1158 cmd = ['log']
1171 cmd = ['log']
1159 if limit:
1172 if limit:
1160 cmd.extend(['-n', str(safe_int(limit, 0))])
1173 cmd.extend(['-n', str(safe_int(limit, 0))])
1161 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1174 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1162
1175
1163 output, __ = self.run_git_command(wire, cmd)
1176 output, __ = self.run_git_command(wire, cmd)
1164 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1177 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1165
1178
1166 return [x for x in commit_ids]
1179 return [x for x in commit_ids]
1167 return _node_history(context_uid, repo_id, commit_id, path, limit)
1180 return _node_history(context_uid, repo_id, commit_id, path, limit)
1168
1181
1169 @reraise_safe_exceptions
1182 @reraise_safe_exceptions
1170 def node_annotate_legacy(self, wire, commit_id, path):
1183 def node_annotate_legacy(self, wire, commit_id, path):
1171 #note: replaced by pygit2 impelementation
1184 #note: replaced by pygit2 impelementation
1172 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1185 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1173 # -l ==> outputs long shas (and we need all 40 characters)
1186 # -l ==> outputs long shas (and we need all 40 characters)
1174 # --root ==> doesn't put '^' character for boundaries
1187 # --root ==> doesn't put '^' character for boundaries
1175 # -r commit_id ==> blames for the given commit
1188 # -r commit_id ==> blames for the given commit
1176 output, __ = self.run_git_command(wire, cmd)
1189 output, __ = self.run_git_command(wire, cmd)
1177
1190
1178 result = []
1191 result = []
1179 for i, blame_line in enumerate(output.splitlines()[:-1]):
1192 for i, blame_line in enumerate(output.splitlines()[:-1]):
1180 line_no = i + 1
1193 line_no = i + 1
1181 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1194 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1182 result.append((line_no, blame_commit_id, line))
1195 result.append((line_no, blame_commit_id, line))
1183
1196
1184 return result
1197 return result
1185
1198
1186 @reraise_safe_exceptions
1199 @reraise_safe_exceptions
1187 def node_annotate(self, wire, commit_id, path):
1200 def node_annotate(self, wire, commit_id, path):
1188
1201
1189 result_libgit = []
1202 result_libgit = []
1190 repo_init = self._factory.repo_libgit2(wire)
1203 repo_init = self._factory.repo_libgit2(wire)
1191 with repo_init as repo:
1204 with repo_init as repo:
1192 commit = repo[commit_id]
1205 commit = repo[commit_id]
1193 blame_obj = repo.blame(path, newest_commit=commit_id)
1206 blame_obj = repo.blame(path, newest_commit=commit_id)
1194 for i, line in enumerate(commit.tree[path].data.splitlines()):
1207 for i, line in enumerate(commit.tree[path].data.splitlines()):
1195 line_no = i + 1
1208 line_no = i + 1
1196 hunk = blame_obj.for_line(line_no)
1209 hunk = blame_obj.for_line(line_no)
1197 blame_commit_id = hunk.final_commit_id.hex
1210 blame_commit_id = hunk.final_commit_id.hex
1198
1211
1199 result_libgit.append((line_no, blame_commit_id, line))
1212 result_libgit.append((line_no, blame_commit_id, line))
1200
1213
1201 return result_libgit
1214 return result_libgit
1202
1215
1203 @reraise_safe_exceptions
1216 @reraise_safe_exceptions
1204 def update_server_info(self, wire):
1217 def update_server_info(self, wire):
1205 repo = self._factory.repo(wire)
1218 repo = self._factory.repo(wire)
1206 update_server_info(repo)
1219 update_server_info(repo)
1207
1220
1208 @reraise_safe_exceptions
1221 @reraise_safe_exceptions
1209 def get_all_commit_ids(self, wire):
1222 def get_all_commit_ids(self, wire):
1210
1223
1211 cache_on, context_uid, repo_id = self._cache_on(wire)
1224 cache_on, context_uid, repo_id = self._cache_on(wire)
1212 region = self._region(wire)
1225 region = self._region(wire)
1213
1226
1214 @region.conditional_cache_on_arguments(condition=cache_on)
1227 @region.conditional_cache_on_arguments(condition=cache_on)
1215 def _get_all_commit_ids(_context_uid, _repo_id):
1228 def _get_all_commit_ids(_context_uid, _repo_id):
1216
1229
1217 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1230 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1218 try:
1231 try:
1219 output, __ = self.run_git_command(wire, cmd)
1232 output, __ = self.run_git_command(wire, cmd)
1220 return output.splitlines()
1233 return output.splitlines()
1221 except Exception:
1234 except Exception:
1222 # Can be raised for empty repositories
1235 # Can be raised for empty repositories
1223 return []
1236 return []
1224
1237
1225 @region.conditional_cache_on_arguments(condition=cache_on)
1238 @region.conditional_cache_on_arguments(condition=cache_on)
1226 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1239 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1227 repo_init = self._factory.repo_libgit2(wire)
1240 repo_init = self._factory.repo_libgit2(wire)
1228 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1241 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1229 results = []
1242 results = []
1230 with repo_init as repo:
1243 with repo_init as repo:
1231 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1244 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1232 results.append(commit.id.hex)
1245 results.append(commit.id.hex)
1233
1246
1234 return _get_all_commit_ids(context_uid, repo_id)
1247 return _get_all_commit_ids(context_uid, repo_id)
1235
1248
1236 @reraise_safe_exceptions
1249 @reraise_safe_exceptions
1237 def run_git_command(self, wire, cmd, **opts):
1250 def run_git_command(self, wire, cmd, **opts):
1238 path = wire.get('path', None)
1251 path = wire.get('path', None)
1239
1252
1240 if path and os.path.isdir(path):
1253 if path and os.path.isdir(path):
1241 opts['cwd'] = path
1254 opts['cwd'] = path
1242
1255
1243 if '_bare' in opts:
1256 if '_bare' in opts:
1244 _copts = []
1257 _copts = []
1245 del opts['_bare']
1258 del opts['_bare']
1246 else:
1259 else:
1247 _copts = ['-c', 'core.quotepath=false', ]
1260 _copts = ['-c', 'core.quotepath=false', ]
1248 safe_call = False
1261 safe_call = False
1249 if '_safe' in opts:
1262 if '_safe' in opts:
1250 # no exc on failure
1263 # no exc on failure
1251 del opts['_safe']
1264 del opts['_safe']
1252 safe_call = True
1265 safe_call = True
1253
1266
1254 if '_copts' in opts:
1267 if '_copts' in opts:
1255 _copts.extend(opts['_copts'] or [])
1268 _copts.extend(opts['_copts'] or [])
1256 del opts['_copts']
1269 del opts['_copts']
1257
1270
1258 gitenv = os.environ.copy()
1271 gitenv = os.environ.copy()
1259 gitenv.update(opts.pop('extra_env', {}))
1272 gitenv.update(opts.pop('extra_env', {}))
1260 # need to clean fix GIT_DIR !
1273 # need to clean fix GIT_DIR !
1261 if 'GIT_DIR' in gitenv:
1274 if 'GIT_DIR' in gitenv:
1262 del gitenv['GIT_DIR']
1275 del gitenv['GIT_DIR']
1263 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1276 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1264 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1277 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1265
1278
1266 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1279 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1267 _opts = {'env': gitenv, 'shell': False}
1280 _opts = {'env': gitenv, 'shell': False}
1268
1281
1269 proc = None
1282 proc = None
1270 try:
1283 try:
1271 _opts.update(opts)
1284 _opts.update(opts)
1272 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1285 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1273
1286
1274 return b''.join(proc), b''.join(proc.stderr)
1287 return b''.join(proc), b''.join(proc.stderr)
1275 except OSError as err:
1288 except OSError as err:
1276 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1289 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1277 tb_err = ("Couldn't run git command (%s).\n"
1290 tb_err = ("Couldn't run git command (%s).\n"
1278 "Original error was:%s\n"
1291 "Original error was:%s\n"
1279 "Call options:%s\n"
1292 "Call options:%s\n"
1280 % (cmd, err, _opts))
1293 % (cmd, err, _opts))
1281 log.exception(tb_err)
1294 log.exception(tb_err)
1282 if safe_call:
1295 if safe_call:
1283 return '', err
1296 return '', err
1284 else:
1297 else:
1285 raise exceptions.VcsException()(tb_err)
1298 raise exceptions.VcsException()(tb_err)
1286 finally:
1299 finally:
1287 if proc:
1300 if proc:
1288 proc.close()
1301 proc.close()
1289
1302
1290 @reraise_safe_exceptions
1303 @reraise_safe_exceptions
1291 def install_hooks(self, wire, force=False):
1304 def install_hooks(self, wire, force=False):
1292 from vcsserver.hook_utils import install_git_hooks
1305 from vcsserver.hook_utils import install_git_hooks
1293 bare = self.bare(wire)
1306 bare = self.bare(wire)
1294 path = wire['path']
1307 path = wire['path']
1308 binary_dir = settings.BINARY_DIR
1309 executable = None
1310 if binary_dir:
1311 executable = os.path.join(binary_dir, 'python3')
1295 return install_git_hooks(path, bare, force_create=force)
1312 return install_git_hooks(path, bare, force_create=force)
1296
1313
1297 @reraise_safe_exceptions
1314 @reraise_safe_exceptions
1298 def get_hooks_info(self, wire):
1315 def get_hooks_info(self, wire):
1299 from vcsserver.hook_utils import (
1316 from vcsserver.hook_utils import (
1300 get_git_pre_hook_version, get_git_post_hook_version)
1317 get_git_pre_hook_version, get_git_post_hook_version)
1301 bare = self.bare(wire)
1318 bare = self.bare(wire)
1302 path = wire['path']
1319 path = wire['path']
1303 return {
1320 return {
1304 'pre_version': get_git_pre_hook_version(path, bare),
1321 'pre_version': get_git_pre_hook_version(path, bare),
1305 'post_version': get_git_post_hook_version(path, bare),
1322 'post_version': get_git_post_hook_version(path, bare),
1306 }
1323 }
1307
1324
1308 @reraise_safe_exceptions
1325 @reraise_safe_exceptions
1309 def set_head_ref(self, wire, head_name):
1326 def set_head_ref(self, wire, head_name):
1310 log.debug('Setting refs/head to `%s`', head_name)
1327 log.debug('Setting refs/head to `%s`', head_name)
1311 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1328 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1312 output, __ = self.run_git_command(wire, cmd)
1329 output, __ = self.run_git_command(wire, cmd)
1313 return [head_name] + output.splitlines()
1330 return [head_name] + output.splitlines()
1314
1331
1315 @reraise_safe_exceptions
1332 @reraise_safe_exceptions
1316 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1333 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1317 archive_dir_name, commit_id):
1334 archive_dir_name, commit_id):
1318
1335
1319 def file_walker(_commit_id, path):
1336 def file_walker(_commit_id, path):
1320 repo_init = self._factory.repo_libgit2(wire)
1337 repo_init = self._factory.repo_libgit2(wire)
1321
1338
1322 with repo_init as repo:
1339 with repo_init as repo:
1323 commit = repo[commit_id]
1340 commit = repo[commit_id]
1324
1341
1325 if path in ['', '/']:
1342 if path in ['', '/']:
1326 tree = commit.tree
1343 tree = commit.tree
1327 else:
1344 else:
1328 tree = commit.tree[path.rstrip('/')]
1345 tree = commit.tree[path.rstrip('/')]
1329 tree_id = tree.id.hex
1346 tree_id = tree.id.hex
1330 try:
1347 try:
1331 tree = repo[tree_id]
1348 tree = repo[tree_id]
1332 except KeyError:
1349 except KeyError:
1333 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1350 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1334
1351
1335 index = LibGit2Index.Index()
1352 index = LibGit2Index.Index()
1336 index.read_tree(tree)
1353 index.read_tree(tree)
1337 file_iter = index
1354 file_iter = index
1338
1355
1339 for fn in file_iter:
1356 for fn in file_iter:
1340 file_path = fn.path
1357 file_path = fn.path
1341 mode = fn.mode
1358 mode = fn.mode
1342 is_link = stat.S_ISLNK(mode)
1359 is_link = stat.S_ISLNK(mode)
1343 if mode == pygit2.GIT_FILEMODE_COMMIT:
1360 if mode == pygit2.GIT_FILEMODE_COMMIT:
1344 log.debug('Skipping path %s as a commit node', file_path)
1361 log.debug('Skipping path %s as a commit node', file_path)
1345 continue
1362 continue
1346 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1363 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1347
1364
1348 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1365 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1349 archive_dir_name, commit_id)
1366 archive_dir_name, commit_id)
@@ -1,1088 +1,1101 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib.request
21 import urllib.request
22 import urllib.parse
22 import urllib.parse
23 import traceback
23 import traceback
24 import hashlib
24 import hashlib
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27
27
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
43 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
44 from vcsserver.vcs_base import RemoteBase
44 from vcsserver.vcs_base import RemoteBase
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
def make_ui_from_config(repo_config):
    """
    Build a mercurial ``ui`` object from ``repo_config`` — an iterable of
    ``(section, option, value)`` string triples — with all output channels
    redirected to this module's logger and RhodeCode-specific defaults
    applied on top.
    """

    class LoggingUI(ui.ui):
        """ui subclass forwarding mercurial's output into the python logger."""

        def status(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info(' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object: drop any config mercurial picked up from the
    # environment so only repo_config and our overrides apply
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    # mercurial expects bytes for section/option/value
    for section, option, value in repo_config:
        baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig(b'ui', b'quiet', b'true')

    baseui.setconfig(b'ui', b'paginate', b'never')
    # for better Error reporting of Mercurial
    baseui.setconfig(b'ui', b'message-output', b'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig(b'worker', b'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig(b'extensions', b'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig(b'extensions', b'largefiles', b'!')

    return baseui
108
108
109
109
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral.

    Mercurial-specific errors (Abort, RepoError, lookup failures, ...) are
    translated into the vcsserver ``exceptions`` hierarchy via
    ``raise_from_original`` so remote callers never see backend-internal
    exception types. Exceptions already tagged with ``_vcs_kind`` are
    re-raised untouched; anything else unexpected is logged and wrapped as
    ``UnhandledException``.
    """
    # fix: preserve the wrapped function's metadata (__name__, __doc__, ...)
    # — these wrappers are dispatched/introspected by name elsewhere
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e), e)
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e), e)
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e), e)
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except Exception as e:
            # exceptions carrying _vcs_kind were already converted upstream;
            # pass them through unchanged
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e), e)

            raise
    return wrapper
133
133
134
134
class MercurialFactory(RepoFactory):
    """Factory producing mercurial repository objects for the vcsserver."""

    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """Build a mercurial ui from *config*; with hooks=False the
        RhodeCode push/pull hooks are stripped out first."""
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [
                entry for entry in config
                if not (entry[0] == 'hooks' and entry[1] in hooks_to_clean)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        ui_obj = self._create_config(wire["config"])
        return instance(ui_obj, safe_bytes(wire["path"]), create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
162
162
163
163
def patch_ui_message_output(baseui):
    """Redirect every output channel of *baseui* into a single BytesIO.

    Un-quiets the ui and replaces status/write/warn/debug with one capture
    function; returns ``(baseui, buffer)`` so callers can inspect output.
    """
    baseui.setconfig(b'ui', b'quiet', b'false')
    captured = io.BytesIO()

    def _capture(data, **unused_kwargs):
        captured.write(data)

    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, _capture)

    return baseui, captured
177
177
178
178
179 class HgRemote(RemoteBase):
179 class HgRemote(RemoteBase):
180
180
    def __init__(self, factory):
        # factory: MercurialFactory used to open a repo object per call
        self._factory = factory
        # attribute-name -> bound method map consumed by bulk_request();
        # each method is invoked as method(wire, commit_id)
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }
197
197
    def _get_ctx(self, repo, ref):
        # thin indirection over hgcompat.get_ctx: resolve a changeset
        # context for the given revision/commit-id reference
        return get_ctx(repo, ref)
200
200
201 @reraise_safe_exceptions
201 @reraise_safe_exceptions
202 def discover_hg_version(self):
202 def discover_hg_version(self):
203 from mercurial import util
203 from mercurial import util
204 return safe_str(util.version())
204 return safe_str(util.version())
205
205
206 @reraise_safe_exceptions
206 @reraise_safe_exceptions
207 def is_empty(self, wire):
207 def is_empty(self, wire):
208 repo = self._factory.repo(wire)
208 repo = self._factory.repo(wire)
209
209
210 try:
210 try:
211 return len(repo) == 0
211 return len(repo) == 0
212 except Exception:
212 except Exception:
213 log.exception("failed to read object_store")
213 log.exception("failed to read object_store")
214 return False
214 return False
215
215
    @reraise_safe_exceptions
    def bookmarks(self, wire):
        """Return mapping of bookmark name -> 40-char hex changeset id (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            # repo._bookmarks yields bytes name -> binary node; normalize both
            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}

        return _bookmarks(context_uid, repo_id)
227
227
228 @reraise_safe_exceptions
228 @reraise_safe_exceptions
229 def branches(self, wire, normal, closed):
229 def branches(self, wire, normal, closed):
230 cache_on, context_uid, repo_id = self._cache_on(wire)
230 cache_on, context_uid, repo_id = self._cache_on(wire)
231 region = self._region(wire)
231 region = self._region(wire)
232
232
233 @region.conditional_cache_on_arguments(condition=cache_on)
233 @region.conditional_cache_on_arguments(condition=cache_on)
234 def _branches(_context_uid, _repo_id, _normal, _closed):
234 def _branches(_context_uid, _repo_id, _normal, _closed):
235 repo = self._factory.repo(wire)
235 repo = self._factory.repo(wire)
236 iter_branches = repo.branchmap().iterbranches()
236 iter_branches = repo.branchmap().iterbranches()
237 bt = {}
237 bt = {}
238 for branch_name, _heads, tip_node, is_closed in iter_branches:
238 for branch_name, _heads, tip_node, is_closed in iter_branches:
239 if normal and not is_closed:
239 if normal and not is_closed:
240 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
240 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
241 if closed and is_closed:
241 if closed and is_closed:
242 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
242 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
243
243
244 return bt
244 return bt
245
245
246 return _branches(context_uid, repo_id, normal, closed)
246 return _branches(context_uid, repo_id, normal, closed)
247
247
    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        """
        Resolve several commit attributes in a single (cached) call.

        :param wire: connection/repo descriptor dict
        :param commit_id: changeset the attributes are read from
        :param pre_load: iterable of attribute names, each a key of
            ``self._bulk_methods``
        :raises exceptions.VcsException: for unknown attribute names
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    # NOTE(review): this also swallows a KeyError raised
                    # *inside* method() and reports it as an unknown
                    # attribute — confirm that is intended
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        return _bulk_request(repo_id, commit_id, sorted(pre_load))
267
267
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_branch(self, wire, commit_id):
269 def ctx_branch(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 region = self._region(wire)
271 region = self._region(wire)
272
272
273 @region.conditional_cache_on_arguments(condition=cache_on)
273 @region.conditional_cache_on_arguments(condition=cache_on)
274 def _ctx_branch(_repo_id, _commit_id):
274 def _ctx_branch(_repo_id, _commit_id):
275 repo = self._factory.repo(wire)
275 repo = self._factory.repo(wire)
276 ctx = self._get_ctx(repo, commit_id)
276 ctx = self._get_ctx(repo, commit_id)
277 return ctx.branch()
277 return ctx.branch()
278 return _ctx_branch(repo_id, commit_id)
278 return _ctx_branch(repo_id, commit_id)
279
279
280 @reraise_safe_exceptions
280 @reraise_safe_exceptions
281 def ctx_date(self, wire, commit_id):
281 def ctx_date(self, wire, commit_id):
282 cache_on, context_uid, repo_id = self._cache_on(wire)
282 cache_on, context_uid, repo_id = self._cache_on(wire)
283 region = self._region(wire)
283 region = self._region(wire)
284
284
285 @region.conditional_cache_on_arguments(condition=cache_on)
285 @region.conditional_cache_on_arguments(condition=cache_on)
286 def _ctx_date(_repo_id, _commit_id):
286 def _ctx_date(_repo_id, _commit_id):
287 repo = self._factory.repo(wire)
287 repo = self._factory.repo(wire)
288 ctx = self._get_ctx(repo, commit_id)
288 ctx = self._get_ctx(repo, commit_id)
289 return ctx.date()
289 return ctx.date()
290 return _ctx_date(repo_id, commit_id)
290 return _ctx_date(repo_id, commit_id)
291
291
292 @reraise_safe_exceptions
292 @reraise_safe_exceptions
293 def ctx_description(self, wire, revision):
293 def ctx_description(self, wire, revision):
294 repo = self._factory.repo(wire)
294 repo = self._factory.repo(wire)
295 ctx = self._get_ctx(repo, revision)
295 ctx = self._get_ctx(repo, revision)
296 return ctx.description()
296 return ctx.description()
297
297
298 @reraise_safe_exceptions
298 @reraise_safe_exceptions
299 def ctx_files(self, wire, commit_id):
299 def ctx_files(self, wire, commit_id):
300 cache_on, context_uid, repo_id = self._cache_on(wire)
300 cache_on, context_uid, repo_id = self._cache_on(wire)
301 region = self._region(wire)
301 region = self._region(wire)
302
302
303 @region.conditional_cache_on_arguments(condition=cache_on)
303 @region.conditional_cache_on_arguments(condition=cache_on)
304 def _ctx_files(_repo_id, _commit_id):
304 def _ctx_files(_repo_id, _commit_id):
305 repo = self._factory.repo(wire)
305 repo = self._factory.repo(wire)
306 ctx = self._get_ctx(repo, commit_id)
306 ctx = self._get_ctx(repo, commit_id)
307 return ctx.files()
307 return ctx.files()
308
308
309 return _ctx_files(repo_id, commit_id)
309 return _ctx_files(repo_id, commit_id)
310
310
    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        # Return the list of file paths in the changeset context.
        # NOTE(review): despite its name, ``path`` is passed straight to
        # self._factory.repo() — bulk_request() calls this as
        # method(wire, commit_id), so it is actually the wire dict; confirm
        # before renaming (keyword callers would break).
        repo = self._factory.repo(path)
        ctx = self._get_ctx(repo, revision)
        return list(ctx)
316
316
317 @reraise_safe_exceptions
317 @reraise_safe_exceptions
318 def ctx_parents(self, wire, commit_id):
318 def ctx_parents(self, wire, commit_id):
319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self._region(wire)
320 region = self._region(wire)
321
321
322 @region.conditional_cache_on_arguments(condition=cache_on)
322 @region.conditional_cache_on_arguments(condition=cache_on)
323 def _ctx_parents(_repo_id, _commit_id):
323 def _ctx_parents(_repo_id, _commit_id):
324 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
325 ctx = self._get_ctx(repo, commit_id)
326 return [parent.hex() for parent in ctx.parents()
326 return [parent.hex() for parent in ctx.parents()
327 if not (parent.hidden() or parent.obsolete())]
327 if not (parent.hidden() or parent.obsolete())]
328
328
329 return _ctx_parents(repo_id, commit_id)
329 return _ctx_parents(repo_id, commit_id)
330
330
331 @reraise_safe_exceptions
331 @reraise_safe_exceptions
332 def ctx_children(self, wire, commit_id):
332 def ctx_children(self, wire, commit_id):
333 cache_on, context_uid, repo_id = self._cache_on(wire)
333 cache_on, context_uid, repo_id = self._cache_on(wire)
334 region = self._region(wire)
334 region = self._region(wire)
335
335
336 @region.conditional_cache_on_arguments(condition=cache_on)
336 @region.conditional_cache_on_arguments(condition=cache_on)
337 def _ctx_children(_repo_id, _commit_id):
337 def _ctx_children(_repo_id, _commit_id):
338 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
339 ctx = self._get_ctx(repo, commit_id)
339 ctx = self._get_ctx(repo, commit_id)
340 return [child.hex() for child in ctx.children()
340 return [child.hex() for child in ctx.children()
341 if not (child.hidden() or child.obsolete())]
341 if not (child.hidden() or child.obsolete())]
342
342
343 return _ctx_children(repo_id, commit_id)
343 return _ctx_children(repo_id, commit_id)
344
344
345 @reraise_safe_exceptions
345 @reraise_safe_exceptions
346 def ctx_phase(self, wire, commit_id):
346 def ctx_phase(self, wire, commit_id):
347 cache_on, context_uid, repo_id = self._cache_on(wire)
347 cache_on, context_uid, repo_id = self._cache_on(wire)
348 region = self._region(wire)
348 region = self._region(wire)
349
349
350 @region.conditional_cache_on_arguments(condition=cache_on)
350 @region.conditional_cache_on_arguments(condition=cache_on)
351 def _ctx_phase(_context_uid, _repo_id, _commit_id):
351 def _ctx_phase(_context_uid, _repo_id, _commit_id):
352 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
353 ctx = self._get_ctx(repo, commit_id)
353 ctx = self._get_ctx(repo, commit_id)
354 # public=0, draft=1, secret=3
354 # public=0, draft=1, secret=3
355 return ctx.phase()
355 return ctx.phase()
356 return _ctx_phase(context_uid, repo_id, commit_id)
356 return _ctx_phase(context_uid, repo_id, commit_id)
357
357
358 @reraise_safe_exceptions
358 @reraise_safe_exceptions
359 def ctx_obsolete(self, wire, commit_id):
359 def ctx_obsolete(self, wire, commit_id):
360 cache_on, context_uid, repo_id = self._cache_on(wire)
360 cache_on, context_uid, repo_id = self._cache_on(wire)
361 region = self._region(wire)
361 region = self._region(wire)
362
362
363 @region.conditional_cache_on_arguments(condition=cache_on)
363 @region.conditional_cache_on_arguments(condition=cache_on)
364 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
364 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
365 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
366 ctx = self._get_ctx(repo, commit_id)
366 ctx = self._get_ctx(repo, commit_id)
367 return ctx.obsolete()
367 return ctx.obsolete()
368 return _ctx_obsolete(context_uid, repo_id, commit_id)
368 return _ctx_obsolete(context_uid, repo_id, commit_id)
369
369
370 @reraise_safe_exceptions
370 @reraise_safe_exceptions
371 def ctx_hidden(self, wire, commit_id):
371 def ctx_hidden(self, wire, commit_id):
372 cache_on, context_uid, repo_id = self._cache_on(wire)
372 cache_on, context_uid, repo_id = self._cache_on(wire)
373 region = self._region(wire)
373 region = self._region(wire)
374
374
375 @region.conditional_cache_on_arguments(condition=cache_on)
375 @region.conditional_cache_on_arguments(condition=cache_on)
376 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
376 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
377 repo = self._factory.repo(wire)
377 repo = self._factory.repo(wire)
378 ctx = self._get_ctx(repo, commit_id)
378 ctx = self._get_ctx(repo, commit_id)
379 return ctx.hidden()
379 return ctx.hidden()
380 return _ctx_hidden(context_uid, repo_id, commit_id)
380 return _ctx_hidden(context_uid, repo_id, commit_id)
381
381
382 @reraise_safe_exceptions
382 @reraise_safe_exceptions
383 def ctx_substate(self, wire, revision):
383 def ctx_substate(self, wire, revision):
384 repo = self._factory.repo(wire)
384 repo = self._factory.repo(wire)
385 ctx = self._get_ctx(repo, revision)
385 ctx = self._get_ctx(repo, revision)
386 return ctx.substate
386 return ctx.substate
387
387
    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        """Return the status of *revision* against its first parent, as a list."""
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        # diff the first parent against this changeset's node
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # object of status (odd, custom named tuple in mercurial) is not
        # correctly serializable, we make it a list, as the underling
        # API expects this to be a list
        return list(status)
397
397
398 @reraise_safe_exceptions
398 @reraise_safe_exceptions
399 def ctx_user(self, wire, revision):
399 def ctx_user(self, wire, revision):
400 repo = self._factory.repo(wire)
400 repo = self._factory.repo(wire)
401 ctx = self._get_ctx(repo, revision)
401 ctx = self._get_ctx(repo, revision)
402 return ctx.user()
402 return ctx.user()
403
403
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Verify that *url* is reachable and looks like a mercurial repository.

        Performs an HTTP probe (``cmd=between``) with optional basic/digest
        auth, then — unless the url carried an ``svn+`` scheme prefix — asks
        a mercurial peer to look up 'tip'.

        :param url: remote url, optionally prefixed ``<proto>+<real-url>``
        :param config: (section, option, value) triples for make_ui_from_config
        :raises exceptions.URLError: when the url cannot be opened or is not
            a mercurial repository
        :returns: True on success
        """
        # split an optional "<proto>+" scheme prefix (e.g. svn+http://...)
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        # mask credentials before the url is logged anywhere
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib.request.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # probe with the hg wire-protocol "between" command on null nodes
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.parse.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib.request.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                # NOTE: local name shadows the imported `ui` module here
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
468
468
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
        """
        Return the raw diff (bytes) between two commits.

        :param file_filter: optional pair narrowing the diff — presumably
            (root, pattern); falsy means no filtering (TODO confirm with callers)
        :param opt_git: emit git-extended diff format
        :param opt_ignorews: ignore whitespace changes
        :param context: number of diff context lines
        :raises exceptions.LookupException: when a commit id cannot be resolved
        """
        repo = self._factory.repo(wire)

        if file_filter:
            match_filter = match(file_filter[0], '', [file_filter[1]])
        else:
            match_filter = file_filter
        opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)

        try:
            diff_iter = patch.diff(
                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
            return b"".join(diff_iter)
        except RepoLookupError as e:
            raise exceptions.LookupException(e)()
485
485
486 @reraise_safe_exceptions
486 @reraise_safe_exceptions
487 def node_history(self, wire, revision, path, limit):
487 def node_history(self, wire, revision, path, limit):
488 cache_on, context_uid, repo_id = self._cache_on(wire)
488 cache_on, context_uid, repo_id = self._cache_on(wire)
489 region = self._region(wire)
489 region = self._region(wire)
490
490
491 @region.conditional_cache_on_arguments(condition=cache_on)
491 @region.conditional_cache_on_arguments(condition=cache_on)
492 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
492 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
493 repo = self._factory.repo(wire)
493 repo = self._factory.repo(wire)
494
494
495 ctx = self._get_ctx(repo, revision)
495 ctx = self._get_ctx(repo, revision)
496 fctx = ctx.filectx(safe_bytes(path))
496 fctx = ctx.filectx(safe_bytes(path))
497
497
498 def history_iter():
498 def history_iter():
499 limit_rev = fctx.rev()
499 limit_rev = fctx.rev()
500 for obj in reversed(list(fctx.filelog())):
500 for obj in reversed(list(fctx.filelog())):
501 obj = fctx.filectx(obj)
501 obj = fctx.filectx(obj)
502 ctx = obj.changectx()
502 ctx = obj.changectx()
503 if ctx.hidden() or ctx.obsolete():
503 if ctx.hidden() or ctx.obsolete():
504 continue
504 continue
505
505
506 if limit_rev >= obj.rev():
506 if limit_rev >= obj.rev():
507 yield obj
507 yield obj
508
508
509 history = []
509 history = []
510 for cnt, obj in enumerate(history_iter()):
510 for cnt, obj in enumerate(history_iter()):
511 if limit and cnt >= limit:
511 if limit and cnt >= limit:
512 break
512 break
513 history.append(hex(obj.node()))
513 history.append(hex(obj.node()))
514
514
515 return [x for x in history]
515 return [x for x in history]
516 return _node_history(context_uid, repo_id, revision, path, limit)
516 return _node_history(context_uid, repo_id, revision, path, limit)
517
517
518 @reraise_safe_exceptions
518 @reraise_safe_exceptions
519 def node_history_untill(self, wire, revision, path, limit):
519 def node_history_untill(self, wire, revision, path, limit):
520 cache_on, context_uid, repo_id = self._cache_on(wire)
520 cache_on, context_uid, repo_id = self._cache_on(wire)
521 region = self._region(wire)
521 region = self._region(wire)
522
522
523 @region.conditional_cache_on_arguments(condition=cache_on)
523 @region.conditional_cache_on_arguments(condition=cache_on)
524 def _node_history_until(_context_uid, _repo_id):
524 def _node_history_until(_context_uid, _repo_id):
525 repo = self._factory.repo(wire)
525 repo = self._factory.repo(wire)
526 ctx = self._get_ctx(repo, revision)
526 ctx = self._get_ctx(repo, revision)
527 fctx = ctx.filectx(safe_bytes(path))
527 fctx = ctx.filectx(safe_bytes(path))
528
528
529 file_log = list(fctx.filelog())
529 file_log = list(fctx.filelog())
530 if limit:
530 if limit:
531 # Limit to the last n items
531 # Limit to the last n items
532 file_log = file_log[-limit:]
532 file_log = file_log[-limit:]
533
533
534 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
534 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
535 return _node_history_until(context_uid, repo_id, revision, path, limit)
535 return _node_history_until(context_uid, repo_id, revision, path, limit)
536
536
537 @reraise_safe_exceptions
537 @reraise_safe_exceptions
538 def fctx_annotate(self, wire, revision, path):
538 def fctx_annotate(self, wire, revision, path):
539 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
540 ctx = self._get_ctx(repo, revision)
540 ctx = self._get_ctx(repo, revision)
541 fctx = ctx.filectx(safe_bytes(path))
541 fctx = ctx.filectx(safe_bytes(path))
542
542
543 result = []
543 result = []
544 for i, annotate_obj in enumerate(fctx.annotate(), 1):
544 for i, annotate_obj in enumerate(fctx.annotate(), 1):
545 ln_no = i
545 ln_no = i
546 sha = hex(annotate_obj.fctx.node())
546 sha = hex(annotate_obj.fctx.node())
547 content = annotate_obj.text
547 content = annotate_obj.text
548 result.append((ln_no, sha, content))
548 result.append((ln_no, sha, content))
549 return result
549 return result
550
550
551 @reraise_safe_exceptions
551 @reraise_safe_exceptions
552 def fctx_node_data(self, wire, revision, path):
552 def fctx_node_data(self, wire, revision, path):
553 repo = self._factory.repo(wire)
553 repo = self._factory.repo(wire)
554 ctx = self._get_ctx(repo, revision)
554 ctx = self._get_ctx(repo, revision)
555 fctx = ctx.filectx(safe_bytes(path))
555 fctx = ctx.filectx(safe_bytes(path))
556 return fctx.data()
556 return fctx.data()
557
557
558 @reraise_safe_exceptions
558 @reraise_safe_exceptions
559 def fctx_flags(self, wire, commit_id, path):
559 def fctx_flags(self, wire, commit_id, path):
560 cache_on, context_uid, repo_id = self._cache_on(wire)
560 cache_on, context_uid, repo_id = self._cache_on(wire)
561 region = self._region(wire)
561 region = self._region(wire)
562
562
563 @region.conditional_cache_on_arguments(condition=cache_on)
563 @region.conditional_cache_on_arguments(condition=cache_on)
564 def _fctx_flags(_repo_id, _commit_id, _path):
564 def _fctx_flags(_repo_id, _commit_id, _path):
565 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
566 ctx = self._get_ctx(repo, commit_id)
566 ctx = self._get_ctx(repo, commit_id)
567 fctx = ctx.filectx(safe_bytes(path))
567 fctx = ctx.filectx(safe_bytes(path))
568 return fctx.flags()
568 return fctx.flags()
569
569
570 return _fctx_flags(repo_id, commit_id, path)
570 return _fctx_flags(repo_id, commit_id, path)
571
571
572 @reraise_safe_exceptions
572 @reraise_safe_exceptions
573 def fctx_size(self, wire, commit_id, path):
573 def fctx_size(self, wire, commit_id, path):
574 cache_on, context_uid, repo_id = self._cache_on(wire)
574 cache_on, context_uid, repo_id = self._cache_on(wire)
575 region = self._region(wire)
575 region = self._region(wire)
576
576
577 @region.conditional_cache_on_arguments(condition=cache_on)
577 @region.conditional_cache_on_arguments(condition=cache_on)
578 def _fctx_size(_repo_id, _revision, _path):
578 def _fctx_size(_repo_id, _revision, _path):
579 repo = self._factory.repo(wire)
579 repo = self._factory.repo(wire)
580 ctx = self._get_ctx(repo, commit_id)
580 ctx = self._get_ctx(repo, commit_id)
581 fctx = ctx.filectx(safe_bytes(path))
581 fctx = ctx.filectx(safe_bytes(path))
582 return fctx.size()
582 return fctx.size()
583 return _fctx_size(repo_id, commit_id, path)
583 return _fctx_size(repo_id, commit_id, path)
584
584
585 @reraise_safe_exceptions
585 @reraise_safe_exceptions
586 def get_all_commit_ids(self, wire, name):
586 def get_all_commit_ids(self, wire, name):
587 cache_on, context_uid, repo_id = self._cache_on(wire)
587 cache_on, context_uid, repo_id = self._cache_on(wire)
588 region = self._region(wire)
588 region = self._region(wire)
589
589
590 @region.conditional_cache_on_arguments(condition=cache_on)
590 @region.conditional_cache_on_arguments(condition=cache_on)
591 def _get_all_commit_ids(_context_uid, _repo_id, _name):
591 def _get_all_commit_ids(_context_uid, _repo_id, _name):
592 repo = self._factory.repo(wire)
592 repo = self._factory.repo(wire)
593 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
593 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
594 return revs
594 return revs
595 return _get_all_commit_ids(context_uid, repo_id, name)
595 return _get_all_commit_ids(context_uid, repo_id, name)
596
596
597 @reraise_safe_exceptions
597 @reraise_safe_exceptions
598 def get_config_value(self, wire, section, name, untrusted=False):
598 def get_config_value(self, wire, section, name, untrusted=False):
599 repo = self._factory.repo(wire)
599 repo = self._factory.repo(wire)
600 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
600 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
601
601
602 @reraise_safe_exceptions
602 @reraise_safe_exceptions
603 def is_large_file(self, wire, commit_id, path):
603 def is_large_file(self, wire, commit_id, path):
604 cache_on, context_uid, repo_id = self._cache_on(wire)
604 cache_on, context_uid, repo_id = self._cache_on(wire)
605 region = self._region(wire)
605 region = self._region(wire)
606
606
607 @region.conditional_cache_on_arguments(condition=cache_on)
607 @region.conditional_cache_on_arguments(condition=cache_on)
608 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
608 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
609 return largefiles.lfutil.isstandin(safe_bytes(path))
609 return largefiles.lfutil.isstandin(safe_bytes(path))
610
610
611 return _is_large_file(context_uid, repo_id, commit_id, path)
611 return _is_large_file(context_uid, repo_id, commit_id, path)
612
612
613 @reraise_safe_exceptions
613 @reraise_safe_exceptions
614 def is_binary(self, wire, revision, path):
614 def is_binary(self, wire, revision, path):
615 cache_on, context_uid, repo_id = self._cache_on(wire)
615 cache_on, context_uid, repo_id = self._cache_on(wire)
616 region = self._region(wire)
616 region = self._region(wire)
617
617
618 @region.conditional_cache_on_arguments(condition=cache_on)
618 @region.conditional_cache_on_arguments(condition=cache_on)
619 def _is_binary(_repo_id, _sha, _path):
619 def _is_binary(_repo_id, _sha, _path):
620 repo = self._factory.repo(wire)
620 repo = self._factory.repo(wire)
621 ctx = self._get_ctx(repo, revision)
621 ctx = self._get_ctx(repo, revision)
622 fctx = ctx.filectx(safe_bytes(path))
622 fctx = ctx.filectx(safe_bytes(path))
623 return fctx.isbinary()
623 return fctx.isbinary()
624
624
625 return _is_binary(repo_id, revision, path)
625 return _is_binary(repo_id, revision, path)
626
626
627 @reraise_safe_exceptions
627 @reraise_safe_exceptions
628 def md5_hash(self, wire, revision, path):
628 def md5_hash(self, wire, revision, path):
629 cache_on, context_uid, repo_id = self._cache_on(wire)
629 cache_on, context_uid, repo_id = self._cache_on(wire)
630 region = self._region(wire)
630 region = self._region(wire)
631
631
632 @region.conditional_cache_on_arguments(condition=cache_on)
632 @region.conditional_cache_on_arguments(condition=cache_on)
633 def _md5_hash(_repo_id, _sha, _path):
633 def _md5_hash(_repo_id, _sha, _path):
634 repo = self._factory.repo(wire)
634 repo = self._factory.repo(wire)
635 ctx = self._get_ctx(repo, revision)
635 ctx = self._get_ctx(repo, revision)
636 fctx = ctx.filectx(safe_bytes(path))
636 fctx = ctx.filectx(safe_bytes(path))
637 return hashlib.md5(fctx.data()).hexdigest()
637 return hashlib.md5(fctx.data()).hexdigest()
638
638
639 return _md5_hash(repo_id, revision, path)
639 return _md5_hash(repo_id, revision, path)
640
640
641 @reraise_safe_exceptions
641 @reraise_safe_exceptions
642 def in_largefiles_store(self, wire, sha):
642 def in_largefiles_store(self, wire, sha):
643 repo = self._factory.repo(wire)
643 repo = self._factory.repo(wire)
644 return largefiles.lfutil.instore(repo, sha)
644 return largefiles.lfutil.instore(repo, sha)
645
645
646 @reraise_safe_exceptions
646 @reraise_safe_exceptions
647 def in_user_cache(self, wire, sha):
647 def in_user_cache(self, wire, sha):
648 repo = self._factory.repo(wire)
648 repo = self._factory.repo(wire)
649 return largefiles.lfutil.inusercache(repo.ui, sha)
649 return largefiles.lfutil.inusercache(repo.ui, sha)
650
650
651 @reraise_safe_exceptions
651 @reraise_safe_exceptions
652 def store_path(self, wire, sha):
652 def store_path(self, wire, sha):
653 repo = self._factory.repo(wire)
653 repo = self._factory.repo(wire)
654 return largefiles.lfutil.storepath(repo, sha)
654 return largefiles.lfutil.storepath(repo, sha)
655
655
656 @reraise_safe_exceptions
656 @reraise_safe_exceptions
657 def link(self, wire, sha, path):
657 def link(self, wire, sha, path):
658 repo = self._factory.repo(wire)
658 repo = self._factory.repo(wire)
659 largefiles.lfutil.link(
659 largefiles.lfutil.link(
660 largefiles.lfutil.usercachepath(repo.ui, sha), path)
660 largefiles.lfutil.usercachepath(repo.ui, sha), path)
661
661
662 @reraise_safe_exceptions
662 @reraise_safe_exceptions
663 def localrepository(self, wire, create=False):
663 def localrepository(self, wire, create=False):
664 self._factory.repo(wire, create=create)
664 self._factory.repo(wire, create=create)
665
665
666 @reraise_safe_exceptions
666 @reraise_safe_exceptions
667 def lookup(self, wire, revision, both):
667 def lookup(self, wire, revision, both):
668 cache_on, context_uid, repo_id = self._cache_on(wire)
668 cache_on, context_uid, repo_id = self._cache_on(wire)
669 region = self._region(wire)
669 region = self._region(wire)
670
670
671 @region.conditional_cache_on_arguments(condition=cache_on)
671 @region.conditional_cache_on_arguments(condition=cache_on)
672 def _lookup(_context_uid, _repo_id, _revision, _both):
672 def _lookup(_context_uid, _repo_id, _revision, _both):
673
673
674 repo = self._factory.repo(wire)
674 repo = self._factory.repo(wire)
675 rev = _revision
675 rev = _revision
676 if isinstance(rev, int):
676 if isinstance(rev, int):
677 # NOTE(marcink):
677 # NOTE(marcink):
678 # since Mercurial doesn't support negative indexes properly
678 # since Mercurial doesn't support negative indexes properly
679 # we need to shift accordingly by one to get proper index, e.g
679 # we need to shift accordingly by one to get proper index, e.g
680 # repo[-1] => repo[-2]
680 # repo[-1] => repo[-2]
681 # repo[0] => repo[-1]
681 # repo[0] => repo[-1]
682 if rev <= 0:
682 if rev <= 0:
683 rev = rev + -1
683 rev = rev + -1
684 try:
684 try:
685 ctx = self._get_ctx(repo, rev)
685 ctx = self._get_ctx(repo, rev)
686 except (TypeError, RepoLookupError) as e:
686 except (TypeError, RepoLookupError) as e:
687 e._org_exc_tb = traceback.format_exc()
687 e._org_exc_tb = traceback.format_exc()
688 raise exceptions.LookupException(e)(rev)
688 raise exceptions.LookupException(e)(rev)
689 except LookupError as e:
689 except LookupError as e:
690 e._org_exc_tb = traceback.format_exc()
690 e._org_exc_tb = traceback.format_exc()
691 raise exceptions.LookupException(e)(e.name)
691 raise exceptions.LookupException(e)(e.name)
692
692
693 if not both:
693 if not both:
694 return ctx.hex()
694 return ctx.hex()
695
695
696 ctx = repo[ctx.hex()]
696 ctx = repo[ctx.hex()]
697 return ctx.hex(), ctx.rev()
697 return ctx.hex(), ctx.rev()
698
698
699 return _lookup(context_uid, repo_id, revision, both)
699 return _lookup(context_uid, repo_id, revision, both)
700
700
701 @reraise_safe_exceptions
701 @reraise_safe_exceptions
702 def sync_push(self, wire, url):
702 def sync_push(self, wire, url):
703 if not self.check_url(url, wire['config']):
703 if not self.check_url(url, wire['config']):
704 return
704 return
705
705
706 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
707
707
708 # Disable any prompts for this repo
708 # Disable any prompts for this repo
709 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
709 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
710
710
711 bookmarks = list(dict(repo._bookmarks).keys())
711 bookmarks = list(dict(repo._bookmarks).keys())
712 remote = peer(repo, {}, safe_bytes(url))
712 remote = peer(repo, {}, safe_bytes(url))
713 # Disable any prompts for this remote
713 # Disable any prompts for this remote
714 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
714 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
715
715
716 return exchange.push(
716 return exchange.push(
717 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
717 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
718
718
719 @reraise_safe_exceptions
719 @reraise_safe_exceptions
720 def revision(self, wire, rev):
720 def revision(self, wire, rev):
721 repo = self._factory.repo(wire)
721 repo = self._factory.repo(wire)
722 ctx = self._get_ctx(repo, rev)
722 ctx = self._get_ctx(repo, rev)
723 return ctx.rev()
723 return ctx.rev()
724
724
725 @reraise_safe_exceptions
725 @reraise_safe_exceptions
726 def rev_range(self, wire, commit_filter):
726 def rev_range(self, wire, commit_filter):
727 cache_on, context_uid, repo_id = self._cache_on(wire)
727 cache_on, context_uid, repo_id = self._cache_on(wire)
728 region = self._region(wire)
728 region = self._region(wire)
729
729
730 @region.conditional_cache_on_arguments(condition=cache_on)
730 @region.conditional_cache_on_arguments(condition=cache_on)
731 def _rev_range(_context_uid, _repo_id, _filter):
731 def _rev_range(_context_uid, _repo_id, _filter):
732 repo = self._factory.repo(wire)
732 repo = self._factory.repo(wire)
733 revisions = [
733 revisions = [
734 ascii_str(repo[rev].hex())
734 ascii_str(repo[rev].hex())
735 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
735 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
736 ]
736 ]
737 return revisions
737 return revisions
738
738
739 return _rev_range(context_uid, repo_id, sorted(commit_filter))
739 return _rev_range(context_uid, repo_id, sorted(commit_filter))
740
740
741 @reraise_safe_exceptions
741 @reraise_safe_exceptions
742 def rev_range_hash(self, wire, node):
742 def rev_range_hash(self, wire, node):
743 repo = self._factory.repo(wire)
743 repo = self._factory.repo(wire)
744
744
745 def get_revs(repo, rev_opt):
745 def get_revs(repo, rev_opt):
746 if rev_opt:
746 if rev_opt:
747 revs = revrange(repo, rev_opt)
747 revs = revrange(repo, rev_opt)
748 if len(revs) == 0:
748 if len(revs) == 0:
749 return (nullrev, nullrev)
749 return (nullrev, nullrev)
750 return max(revs), min(revs)
750 return max(revs), min(revs)
751 else:
751 else:
752 return len(repo) - 1, 0
752 return len(repo) - 1, 0
753
753
754 stop, start = get_revs(repo, [node + ':'])
754 stop, start = get_revs(repo, [node + ':'])
755 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
755 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
756 return revs
756 return revs
757
757
758 @reraise_safe_exceptions
758 @reraise_safe_exceptions
759 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
759 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
760 other_path = kwargs.pop('other_path', None)
760 org_path = safe_bytes(wire["path"])
761 other_path = safe_bytes(kwargs.pop('other_path', ''))
761
762
762 # case when we want to compare two independent repositories
763 # case when we want to compare two independent repositories
763 if other_path and other_path != wire["path"]:
764 if other_path and other_path != wire["path"]:
764 baseui = self._factory._create_config(wire["config"])
765 baseui = self._factory._create_config(wire["config"])
765 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
766 repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
766 else:
767 else:
767 repo = self._factory.repo(wire)
768 repo = self._factory.repo(wire)
768 return list(repo.revs(rev_spec, *args))
769 return list(repo.revs(rev_spec, *args))
769
770
770 @reraise_safe_exceptions
771 @reraise_safe_exceptions
771 def verify(self, wire,):
772 def verify(self, wire,):
772 repo = self._factory.repo(wire)
773 repo = self._factory.repo(wire)
773 baseui = self._factory._create_config(wire['config'])
774 baseui = self._factory._create_config(wire['config'])
774
775
775 baseui, output = patch_ui_message_output(baseui)
776 baseui, output = patch_ui_message_output(baseui)
776
777
777 repo.ui = baseui
778 repo.ui = baseui
778 verify.verify(repo)
779 verify.verify(repo)
779 return output.getvalue()
780 return output.getvalue()
780
781
781 @reraise_safe_exceptions
782 @reraise_safe_exceptions
782 def hg_update_cache(self, wire,):
783 def hg_update_cache(self, wire,):
783 repo = self._factory.repo(wire)
784 repo = self._factory.repo(wire)
784 baseui = self._factory._create_config(wire['config'])
785 baseui = self._factory._create_config(wire['config'])
785 baseui, output = patch_ui_message_output(baseui)
786 baseui, output = patch_ui_message_output(baseui)
786
787
787 repo.ui = baseui
788 repo.ui = baseui
788 with repo.wlock(), repo.lock():
789 with repo.wlock(), repo.lock():
789 repo.updatecaches(full=True)
790 repo.updatecaches(full=True)
790
791
791 return output.getvalue()
792 return output.getvalue()
792
793
793 @reraise_safe_exceptions
794 @reraise_safe_exceptions
794 def hg_rebuild_fn_cache(self, wire,):
795 def hg_rebuild_fn_cache(self, wire,):
795 repo = self._factory.repo(wire)
796 repo = self._factory.repo(wire)
796 baseui = self._factory._create_config(wire['config'])
797 baseui = self._factory._create_config(wire['config'])
797 baseui, output = patch_ui_message_output(baseui)
798 baseui, output = patch_ui_message_output(baseui)
798
799
799 repo.ui = baseui
800 repo.ui = baseui
800
801
801 repair.rebuildfncache(baseui, repo)
802 repair.rebuildfncache(baseui, repo)
802
803
803 return output.getvalue()
804 return output.getvalue()
804
805
805 @reraise_safe_exceptions
806 @reraise_safe_exceptions
806 def tags(self, wire):
807 def tags(self, wire):
807 cache_on, context_uid, repo_id = self._cache_on(wire)
808 cache_on, context_uid, repo_id = self._cache_on(wire)
808 region = self._region(wire)
809 region = self._region(wire)
809
810
810 @region.conditional_cache_on_arguments(condition=cache_on)
811 @region.conditional_cache_on_arguments(condition=cache_on)
811 def _tags(_context_uid, _repo_id):
812 def _tags(_context_uid, _repo_id):
812 repo = self._factory.repo(wire)
813 repo = self._factory.repo(wire)
813 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
814 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
814
815
815 return _tags(context_uid, repo_id)
816 return _tags(context_uid, repo_id)
816
817
817 @reraise_safe_exceptions
818 @reraise_safe_exceptions
818 def update(self, wire, node=None, clean=False):
819 def update(self, wire, node='', clean=False):
819 repo = self._factory.repo(wire)
820 repo = self._factory.repo(wire)
820 baseui = self._factory._create_config(wire['config'])
821 baseui = self._factory._create_config(wire['config'])
822 node = safe_bytes(node)
823
821 commands.update(baseui, repo, node=node, clean=clean)
824 commands.update(baseui, repo, node=node, clean=clean)
822
825
823 @reraise_safe_exceptions
826 @reraise_safe_exceptions
824 def identify(self, wire):
827 def identify(self, wire):
825 repo = self._factory.repo(wire)
828 repo = self._factory.repo(wire)
826 baseui = self._factory._create_config(wire['config'])
829 baseui = self._factory._create_config(wire['config'])
827 output = io.BytesIO()
830 output = io.BytesIO()
828 baseui.write = output.write
831 baseui.write = output.write
829 # This is required to get a full node id
832 # This is required to get a full node id
830 baseui.debugflag = True
833 baseui.debugflag = True
831 commands.identify(baseui, repo, id=True)
834 commands.identify(baseui, repo, id=True)
832
835
833 return output.getvalue()
836 return output.getvalue()
834
837
835 @reraise_safe_exceptions
838 @reraise_safe_exceptions
836 def heads(self, wire, branch=None):
839 def heads(self, wire, branch=None):
837 repo = self._factory.repo(wire)
840 repo = self._factory.repo(wire)
838 baseui = self._factory._create_config(wire['config'])
841 baseui = self._factory._create_config(wire['config'])
839 output = io.BytesIO()
842 output = io.BytesIO()
840
843
841 def write(data, **unused_kwargs):
844 def write(data, **unused_kwargs):
842 output.write(data)
845 output.write(data)
843
846
844 baseui.write = write
847 baseui.write = write
845 if branch:
848 if branch:
846 args = [safe_bytes(branch)]
849 args = [safe_bytes(branch)]
847 else:
850 else:
848 args = []
851 args = []
849 commands.heads(baseui, repo, template=b'{node} ', *args)
852 commands.heads(baseui, repo, template=b'{node} ', *args)
850
853
851 return output.getvalue()
854 return output.getvalue()
852
855
853 @reraise_safe_exceptions
856 @reraise_safe_exceptions
854 def ancestor(self, wire, revision1, revision2):
857 def ancestor(self, wire, revision1, revision2):
855 repo = self._factory.repo(wire)
858 repo = self._factory.repo(wire)
856 changelog = repo.changelog
859 changelog = repo.changelog
857 lookup = repo.lookup
860 lookup = repo.lookup
858 a = changelog.ancestor(lookup(revision1), lookup(revision2))
861 a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
859 return hex(a)
862 return hex(a)
860
863
861 @reraise_safe_exceptions
864 @reraise_safe_exceptions
862 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
865 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
863 baseui = self._factory._create_config(wire["config"], hooks=hooks)
866 baseui = self._factory._create_config(wire["config"], hooks=hooks)
864 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
867 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
865
868
    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """
        Create an in-memory commit from the given file changes and return
        the new commit's hex id.

        :param message: commit message (str, encoded to bytes here)
        :param parents: parent changeset ids for the new commit
        :param commit_time: commit timestamp
        :param commit_timezone: timezone offset for the timestamp
        :param user: author string
        :param files: all paths touched by the commit
        :param extra: extra commit metadata mapping (keys/values encoded)
        :param removed: paths (str) to delete in this commit
        :param updated: dicts describing added/changed nodes; each is expected
            to carry 'path', 'content' and 'mode' keys
        """

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # honour the repo's phases.publish setting for the new commit's phase
        publishing = baseui.configbool(b'phases', b'publish')

        def _filectxfn(_repo, ctx, path: bytes):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if safe_str(path) in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if safe_bytes(node['path']) == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=safe_bytes(node['path']),
                        data=safe_bytes(node['content']),
                        islink=False,
                        # executable bit derived from the posix mode
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)
            # path was in `files` but neither removed nor updated: caller bug
            abort_exc = exceptions.AbortException()
            raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")

        if publishing:
            new_commit_phase = b'public'
        else:
            new_commit_phase = b'draft'
        # override the phase only for the duration of this commit
        with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
            kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=safe_bytes(message),
                files=[safe_bytes(x) for x in files],
                filectxfn=_filectxfn,
                user=safe_bytes(user),
                date=(commit_time, commit_timezone),
                extra=kwargs)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id
918
921
919 @reraise_safe_exceptions
922 @reraise_safe_exceptions
920 def pull(self, wire, url, commit_ids=None):
923 def pull(self, wire, url, commit_ids=None):
921 repo = self._factory.repo(wire)
924 repo = self._factory.repo(wire)
922 # Disable any prompts for this repo
925 # Disable any prompts for this repo
923 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
926 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
924
927
925 remote = peer(repo, {}, safe_bytes(url))
928 remote = peer(repo, {}, safe_bytes(url))
926 # Disable any prompts for this remote
929 # Disable any prompts for this remote
927 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
930 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
928
931
929 if commit_ids:
932 if commit_ids:
930 commit_ids = [bin(commit_id) for commit_id in commit_ids]
933 commit_ids = [bin(commit_id) for commit_id in commit_ids]
931
934
932 return exchange.pull(
935 return exchange.pull(
933 repo, remote, heads=commit_ids, force=None).cgresult
936 repo, remote, heads=commit_ids, force=None).cgresult
934
937
935 @reraise_safe_exceptions
938 @reraise_safe_exceptions
936 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
939 def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
937 repo = self._factory.repo(wire)
940 repo = self._factory.repo(wire)
938 baseui = self._factory._create_config(wire['config'], hooks=hooks)
941 baseui = self._factory._create_config(wire['config'], hooks=hooks)
939
942
943 source = safe_bytes(source)
944
940 # Mercurial internally has a lot of logic that checks ONLY if
945 # Mercurial internally has a lot of logic that checks ONLY if
941 # option is defined, we just pass those if they are defined then
946 # option is defined, we just pass those if they are defined then
942 opts = {}
947 opts = {}
943 if bookmark:
948 if bookmark:
949 if isinstance(branch, list):
950 bookmark = [safe_bytes(x) for x in bookmark]
951 else:
952 bookmark = safe_bytes(bookmark)
944 opts['bookmark'] = bookmark
953 opts['bookmark'] = bookmark
945 if branch:
954 if branch:
955 if isinstance(branch, list):
956 branch = [safe_bytes(x) for x in branch]
957 else:
958 branch = safe_bytes(branch)
946 opts['branch'] = branch
959 opts['branch'] = branch
947 if revision:
960 if revision:
948 opts['rev'] = revision
961 opts['rev'] = safe_bytes(revision)
949
962
950 commands.pull(baseui, repo, source, **opts)
963 commands.pull(baseui, repo, source, **opts)
951
964
952 @reraise_safe_exceptions
965 @reraise_safe_exceptions
953 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
966 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
954 repo = self._factory.repo(wire)
967 repo = self._factory.repo(wire)
955 baseui = self._factory._create_config(wire['config'], hooks=hooks)
968 baseui = self._factory._create_config(wire['config'], hooks=hooks)
956 commands.push(baseui, repo, dest=dest_path, rev=revisions,
969 commands.push(baseui, repo, dest=dest_path, rev=revisions,
957 new_branch=push_branches)
970 new_branch=push_branches)
958
971
959 @reraise_safe_exceptions
972 @reraise_safe_exceptions
960 def strip(self, wire, revision, update, backup):
973 def strip(self, wire, revision, update, backup):
961 repo = self._factory.repo(wire)
974 repo = self._factory.repo(wire)
962 ctx = self._get_ctx(repo, revision)
975 ctx = self._get_ctx(repo, revision)
963 hgext_strip(
976 hgext_strip(
964 repo.baseui, repo, ctx.node(), update=update, backup=backup)
977 repo.baseui, repo, ctx.node(), update=update, backup=backup)
965
978
966 @reraise_safe_exceptions
979 @reraise_safe_exceptions
967 def get_unresolved_files(self, wire):
980 def get_unresolved_files(self, wire):
968 repo = self._factory.repo(wire)
981 repo = self._factory.repo(wire)
969
982
970 log.debug('Calculating unresolved files for repo: %s', repo)
983 log.debug('Calculating unresolved files for repo: %s', repo)
971 output = io.BytesIO()
984 output = io.BytesIO()
972
985
973 def write(data, **unused_kwargs):
986 def write(data, **unused_kwargs):
974 output.write(data)
987 output.write(data)
975
988
976 baseui = self._factory._create_config(wire['config'])
989 baseui = self._factory._create_config(wire['config'])
977 baseui.write = write
990 baseui.write = write
978
991
979 commands.resolve(baseui, repo, list=True)
992 commands.resolve(baseui, repo, list=True)
980 unresolved = output.getvalue().splitlines(0)
993 unresolved = output.getvalue().splitlines(0)
981 return unresolved
994 return unresolved
982
995
983 @reraise_safe_exceptions
996 @reraise_safe_exceptions
984 def merge(self, wire, revision):
997 def merge(self, wire, revision):
985 repo = self._factory.repo(wire)
998 repo = self._factory.repo(wire)
986 baseui = self._factory._create_config(wire['config'])
999 baseui = self._factory._create_config(wire['config'])
987 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1000 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
988
1001
989 # In case of sub repositories are used mercurial prompts the user in
1002 # In case of sub repositories are used mercurial prompts the user in
990 # case of merge conflicts or different sub repository sources. By
1003 # case of merge conflicts or different sub repository sources. By
991 # setting the interactive flag to `False` mercurial doesn't prompt the
1004 # setting the interactive flag to `False` mercurial doesn't prompt the
992 # used but instead uses a default value.
1005 # used but instead uses a default value.
993 repo.ui.setconfig(b'ui', b'interactive', False)
1006 repo.ui.setconfig(b'ui', b'interactive', False)
994 commands.merge(baseui, repo, rev=revision)
1007 commands.merge(baseui, repo, rev=revision)
995
1008
996 @reraise_safe_exceptions
1009 @reraise_safe_exceptions
997 def merge_state(self, wire):
1010 def merge_state(self, wire):
998 repo = self._factory.repo(wire)
1011 repo = self._factory.repo(wire)
999 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1012 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1000
1013
1001 # In case of sub repositories are used mercurial prompts the user in
1014 # In case of sub repositories are used mercurial prompts the user in
1002 # case of merge conflicts or different sub repository sources. By
1015 # case of merge conflicts or different sub repository sources. By
1003 # setting the interactive flag to `False` mercurial doesn't prompt the
1016 # setting the interactive flag to `False` mercurial doesn't prompt the
1004 # used but instead uses a default value.
1017 # used but instead uses a default value.
1005 repo.ui.setconfig(b'ui', b'interactive', False)
1018 repo.ui.setconfig(b'ui', b'interactive', False)
1006 ms = hg_merge.mergestate(repo)
1019 ms = hg_merge.mergestate(repo)
1007 return [x for x in ms.unresolved()]
1020 return [x for x in ms.unresolved()]
1008
1021
1009 @reraise_safe_exceptions
1022 @reraise_safe_exceptions
1010 def commit(self, wire, message, username, close_branch=False):
1023 def commit(self, wire, message, username, close_branch=False):
1011 repo = self._factory.repo(wire)
1024 repo = self._factory.repo(wire)
1012 baseui = self._factory._create_config(wire['config'])
1025 baseui = self._factory._create_config(wire['config'])
1013 repo.ui.setconfig(b'ui', b'username', username)
1026 repo.ui.setconfig(b'ui', b'username', username)
1014 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1027 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1015
1028
1016 @reraise_safe_exceptions
1029 @reraise_safe_exceptions
1017 def rebase(self, wire, source=None, dest=None, abort=False):
1030 def rebase(self, wire, source=None, dest=None, abort=False):
1018 repo = self._factory.repo(wire)
1031 repo = self._factory.repo(wire)
1019 baseui = self._factory._create_config(wire['config'])
1032 baseui = self._factory._create_config(wire['config'])
1020 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1033 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1021 # In case of sub repositories are used mercurial prompts the user in
1034 # In case of sub repositories are used mercurial prompts the user in
1022 # case of merge conflicts or different sub repository sources. By
1035 # case of merge conflicts or different sub repository sources. By
1023 # setting the interactive flag to `False` mercurial doesn't prompt the
1036 # setting the interactive flag to `False` mercurial doesn't prompt the
1024 # used but instead uses a default value.
1037 # used but instead uses a default value.
1025 repo.ui.setconfig(b'ui', b'interactive', False)
1038 repo.ui.setconfig(b'ui', b'interactive', False)
1026 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1039 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1027
1040
1028 @reraise_safe_exceptions
1041 @reraise_safe_exceptions
1029 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1042 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1030 repo = self._factory.repo(wire)
1043 repo = self._factory.repo(wire)
1031 ctx = self._get_ctx(repo, revision)
1044 ctx = self._get_ctx(repo, revision)
1032 node = ctx.node()
1045 node = ctx.node()
1033
1046
1034 date = (tag_time, tag_timezone)
1047 date = (tag_time, tag_timezone)
1035 try:
1048 try:
1036 hg_tag.tag(repo, name, node, message, local, user, date)
1049 hg_tag.tag(repo, name, node, message, local, user, date)
1037 except Abort as e:
1050 except Abort as e:
1038 log.exception("Tag operation aborted")
1051 log.exception("Tag operation aborted")
1039 # Exception can contain unicode which we convert
1052 # Exception can contain unicode which we convert
1040 raise exceptions.AbortException(e)(repr(e))
1053 raise exceptions.AbortException(e)(repr(e))
1041
1054
1042 @reraise_safe_exceptions
1055 @reraise_safe_exceptions
1043 def bookmark(self, wire, bookmark, revision=None):
1056 def bookmark(self, wire, bookmark, revision=''):
1044 repo = self._factory.repo(wire)
1057 repo = self._factory.repo(wire)
1045 baseui = self._factory._create_config(wire['config'])
1058 baseui = self._factory._create_config(wire['config'])
1046 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1059 commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1047
1060
1048 @reraise_safe_exceptions
1061 @reraise_safe_exceptions
1049 def install_hooks(self, wire, force=False):
1062 def install_hooks(self, wire, force=False):
1050 # we don't need any special hooks for Mercurial
1063 # we don't need any special hooks for Mercurial
1051 pass
1064 pass
1052
1065
1053 @reraise_safe_exceptions
1066 @reraise_safe_exceptions
1054 def get_hooks_info(self, wire):
1067 def get_hooks_info(self, wire):
1055 return {
1068 return {
1056 'pre_version': vcsserver.__version__,
1069 'pre_version': vcsserver.__version__,
1057 'post_version': vcsserver.__version__,
1070 'post_version': vcsserver.__version__,
1058 }
1071 }
1059
1072
1060 @reraise_safe_exceptions
1073 @reraise_safe_exceptions
1061 def set_head_ref(self, wire, head_name):
1074 def set_head_ref(self, wire, head_name):
1062 pass
1075 pass
1063
1076
1064 @reraise_safe_exceptions
1077 @reraise_safe_exceptions
1065 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1078 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1066 archive_dir_name, commit_id):
1079 archive_dir_name, commit_id):
1067
1080
1068 def file_walker(_commit_id, path):
1081 def file_walker(_commit_id, path):
1069 repo = self._factory.repo(wire)
1082 repo = self._factory.repo(wire)
1070 ctx = repo[_commit_id]
1083 ctx = repo[_commit_id]
1071 is_root = path in ['', '/']
1084 is_root = path in ['', '/']
1072 if is_root:
1085 if is_root:
1073 matcher = alwaysmatcher(badfn=None)
1086 matcher = alwaysmatcher(badfn=None)
1074 else:
1087 else:
1075 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1088 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1076 file_iter = ctx.manifest().walk(matcher)
1089 file_iter = ctx.manifest().walk(matcher)
1077
1090
1078 for fn in file_iter:
1091 for fn in file_iter:
1079 file_path = fn
1092 file_path = fn
1080 flags = ctx.flags(fn)
1093 flags = ctx.flags(fn)
1081 mode = b'x' in flags and 0o755 or 0o644
1094 mode = b'x' in flags and 0o755 or 0o644
1082 is_link = b'l' in flags
1095 is_link = b'l' in flags
1083
1096
1084 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1097 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1085
1098
1086 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1099 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1087 archive_dir_name, commit_id)
1100 archive_dir_name, commit_id)
1088
1101
@@ -1,875 +1,879 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.str_utils import safe_str
41 from vcsserver.str_utils import safe_str, safe_bytes
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.lib.svnremoterepo import svnremoterepo
43 from vcsserver.lib.svnremoterepo import svnremoterepo
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 svn_compatible_versions_map = {
47 svn_compatible_versions_map = {
48 'pre-1.4-compatible': '1.3',
48 'pre-1.4-compatible': '1.3',
49 'pre-1.5-compatible': '1.4',
49 'pre-1.5-compatible': '1.4',
50 'pre-1.6-compatible': '1.5',
50 'pre-1.6-compatible': '1.5',
51 'pre-1.8-compatible': '1.7',
51 'pre-1.8-compatible': '1.7',
52 'pre-1.9-compatible': '1.8',
52 'pre-1.9-compatible': '1.8',
53 }
53 }
54
54
55 current_compatible_version = '1.14'
55 current_compatible_version = '1.14'
56
56
57
57
58 def reraise_safe_exceptions(func):
58 def reraise_safe_exceptions(func):
59 """Decorator for converting svn exceptions to something neutral."""
59 """Decorator for converting svn exceptions to something neutral."""
60 def wrapper(*args, **kwargs):
60 def wrapper(*args, **kwargs):
61 try:
61 try:
62 return func(*args, **kwargs)
62 return func(*args, **kwargs)
63 except Exception as e:
63 except Exception as e:
64 if not hasattr(e, '_vcs_kind'):
64 if not hasattr(e, '_vcs_kind'):
65 log.exception("Unhandled exception in svn remote call")
65 log.exception("Unhandled exception in svn remote call")
66 raise_from_original(exceptions.UnhandledException(e))
66 raise_from_original(exceptions.UnhandledException(e), e)
67 raise
67 raise
68 return wrapper
68 return wrapper
69
69
70
70
71 class SubversionFactory(RepoFactory):
71 class SubversionFactory(RepoFactory):
72 repo_type = 'svn'
72 repo_type = 'svn'
73
73
74 def _create_repo(self, wire, create, compatible_version):
74 def _create_repo(self, wire, create, compatible_version):
75 path = svn.core.svn_path_canonicalize(wire['path'])
75 path = svn.core.svn_path_canonicalize(wire['path'])
76 if create:
76 if create:
77 fs_config = {'compatible-version': current_compatible_version}
77 fs_config = {'compatible-version': current_compatible_version}
78 if compatible_version:
78 if compatible_version:
79
79
80 compatible_version_string = \
80 compatible_version_string = \
81 svn_compatible_versions_map.get(compatible_version) \
81 svn_compatible_versions_map.get(compatible_version) \
82 or compatible_version
82 or compatible_version
83 fs_config['compatible-version'] = compatible_version_string
83 fs_config['compatible-version'] = compatible_version_string
84
84
85 log.debug('Create SVN repo with config "%s"', fs_config)
85 log.debug('Create SVN repo with config "%s"', fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
87 else:
87 else:
88 repo = svn.repos.open(path)
88 repo = svn.repos.open(path)
89
89
90 log.debug('Got SVN object: %s', repo)
90 log.debug('Got SVN object: %s', repo)
91 return repo
91 return repo
92
92
93 def repo(self, wire, create=False, compatible_version=None):
93 def repo(self, wire, create=False, compatible_version=None):
94 """
94 """
95 Get a repository instance for the given path.
95 Get a repository instance for the given path.
96 """
96 """
97 return self._create_repo(wire, create, compatible_version)
97 return self._create_repo(wire, create, compatible_version)
98
98
99
99
100 NODE_TYPE_MAPPING = {
100 NODE_TYPE_MAPPING = {
101 svn.core.svn_node_file: 'file',
101 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_dir: 'dir',
102 svn.core.svn_node_dir: 'dir',
103 }
103 }
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
108 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
109 self._factory = factory
110
110
111 @reraise_safe_exceptions
111 @reraise_safe_exceptions
112 def discover_svn_version(self):
112 def discover_svn_version(self):
113 try:
113 try:
114 import svn.core
114 import svn.core
115 svn_ver = svn.core.SVN_VERSION
115 svn_ver = svn.core.SVN_VERSION
116 except ImportError:
116 except ImportError:
117 svn_ver = None
117 svn_ver = None
118 return safe_str(svn_ver)
118 return safe_str(svn_ver)
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def is_empty(self, wire):
121 def is_empty(self, wire):
122
122
123 try:
123 try:
124 return self.lookup(wire, -1) == 0
124 return self.lookup(wire, -1) == 0
125 except Exception:
125 except Exception:
126 log.exception("failed to read object_store")
126 log.exception("failed to read object_store")
127 return False
127 return False
128
128
129 def check_url(self, url):
129 def check_url(self, url):
130
130
131 # uuid function get's only valid UUID from proper repo, else
131 # uuid function get's only valid UUID from proper repo, else
132 # throws exception
132 # throws exception
133 username, password, src_url = self.get_url_and_credentials(url)
133 username, password, src_url = self.get_url_and_credentials(url)
134 try:
134 try:
135 svnremoterepo(username, password, src_url).svn().uuid
135 svnremoterepo(username, password, src_url).svn().uuid
136 except Exception:
136 except Exception:
137 tb = traceback.format_exc()
137 tb = traceback.format_exc()
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 raise URLError(
139 raise URLError(
140 '"%s" is not a valid Subversion source url.' % (url, ))
140 '"%s" is not a valid Subversion source url.' % (url, ))
141 return True
141 return True
142
142
143 def is_path_valid_repository(self, wire, path):
143 def is_path_valid_repository(self, wire, path):
144
144
145 # NOTE(marcink): short circuit the check for SVN repo
145 # NOTE(marcink): short circuit the check for SVN repo
146 # the repos.open might be expensive to check, but we have one cheap
146 # the repos.open might be expensive to check, but we have one cheap
147 # pre condition that we can use, to check for 'format' file
147 # pre condition that we can use, to check for 'format' file
148
148
149 if not os.path.isfile(os.path.join(path, 'format')):
149 if not os.path.isfile(os.path.join(path, 'format')):
150 return False
150 return False
151
151
152 try:
152 try:
153 svn.repos.open(path)
153 svn.repos.open(path)
154 except svn.core.SubversionException:
154 except svn.core.SubversionException:
155 tb = traceback.format_exc()
155 tb = traceback.format_exc()
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 return False
157 return False
158 return True
158 return True
159
159
160 @reraise_safe_exceptions
160 @reraise_safe_exceptions
161 def verify(self, wire,):
161 def verify(self, wire,):
162 repo_path = wire['path']
162 repo_path = wire['path']
163 if not self.is_path_valid_repository(wire, repo_path):
163 if not self.is_path_valid_repository(wire, repo_path):
164 raise Exception(
164 raise Exception(
165 "Path %s is not a valid Subversion repository." % repo_path)
165 "Path %s is not a valid Subversion repository." % repo_path)
166
166
167 cmd = ['svnadmin', 'info', repo_path]
167 cmd = ['svnadmin', 'info', repo_path]
168 stdout, stderr = subprocessio.run_command(cmd)
168 stdout, stderr = subprocessio.run_command(cmd)
169 return stdout
169 return stdout
170
170
171 def lookup(self, wire, revision):
171 def lookup(self, wire, revision):
172 if revision not in [-1, None, 'HEAD']:
172 if revision not in [-1, None, 'HEAD']:
173 raise NotImplementedError
173 raise NotImplementedError
174 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
175 fs_ptr = svn.repos.fs(repo)
175 fs_ptr = svn.repos.fs(repo)
176 head = svn.fs.youngest_rev(fs_ptr)
176 head = svn.fs.youngest_rev(fs_ptr)
177 return head
177 return head
178
178
179 def lookup_interval(self, wire, start_ts, end_ts):
179 def lookup_interval(self, wire, start_ts, end_ts):
180 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
181 fsobj = svn.repos.fs(repo)
181 fsobj = svn.repos.fs(repo)
182 start_rev = None
182 start_rev = None
183 end_rev = None
183 end_rev = None
184 if start_ts:
184 if start_ts:
185 start_ts_svn = apr_time_t(start_ts)
185 start_ts_svn = apr_time_t(start_ts)
186 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
186 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
187 else:
187 else:
188 start_rev = 1
188 start_rev = 1
189 if end_ts:
189 if end_ts:
190 end_ts_svn = apr_time_t(end_ts)
190 end_ts_svn = apr_time_t(end_ts)
191 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
191 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
192 else:
192 else:
193 end_rev = svn.fs.youngest_rev(fsobj)
193 end_rev = svn.fs.youngest_rev(fsobj)
194 return start_rev, end_rev
194 return start_rev, end_rev
195
195
196 def revision_properties(self, wire, revision):
196 def revision_properties(self, wire, revision):
197
197
198 cache_on, context_uid, repo_id = self._cache_on(wire)
198 cache_on, context_uid, repo_id = self._cache_on(wire)
199 region = self._region(wire)
199 region = self._region(wire)
200 @region.conditional_cache_on_arguments(condition=cache_on)
200 @region.conditional_cache_on_arguments(condition=cache_on)
201 def _revision_properties(_repo_id, _revision):
201 def _revision_properties(_repo_id, _revision):
202 repo = self._factory.repo(wire)
202 repo = self._factory.repo(wire)
203 fs_ptr = svn.repos.fs(repo)
203 fs_ptr = svn.repos.fs(repo)
204 return svn.fs.revision_proplist(fs_ptr, revision)
204 return svn.fs.revision_proplist(fs_ptr, revision)
205 return _revision_properties(repo_id, revision)
205 return _revision_properties(repo_id, revision)
206
206
207 def revision_changes(self, wire, revision):
207 def revision_changes(self, wire, revision):
208
208
209 repo = self._factory.repo(wire)
209 repo = self._factory.repo(wire)
210 fsobj = svn.repos.fs(repo)
210 fsobj = svn.repos.fs(repo)
211 rev_root = svn.fs.revision_root(fsobj, revision)
211 rev_root = svn.fs.revision_root(fsobj, revision)
212
212
213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 base_dir = ""
215 base_dir = ""
216 send_deltas = False
216 send_deltas = False
217 svn.repos.replay2(
217 svn.repos.replay2(
218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 editor_ptr, editor_baton, None)
219 editor_ptr, editor_baton, None)
220
220
221 added = []
221 added = []
222 changed = []
222 changed = []
223 removed = []
223 removed = []
224
224
225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 for path, change in editor.changes.items():
226 for path, change in editor.changes.items():
227 # TODO: Decide what to do with directory nodes. Subversion can add
227 # TODO: Decide what to do with directory nodes. Subversion can add
228 # empty directories.
228 # empty directories.
229
229
230 if change.item_kind == svn.core.svn_node_dir:
230 if change.item_kind == svn.core.svn_node_dir:
231 continue
231 continue
232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 added.append(path)
233 added.append(path)
234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 svn.repos.CHANGE_ACTION_REPLACE]:
235 svn.repos.CHANGE_ACTION_REPLACE]:
236 changed.append(path)
236 changed.append(path)
237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 removed.append(path)
238 removed.append(path)
239 else:
239 else:
240 raise NotImplementedError(
240 raise NotImplementedError(
241 "Action %s not supported on path %s" % (
241 "Action %s not supported on path %s" % (
242 change.action, path))
242 change.action, path))
243
243
244 changes = {
244 changes = {
245 'added': added,
245 'added': added,
246 'changed': changed,
246 'changed': changed,
247 'removed': removed,
247 'removed': removed,
248 }
248 }
249 return changes
249 return changes
250
250
251 @reraise_safe_exceptions
251 @reraise_safe_exceptions
252 def node_history(self, wire, path, revision, limit):
252 def node_history(self, wire, path, revision, limit):
253 cache_on, context_uid, repo_id = self._cache_on(wire)
253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 region = self._region(wire)
254 region = self._region(wire)
255 @region.conditional_cache_on_arguments(condition=cache_on)
255 @region.conditional_cache_on_arguments(condition=cache_on)
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 cross_copies = False
257 cross_copies = False
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 fsobj = svn.repos.fs(repo)
259 fsobj = svn.repos.fs(repo)
260 rev_root = svn.fs.revision_root(fsobj, revision)
260 rev_root = svn.fs.revision_root(fsobj, revision)
261
261
262 history_revisions = []
262 history_revisions = []
263 history = svn.fs.node_history(rev_root, path)
263 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.history_prev(history, cross_copies)
264 history = svn.fs.history_prev(history, cross_copies)
265 while history:
265 while history:
266 __, node_revision = svn.fs.history_location(history)
266 __, node_revision = svn.fs.history_location(history)
267 history_revisions.append(node_revision)
267 history_revisions.append(node_revision)
268 if limit and len(history_revisions) >= limit:
268 if limit and len(history_revisions) >= limit:
269 break
269 break
270 history = svn.fs.history_prev(history, cross_copies)
270 history = svn.fs.history_prev(history, cross_copies)
271 return history_revisions
271 return history_revisions
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273
273
274 def node_properties(self, wire, path, revision):
274 def node_properties(self, wire, path, revision):
275 cache_on, context_uid, repo_id = self._cache_on(wire)
275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 region = self._region(wire)
276 region = self._region(wire)
277 @region.conditional_cache_on_arguments(condition=cache_on)
277 @region.conditional_cache_on_arguments(condition=cache_on)
278 def _node_properties(_repo_id, _path, _revision):
278 def _node_properties(_repo_id, _path, _revision):
279 repo = self._factory.repo(wire)
279 repo = self._factory.repo(wire)
280 fsobj = svn.repos.fs(repo)
280 fsobj = svn.repos.fs(repo)
281 rev_root = svn.fs.revision_root(fsobj, revision)
281 rev_root = svn.fs.revision_root(fsobj, revision)
282 return svn.fs.node_proplist(rev_root, path)
282 return svn.fs.node_proplist(rev_root, path)
283 return _node_properties(repo_id, path, revision)
283 return _node_properties(repo_id, path, revision)
284
284
285 def file_annotate(self, wire, path, revision):
285 def file_annotate(self, wire, path, revision):
286 abs_path = 'file://' + urllib.request.pathname2url(
286 abs_path = 'file://' + urllib.request.pathname2url(
287 vcspath.join(wire['path'], path))
287 vcspath.join(wire['path'], path))
288 file_uri = svn.core.svn_path_canonicalize(abs_path)
288 file_uri = svn.core.svn_path_canonicalize(abs_path)
289
289
290 start_rev = svn_opt_revision_value_t(0)
290 start_rev = svn_opt_revision_value_t(0)
291 peg_rev = svn_opt_revision_value_t(revision)
291 peg_rev = svn_opt_revision_value_t(revision)
292 end_rev = peg_rev
292 end_rev = peg_rev
293
293
294 annotations = []
294 annotations = []
295
295
296 def receiver(line_no, revision, author, date, line, pool):
296 def receiver(line_no, revision, author, date, line, pool):
297 annotations.append((line_no, revision, line))
297 annotations.append((line_no, revision, line))
298
298
299 # TODO: Cannot use blame5, missing typemap function in the swig code
299 # TODO: Cannot use blame5, missing typemap function in the swig code
300 try:
300 try:
301 svn.client.blame2(
301 svn.client.blame2(
302 file_uri, peg_rev, start_rev, end_rev,
302 file_uri, peg_rev, start_rev, end_rev,
303 receiver, svn.client.create_context())
303 receiver, svn.client.create_context())
304 except svn.core.SubversionException as exc:
304 except svn.core.SubversionException as exc:
305 log.exception("Error during blame operation.")
305 log.exception("Error during blame operation.")
306 raise Exception(
306 raise Exception(
307 "Blame not supported or file does not exist at path %s. "
307 "Blame not supported or file does not exist at path %s. "
308 "Error %s." % (path, exc))
308 "Error %s." % (path, exc))
309
309
310 return annotations
310 return annotations
311
311
312 def get_node_type(self, wire, path, revision=None):
312 def get_node_type(self, wire, path, revision=None):
313
313
314 cache_on, context_uid, repo_id = self._cache_on(wire)
314 cache_on, context_uid, repo_id = self._cache_on(wire)
315 region = self._region(wire)
315 region = self._region(wire)
316 @region.conditional_cache_on_arguments(condition=cache_on)
316 @region.conditional_cache_on_arguments(condition=cache_on)
317 def _get_node_type(_repo_id, _path, _revision):
317 def _get_node_type(_repo_id, _path, _revision):
318 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
319 fs_ptr = svn.repos.fs(repo)
319 fs_ptr = svn.repos.fs(repo)
320 if _revision is None:
320 if _revision is None:
321 _revision = svn.fs.youngest_rev(fs_ptr)
321 _revision = svn.fs.youngest_rev(fs_ptr)
322 root = svn.fs.revision_root(fs_ptr, _revision)
322 root = svn.fs.revision_root(fs_ptr, _revision)
323 node = svn.fs.check_path(root, path)
323 node = svn.fs.check_path(root, path)
324 return NODE_TYPE_MAPPING.get(node, None)
324 return NODE_TYPE_MAPPING.get(node, None)
325 return _get_node_type(repo_id, path, revision)
325 return _get_node_type(repo_id, path, revision)
326
326
327 def get_nodes(self, wire, path, revision=None):
327 def get_nodes(self, wire, path, revision=None):
328
328
329 cache_on, context_uid, repo_id = self._cache_on(wire)
329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 region = self._region(wire)
330 region = self._region(wire)
331
331 @region.conditional_cache_on_arguments(condition=cache_on)
332 @region.conditional_cache_on_arguments(condition=cache_on)
332 def _get_nodes(_repo_id, _path, _revision):
333 def _get_nodes(_repo_id, _path, _revision):
333 repo = self._factory.repo(wire)
334 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
335 fsobj = svn.repos.fs(repo)
335 if _revision is None:
336 if _revision is None:
336 _revision = svn.fs.youngest_rev(fsobj)
337 _revision = svn.fs.youngest_rev(fsobj)
337 root = svn.fs.revision_root(fsobj, _revision)
338 root = svn.fs.revision_root(fsobj, _revision)
338 entries = svn.fs.dir_entries(root, path)
339 entries = svn.fs.dir_entries(root, path)
339 result = []
340 result = []
340 for entry_path, entry_info in entries.items():
341 for entry_path, entry_info in entries.items():
341 result.append(
342 result.append(
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 return result
344 return result
344 return _get_nodes(repo_id, path, revision)
345 return _get_nodes(repo_id, path, revision)
345
346
346 def get_file_content(self, wire, path, rev=None):
347 def get_file_content(self, wire, path, rev=None):
347 repo = self._factory.repo(wire)
348 repo = self._factory.repo(wire)
348 fsobj = svn.repos.fs(repo)
349 fsobj = svn.repos.fs(repo)
349 if rev is None:
350 if rev is None:
350 rev = svn.fs.youngest_revision(fsobj)
351 rev = svn.fs.youngest_revision(fsobj)
351 root = svn.fs.revision_root(fsobj, rev)
352 root = svn.fs.revision_root(fsobj, rev)
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 return content.read()
354 return content.read()
354
355
355 def get_file_size(self, wire, path, revision=None):
356 def get_file_size(self, wire, path, revision=None):
356
357
357 cache_on, context_uid, repo_id = self._cache_on(wire)
358 cache_on, context_uid, repo_id = self._cache_on(wire)
358 region = self._region(wire)
359 region = self._region(wire)
359
360
360 @region.conditional_cache_on_arguments(condition=cache_on)
361 @region.conditional_cache_on_arguments(condition=cache_on)
361 def _get_file_size(_repo_id, _path, _revision):
362 def _get_file_size(_repo_id, _path, _revision):
362 repo = self._factory.repo(wire)
363 repo = self._factory.repo(wire)
363 fsobj = svn.repos.fs(repo)
364 fsobj = svn.repos.fs(repo)
364 if _revision is None:
365 if _revision is None:
365 _revision = svn.fs.youngest_revision(fsobj)
366 _revision = svn.fs.youngest_revision(fsobj)
366 root = svn.fs.revision_root(fsobj, _revision)
367 root = svn.fs.revision_root(fsobj, _revision)
367 size = svn.fs.file_length(root, path)
368 size = svn.fs.file_length(root, path)
368 return size
369 return size
369 return _get_file_size(repo_id, path, revision)
370 return _get_file_size(repo_id, path, revision)
370
371
371 def create_repository(self, wire, compatible_version=None):
372 def create_repository(self, wire, compatible_version=None):
372 log.info('Creating Subversion repository in path "%s"', wire['path'])
373 log.info('Creating Subversion repository in path "%s"', wire['path'])
373 self._factory.repo(wire, create=True,
374 self._factory.repo(wire, create=True,
374 compatible_version=compatible_version)
375 compatible_version=compatible_version)
375
376
376 def get_url_and_credentials(self, src_url):
377 def get_url_and_credentials(self, src_url):
377 obj = urllib.parse.urlparse(src_url)
378 obj = urllib.parse.urlparse(src_url)
378 username = obj.username or None
379 username = obj.username or None
379 password = obj.password or None
380 password = obj.password or None
380 return username, password, src_url
381 return username, password, src_url
381
382
382 def import_remote_repository(self, wire, src_url):
383 def import_remote_repository(self, wire, src_url):
383 repo_path = wire['path']
384 repo_path = wire['path']
384 if not self.is_path_valid_repository(wire, repo_path):
385 if not self.is_path_valid_repository(wire, repo_path):
385 raise Exception(
386 raise Exception(
386 "Path %s is not a valid Subversion repository." % repo_path)
387 "Path %s is not a valid Subversion repository." % repo_path)
387
388
388 username, password, src_url = self.get_url_and_credentials(src_url)
389 username, password, src_url = self.get_url_and_credentials(src_url)
389 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
390 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
390 '--trust-server-cert-failures=unknown-ca']
391 '--trust-server-cert-failures=unknown-ca']
391 if username and password:
392 if username and password:
392 rdump_cmd += ['--username', username, '--password', password]
393 rdump_cmd += ['--username', username, '--password', password]
393 rdump_cmd += [src_url]
394 rdump_cmd += [src_url]
394
395
395 rdump = subprocess.Popen(
396 rdump = subprocess.Popen(
396 rdump_cmd,
397 rdump_cmd,
397 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
398 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
398 load = subprocess.Popen(
399 load = subprocess.Popen(
399 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
400 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
400
401
401 # TODO: johbo: This can be a very long operation, might be better
402 # TODO: johbo: This can be a very long operation, might be better
402 # to track some kind of status and provide an api to check if the
403 # to track some kind of status and provide an api to check if the
403 # import is done.
404 # import is done.
404 rdump.wait()
405 rdump.wait()
405 load.wait()
406 load.wait()
406
407
407 log.debug('Return process ended with code: %s', rdump.returncode)
408 log.debug('Return process ended with code: %s', rdump.returncode)
408 if rdump.returncode != 0:
409 if rdump.returncode != 0:
409 errors = rdump.stderr.read()
410 errors = rdump.stderr.read()
410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
411 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
411
412
412 reason = 'UNKNOWN'
413 reason = 'UNKNOWN'
413 if b'svnrdump: E230001:' in errors:
414 if b'svnrdump: E230001:' in errors:
414 reason = 'INVALID_CERTIFICATE'
415 reason = 'INVALID_CERTIFICATE'
415
416
416 if reason == 'UNKNOWN':
417 if reason == 'UNKNOWN':
417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
418 reason = 'UNKNOWN:{}'.format(safe_str(errors))
418
419
419 raise Exception(
420 raise Exception(
420 'Failed to dump the remote repository from %s. Reason:%s' % (
421 'Failed to dump the remote repository from %s. Reason:%s' % (
421 src_url, reason))
422 src_url, reason))
422 if load.returncode != 0:
423 if load.returncode != 0:
423 raise Exception(
424 raise Exception(
424 'Failed to load the dump of remote repository from %s.' %
425 'Failed to load the dump of remote repository from %s.' %
425 (src_url, ))
426 (src_url, ))
426
427
427 def commit(self, wire, message, author, timestamp, updated, removed):
428 def commit(self, wire, message, author, timestamp, updated, removed):
428 assert isinstance(message, str)
429
429 assert isinstance(author, str)
430 updated = [{k: safe_bytes(v) for k, v in x.items() if isinstance(v, str)} for x in updated]
431
432 message = safe_bytes(message)
433 author = safe_bytes(author)
430
434
431 repo = self._factory.repo(wire)
435 repo = self._factory.repo(wire)
432 fsobj = svn.repos.fs(repo)
436 fsobj = svn.repos.fs(repo)
433
437
434 rev = svn.fs.youngest_rev(fsobj)
438 rev = svn.fs.youngest_rev(fsobj)
435 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
439 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
436 txn_root = svn.fs.txn_root(txn)
440 txn_root = svn.fs.txn_root(txn)
437
441
438 for node in updated:
442 for node in updated:
439 TxnNodeProcessor(node, txn_root).update()
443 TxnNodeProcessor(node, txn_root).update()
440 for node in removed:
444 for node in removed:
441 TxnNodeProcessor(node, txn_root).remove()
445 TxnNodeProcessor(node, txn_root).remove()
442
446
443 commit_id = svn.repos.fs_commit_txn(repo, txn)
447 commit_id = svn.repos.fs_commit_txn(repo, txn)
444
448
445 if timestamp:
449 if timestamp:
446 apr_time = apr_time_t(timestamp)
450 apr_time = apr_time_t(timestamp)
447 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
451 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
448 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
452 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
449
453
450 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
454 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
451 return commit_id
455 return commit_id
452
456
453 def diff(self, wire, rev1, rev2, path1=None, path2=None,
457 def diff(self, wire, rev1, rev2, path1=None, path2=None,
454 ignore_whitespace=False, context=3):
458 ignore_whitespace=False, context=3):
455
459
456 wire.update(cache=False)
460 wire.update(cache=False)
457 repo = self._factory.repo(wire)
461 repo = self._factory.repo(wire)
458 diff_creator = SvnDiffer(
462 diff_creator = SvnDiffer(
459 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
463 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
460 try:
464 try:
461 return diff_creator.generate_diff()
465 return diff_creator.generate_diff()
462 except svn.core.SubversionException as e:
466 except svn.core.SubversionException as e:
463 log.exception(
467 log.exception(
464 "Error during diff operation operation. "
468 "Error during diff operation operation. "
465 "Path might not exist %s, %s" % (path1, path2))
469 "Path might not exist %s, %s" % (path1, path2))
466 return ""
470 return ""
467
471
468 @reraise_safe_exceptions
472 @reraise_safe_exceptions
469 def is_large_file(self, wire, path):
473 def is_large_file(self, wire, path):
470 return False
474 return False
471
475
472 @reraise_safe_exceptions
476 @reraise_safe_exceptions
473 def is_binary(self, wire, rev, path):
477 def is_binary(self, wire, rev, path):
474 cache_on, context_uid, repo_id = self._cache_on(wire)
478 cache_on, context_uid, repo_id = self._cache_on(wire)
475 region = self._region(wire)
479 region = self._region(wire)
476
480
477 @region.conditional_cache_on_arguments(condition=cache_on)
481 @region.conditional_cache_on_arguments(condition=cache_on)
478 def _is_binary(_repo_id, _rev, _path):
482 def _is_binary(_repo_id, _rev, _path):
479 raw_bytes = self.get_file_content(wire, path, rev)
483 raw_bytes = self.get_file_content(wire, path, rev)
480 return raw_bytes and '\0' in raw_bytes
484 return raw_bytes and b'\0' in raw_bytes
481
485
482 return _is_binary(repo_id, rev, path)
486 return _is_binary(repo_id, rev, path)
483
487
484 @reraise_safe_exceptions
488 @reraise_safe_exceptions
485 def md5_hash(self, wire, rev, path):
489 def md5_hash(self, wire, rev, path):
486 cache_on, context_uid, repo_id = self._cache_on(wire)
490 cache_on, context_uid, repo_id = self._cache_on(wire)
487 region = self._region(wire)
491 region = self._region(wire)
488
492
489 @region.conditional_cache_on_arguments(condition=cache_on)
493 @region.conditional_cache_on_arguments(condition=cache_on)
490 def _md5_hash(_repo_id, _rev, _path):
494 def _md5_hash(_repo_id, _rev, _path):
491 return ''
495 return ''
492
496
493 return _md5_hash(repo_id, rev, path)
497 return _md5_hash(repo_id, rev, path)
494
498
495 @reraise_safe_exceptions
499 @reraise_safe_exceptions
496 def run_svn_command(self, wire, cmd, **opts):
500 def run_svn_command(self, wire, cmd, **opts):
497 path = wire.get('path', None)
501 path = wire.get('path', None)
498
502
499 if path and os.path.isdir(path):
503 if path and os.path.isdir(path):
500 opts['cwd'] = path
504 opts['cwd'] = path
501
505
502 safe_call = opts.pop('_safe', False)
506 safe_call = opts.pop('_safe', False)
503
507
504 svnenv = os.environ.copy()
508 svnenv = os.environ.copy()
505 svnenv.update(opts.pop('extra_env', {}))
509 svnenv.update(opts.pop('extra_env', {}))
506
510
507 _opts = {'env': svnenv, 'shell': False}
511 _opts = {'env': svnenv, 'shell': False}
508
512
509 try:
513 try:
510 _opts.update(opts)
514 _opts.update(opts)
511 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
515 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
512
516
513 return b''.join(proc), b''.join(proc.stderr)
517 return b''.join(proc), b''.join(proc.stderr)
514 except OSError as err:
518 except OSError as err:
515 if safe_call:
519 if safe_call:
516 return '', safe_str(err).strip()
520 return '', safe_str(err).strip()
517 else:
521 else:
518 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
522 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
519 tb_err = ("Couldn't run svn command (%s).\n"
523 tb_err = ("Couldn't run svn command (%s).\n"
520 "Original error was:%s\n"
524 "Original error was:%s\n"
521 "Call options:%s\n"
525 "Call options:%s\n"
522 % (cmd, err, _opts))
526 % (cmd, err, _opts))
523 log.exception(tb_err)
527 log.exception(tb_err)
524 raise exceptions.VcsException()(tb_err)
528 raise exceptions.VcsException()(tb_err)
525
529
526 @reraise_safe_exceptions
530 @reraise_safe_exceptions
527 def install_hooks(self, wire, force=False):
531 def install_hooks(self, wire, force=False):
528 from vcsserver.hook_utils import install_svn_hooks
532 from vcsserver.hook_utils import install_svn_hooks
529 repo_path = wire['path']
533 repo_path = wire['path']
530 binary_dir = settings.BINARY_DIR
534 binary_dir = settings.BINARY_DIR
531 executable = None
535 executable = None
532 if binary_dir:
536 if binary_dir:
533 executable = os.path.join(binary_dir, 'python')
537 executable = os.path.join(binary_dir, 'python3')
534 return install_svn_hooks(
538 return install_svn_hooks(repo_path, force_create=force)
535 repo_path, executable=executable, force_create=force)
536
539
537 @reraise_safe_exceptions
540 @reraise_safe_exceptions
538 def get_hooks_info(self, wire):
541 def get_hooks_info(self, wire):
539 from vcsserver.hook_utils import (
542 from vcsserver.hook_utils import (
540 get_svn_pre_hook_version, get_svn_post_hook_version)
543 get_svn_pre_hook_version, get_svn_post_hook_version)
541 repo_path = wire['path']
544 repo_path = wire['path']
542 return {
545 return {
543 'pre_version': get_svn_pre_hook_version(repo_path),
546 'pre_version': get_svn_pre_hook_version(repo_path),
544 'post_version': get_svn_post_hook_version(repo_path),
547 'post_version': get_svn_post_hook_version(repo_path),
545 }
548 }
546
549
547 @reraise_safe_exceptions
550 @reraise_safe_exceptions
548 def set_head_ref(self, wire, head_name):
551 def set_head_ref(self, wire, head_name):
549 pass
552 pass
550
553
551 @reraise_safe_exceptions
554 @reraise_safe_exceptions
552 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
555 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
553 archive_dir_name, commit_id):
556 archive_dir_name, commit_id):
554
557
555 def walk_tree(root, root_dir, _commit_id):
558 def walk_tree(root, root_dir, _commit_id):
556 """
559 """
557 Special recursive svn repo walker
560 Special recursive svn repo walker
558 """
561 """
559
562
560 filemode_default = 0o100644
563 filemode_default = 0o100644
561 filemode_executable = 0o100755
564 filemode_executable = 0o100755
562
565
563 file_iter = svn.fs.dir_entries(root, root_dir)
566 file_iter = svn.fs.dir_entries(root, root_dir)
564 for f_name in file_iter:
567 for f_name in file_iter:
565 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
568 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
566
569
567 if f_type == 'dir':
570 if f_type == 'dir':
568 # return only DIR, and then all entries in that dir
571 # return only DIR, and then all entries in that dir
569 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
572 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
570 new_root = os.path.join(root_dir, f_name)
573 new_root = os.path.join(root_dir, f_name)
571 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
574 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
572 yield _f_name, _f_data, _f_type
575 yield _f_name, _f_data, _f_type
573 else:
576 else:
574 f_path = os.path.join(root_dir, f_name).rstrip('/')
577 f_path = os.path.join(root_dir, f_name).rstrip('/')
575 prop_list = svn.fs.node_proplist(root, f_path)
578 prop_list = svn.fs.node_proplist(root, f_path)
576
579
577 f_mode = filemode_default
580 f_mode = filemode_default
578 if prop_list.get('svn:executable'):
581 if prop_list.get('svn:executable'):
579 f_mode = filemode_executable
582 f_mode = filemode_executable
580
583
581 f_is_link = False
584 f_is_link = False
582 if prop_list.get('svn:special'):
585 if prop_list.get('svn:special'):
583 f_is_link = True
586 f_is_link = True
584
587
585 data = {
588 data = {
586 'is_link': f_is_link,
589 'is_link': f_is_link,
587 'mode': f_mode,
590 'mode': f_mode,
588 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
591 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
589 }
592 }
590
593
591 yield f_path, data, f_type
594 yield f_path, data, f_type
592
595
593 def file_walker(_commit_id, path):
596 def file_walker(_commit_id, path):
594 repo = self._factory.repo(wire)
597 repo = self._factory.repo(wire)
595 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
598 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
596
599
597 def no_content():
600 def no_content():
598 raise NoContentException()
601 raise NoContentException()
599
602
600 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
603 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
601 file_path = f_name
604 file_path = f_name
602
605
603 if f_type == 'dir':
606 if f_type == 'dir':
604 mode = f_data['mode']
607 mode = f_data['mode']
605 yield ArchiveNode(file_path, mode, False, no_content)
608 yield ArchiveNode(file_path, mode, False, no_content)
606 else:
609 else:
607 mode = f_data['mode']
610 mode = f_data['mode']
608 is_link = f_data['is_link']
611 is_link = f_data['is_link']
609 data_stream = f_data['content_stream']
612 data_stream = f_data['content_stream']
610 yield ArchiveNode(file_path, mode, is_link, data_stream)
613 yield ArchiveNode(file_path, mode, is_link, data_stream)
611
614
612 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
615 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
613 archive_dir_name, commit_id)
616 archive_dir_name, commit_id)
614
617
615
618
class SvnDiffer(object):
    """
    Utility to create git-style unified diffs from two revisions of a
    Subversion filesystem, using difflib and the Subversion api.
    """

    # set per-node while diffing; True suppresses textual diff output
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffs between nodes of different kinds (file vs dir)."""
        both_exist = (
            self.tgt_kind != svn.core.svn_node_none and
            self.src_kind != svn.core.svn_node_none)
        if both_exist and self.src_kind != self.tgt_kind:
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the complete diff text for the configured revisions."""
        buf = io.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # collect per-path changes via a delta editor, then diff each node
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write the git-style diff for a single node into `buf`."""

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        """Return the svn:mime-type property, preferring the target root."""
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Read `node_path` from `fs_root` and split into keepends lines."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
775
778
776
779
class DiffChangeEditor(svn.delta.Editor):
    """
    Delta editor that records (path, kind, change) tuples describing the
    differences between two revisions.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
795
798
796
799
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for svn.repos.dir_delta2 that permits every path."""
    return True
799
802
800
803
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the
    method `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # paths arrive pre-converted to bytes by SvnRemote.commit
        assert isinstance(node['path'], bytes)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create/overwrite the node: dirs, file, content, properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # walk up until an existing ancestor, then create missing dirs
        # top-down
        missing = []
        curdir = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(curdir):
            missing.append(curdir)
            curdir = vcspath.dirname(curdir)

        for dir_path in reversed(missing):
            log.debug('Creating missing directory "%s"', dir_path)
            svn.fs.make_dir(self.txn_root, dir_path)

    def _svn_path_exists(self, path):
        return svn.fs.check_path(self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        # content arrives pre-converted to bytes by SvnRemote.commit
        assert isinstance(self.node['content'], bytes)

        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for key, value in self.node.get('properties', {}).items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
857
861
858
862
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds) into an APR timestamp value
    (microseconds, as used by apr_time_t).
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
864
868
865
869
def svn_opt_revision_value_t(num):
    """
    Wrap revision number `num` into a `svn_opt_revision_t` structure of
    kind `svn_opt_revision_number`.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num

    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,56 +1,56 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19 import pytest
19 import pytest
20
20
21
21
22 def pytest_addoption(parser):
22 def pytest_addoption(parser):
23 parser.addoption(
23 parser.addoption(
24 '--perf-repeat-vcs', type=int, default=100,
24 '--perf-repeat-vcs', type=int, default=100,
25 help="Number of repetitions in performance tests.")
25 help="Number of repetitions in performance tests.")
26
26
27
27
28 @pytest.fixture(scope='session')
28 @pytest.fixture(scope='session')
29 def repeat(request):
29 def repeat(request):
30 """
30 """
31 The number of repetitions is based on this fixture.
31 The number of repetitions is based on this fixture.
32
32
33 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
33 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
34 tests are not too slow in our default test suite.
34 tests are not too slow in our default test suite.
35 """
35 """
36 return request.config.getoption('--perf-repeat-vcs')
36 return request.config.getoption('--perf-repeat-vcs')
37
37
38
38
39 @pytest.fixture(scope='session')
39 @pytest.fixture(scope='session')
40 def vcsserver_port(request):
40 def vcsserver_port(request):
41 port = get_available_port()
41 port = get_available_port()
42 print(('Using vcsserver port %s' % (port, )))
42 print(f'Using vcsserver port {port}')
43 return port
43 return port
44
44
45
45
46 def get_available_port():
46 def get_available_port():
47 family = socket.AF_INET
47 family = socket.AF_INET
48 socktype = socket.SOCK_STREAM
48 socktype = socket.SOCK_STREAM
49 host = '127.0.0.1'
49 host = '127.0.0.1'
50
50
51 mysocket = socket.socket(family, socktype)
51 mysocket = socket.socket(family, socktype)
52 mysocket.bind((host, 0))
52 mysocket.bind((host, 0))
53 port = mysocket.getsockname()[1]
53 port = mysocket.getsockname()[1]
54 mysocket.close()
54 mysocket.close()
55 del mysocket
55 del mysocket
56 return port
56 return port
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver.remote import git
24 from vcsserver.remote import git
25
25
26 SAMPLE_REFS = {
26 SAMPLE_REFS = {
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 }
32 }
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture
36 def git_remote():
36 def git_remote():
37 """
37 """
38 A GitRemote instance with a mock factory.
38 A GitRemote instance with a mock factory.
39 """
39 """
40 factory = Mock()
40 factory = Mock()
41 remote = git.GitRemote(factory)
41 remote = git.GitRemote(factory)
42 return remote
42 return remote
43
43
44
44
45 def test_discover_git_version(git_remote):
45 def test_discover_git_version(git_remote):
46 version = git_remote.discover_git_version()
46 version = git_remote.discover_git_version()
47 assert version
47 assert version
48
48
49
49
50 class TestGitFetch(object):
50 class TestGitFetch(object):
51 def setup_method(self):
51 def setup_method(self):
52 self.mock_repo = Mock()
52 self.mock_repo = Mock()
53 factory = Mock()
53 factory = Mock()
54 factory.repo = Mock(return_value=self.mock_repo)
54 factory.repo = Mock(return_value=self.mock_repo)
55 self.remote_git = git.GitRemote(factory)
55 self.remote_git = git.GitRemote(factory)
56
56
57 def test_fetches_all_when_no_commit_ids_specified(self):
57 def test_fetches_all_when_no_commit_ids_specified(self):
58 def side_effect(determine_wants, *args, **kwargs):
58 def side_effect(determine_wants, *args, **kwargs):
59 determine_wants(SAMPLE_REFS)
59 determine_wants(SAMPLE_REFS)
60
60
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 mock_fetch.side_effect = side_effect
62 mock_fetch.side_effect = side_effect
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 determine_wants = self.mock_repo.object_store.determine_wants_all
64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66
66
67 def test_fetches_specified_commits(self):
67 def test_fetches_specified_commits(self):
68 selected_refs = {
68 selected_refs = {
69 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
69 'refs/tags/v0.1.8': b'74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
70 'refs/tags/v0.1.3': b'5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 }
71 }
72
72
73 def side_effect(determine_wants, *args, **kwargs):
73 def side_effect(determine_wants, *args, **kwargs):
74 result = determine_wants(SAMPLE_REFS)
74 result = determine_wants(SAMPLE_REFS)
75 assert sorted(result) == sorted(selected_refs.values())
75 assert sorted(result) == sorted(selected_refs.values())
76 return result
76 return result
77
77
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 mock_fetch.side_effect = side_effect
79 mock_fetch.side_effect = side_effect
80 self.remote_git.pull(
80 self.remote_git.pull(
81 wire={}, url='/tmp/', apply_refs=False,
81 wire={}, url='/tmp/', apply_refs=False,
82 refs=list(selected_refs.keys()))
82 refs=list(selected_refs.keys()))
83 determine_wants = self.mock_repo.object_store.determine_wants_all
83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 assert determine_wants.call_count == 0
84 assert determine_wants.call_count == 0
85
85
86 def test_get_remote_refs(self):
86 def test_get_remote_refs(self):
87 factory = Mock()
87 factory = Mock()
88 remote_git = git.GitRemote(factory)
88 remote_git = git.GitRemote(factory)
89 url = 'http://example.com/test/test.git'
89 url = 'http://example.com/test/test.git'
90 sample_refs = {
90 sample_refs = {
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 }
93 }
94
94
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
96 mock_repo().get_refs.return_value = sample_refs
96 mock_repo().get_refs.return_value = sample_refs
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 mock_repo().get_refs.assert_called_once_with()
98 mock_repo().get_refs.assert_called_once_with()
99 assert remote_refs == sample_refs
99 assert remote_refs == sample_refs
100
100
101
101
102 class TestReraiseSafeExceptions(object):
102 class TestReraiseSafeExceptions(object):
103
103
104 def test_method_decorated_with_reraise_safe_exceptions(self):
104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 factory = Mock()
105 factory = Mock()
106 git_remote = git.GitRemote(factory)
106 git_remote = git.GitRemote(factory)
107
107
108 def fake_function():
108 def fake_function():
109 return None
109 return None
110
110
111 decorator = git.reraise_safe_exceptions(fake_function)
111 decorator = git.reraise_safe_exceptions(fake_function)
112
112
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 for method_name, method in methods:
114 for method_name, method in methods:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 assert method.__func__.__code__ == decorator.__code__
116 assert method.__func__.__code__ == decorator.__code__
117
117
118 @pytest.mark.parametrize('side_effect, expected_type', [
118 @pytest.mark.parametrize('side_effect, expected_type', [
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.HangupException(), 'error'),
123 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 ])
125 ])
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 @git.reraise_safe_exceptions
127 @git.reraise_safe_exceptions
128 def fake_method():
128 def fake_method():
129 raise side_effect
129 raise side_effect
130
130
131 with pytest.raises(Exception) as exc_info:
131 with pytest.raises(Exception) as exc_info:
132 fake_method()
132 fake_method()
133 assert type(exc_info.value) == Exception
133 assert type(exc_info.value) == Exception
134 assert exc_info.value._vcs_kind == expected_type
134 assert exc_info.value._vcs_kind == expected_type
135
135
136
136
137 class TestDulwichRepoWrapper(object):
137 class TestDulwichRepoWrapper(object):
138 def test_calls_close_on_delete(self):
138 def test_calls_close_on_delete(self):
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
140 with patch.object(git.Repo, 'close') as close_mock:
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 assert repo is not None
143 assert repo is not None
144 repo.__del__()
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
146
147 close_mock.assert_called_once_with()
147 close_mock.assert_called_once_with()
148
148
149
149
150 class TestGitFactory(object):
150 class TestGitFactory(object):
151 def test_create_repo_returns_dulwich_wrapper(self):
151 def test_create_repo_returns_dulwich_wrapper(self):
152
152
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 mock.side_effect = {'repo_objects': ''}
154 mock.side_effect = {'repo_objects': ''}
155 factory = git.GitFactory()
155 factory = git.GitFactory()
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git.Repo)
General Comments 0
You need to be logged in to leave comments. Login now