hooks: better error reporting for hooks module errors
super-admin
r1159:90d8161d default
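The change is confined to _get_hooks_client() in the hooks module: when neither hooks_uri nor is_shadow_repo is set in the call extras, the fallback path previously called HooksDummyClient(extras['hooks_module']) and failed with a bare KeyError if the key was missing; it now logs the extras it actually received before re-raising. Below is a minimal sketch of the new pattern, using a simplified stand-in for HooksDummyClient and a hypothetical get_dummy_hooks_client() wrapper (names not present in the diff are illustrative only).

import logging

log = logging.getLogger(__name__)


class HooksDummyClient:
    # simplified stand-in; the real class resolves the module via importlib
    def __init__(self, hooks_module):
        self._hooks_module = hooks_module


def get_dummy_hooks_client(extras):
    # behaviour added by this commit: report which extras were supplied
    # before re-raising, so a missing 'hooks_module' key is easy to diagnose
    try:
        import_module = extras['hooks_module']
    except KeyError:
        log.error('Failed to get "hooks_module" from extras: %s', extras)
        raise
    return HooksDummyClient(import_module)

The failure mode is unchanged (the KeyError still propagates to the caller), but the error log now records the full extras dict, which is what the commit title refers to as better error reporting.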
@@ -1,779 +1,784 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import logging
21 import logging
22 import collections
22 import collections
23 import importlib
23 import importlib
24 import base64
24 import base64
25 import msgpack
25 import msgpack
26 import dataclasses
26 import dataclasses
27 import pygit2
27 import pygit2
28
28
29 import http.client
29 import http.client
30
30
31
31
32 import mercurial.scmutil
32 import mercurial.scmutil
33 import mercurial.node
33 import mercurial.node
34
34
35 from vcsserver.lib.rc_json import json
35 from vcsserver.lib.rc_json import json
36 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver import exceptions, subprocessio, settings
37 from vcsserver.str_utils import ascii_str, safe_str
37 from vcsserver.str_utils import ascii_str, safe_str
38 from vcsserver.remote.git_remote import Repository
38 from vcsserver.remote.git_remote import Repository
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HooksHttpClient:
43 class HooksHttpClient:
44 proto = 'msgpack.v1'
44 proto = 'msgpack.v1'
45 connection = None
45 connection = None
46
46
47 def __init__(self, hooks_uri):
47 def __init__(self, hooks_uri):
48 self.hooks_uri = hooks_uri
48 self.hooks_uri = hooks_uri
49
49
50 def __repr__(self):
50 def __repr__(self):
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52
52
53 def __call__(self, method, extras):
53 def __call__(self, method, extras):
54 connection = http.client.HTTPConnection(self.hooks_uri)
54 connection = http.client.HTTPConnection(self.hooks_uri)
55 # binary msgpack body
55 # binary msgpack body
56 headers, body = self._serialize(method, extras)
56 headers, body = self._serialize(method, extras)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58
58
59 try:
59 try:
60 try:
60 try:
61 connection.request('POST', '/', body, headers)
61 connection.request('POST', '/', body, headers)
62 except Exception as error:
62 except Exception as error:
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 raise
64 raise
65
65
66 response = connection.getresponse()
66 response = connection.getresponse()
67 try:
67 try:
68 return msgpack.load(response)
68 return msgpack.load(response)
69 except Exception:
69 except Exception:
70 response_data = response.read()
70 response_data = response.read()
71 log.exception('Failed to decode hook response json data. '
71 log.exception('Failed to decode hook response json data. '
72 'response_code:%s, raw_data:%s',
72 'response_code:%s, raw_data:%s',
73 response.status, response_data)
73 response.status, response_data)
74 raise
74 raise
75 finally:
75 finally:
76 connection.close()
76 connection.close()
77
77
78 @classmethod
78 @classmethod
79 def _serialize(cls, hook_name, extras):
79 def _serialize(cls, hook_name, extras):
80 data = {
80 data = {
81 'method': hook_name,
81 'method': hook_name,
82 'extras': extras
82 'extras': extras
83 }
83 }
84 headers = {
84 headers = {
85 "rc-hooks-protocol": cls.proto,
85 "rc-hooks-protocol": cls.proto,
86 "Connection": "keep-alive"
86 "Connection": "keep-alive"
87 }
87 }
88 return headers, msgpack.packb(data)
88 return headers, msgpack.packb(data)
89
89
90
90
91 class HooksDummyClient:
91 class HooksDummyClient:
92 def __init__(self, hooks_module):
92 def __init__(self, hooks_module):
93 self._hooks_module = importlib.import_module(hooks_module)
93 self._hooks_module = importlib.import_module(hooks_module)
94
94
95 def __call__(self, hook_name, extras):
95 def __call__(self, hook_name, extras):
96 with self._hooks_module.Hooks() as hooks:
96 with self._hooks_module.Hooks() as hooks:
97 return getattr(hooks, hook_name)(extras)
97 return getattr(hooks, hook_name)(extras)
98
98
99
99
100 class HooksShadowRepoClient:
100 class HooksShadowRepoClient:
101
101
102 def __call__(self, hook_name, extras):
102 def __call__(self, hook_name, extras):
103 return {'output': '', 'status': 0}
103 return {'output': '', 'status': 0}
104
104
105
105
106 class RemoteMessageWriter:
106 class RemoteMessageWriter:
107 """Writer base class."""
107 """Writer base class."""
108 def write(self, message):
108 def write(self, message):
109 raise NotImplementedError()
109 raise NotImplementedError()
110
110
111
111
112 class HgMessageWriter(RemoteMessageWriter):
112 class HgMessageWriter(RemoteMessageWriter):
113 """Writer that knows how to send messages to mercurial clients."""
113 """Writer that knows how to send messages to mercurial clients."""
114
114
115 def __init__(self, ui):
115 def __init__(self, ui):
116 self.ui = ui
116 self.ui = ui
117
117
118 def write(self, message: str):
118 def write(self, message: str):
119 # TODO: Check why the quiet flag is set by default.
119 # TODO: Check why the quiet flag is set by default.
120 old = self.ui.quiet
120 old = self.ui.quiet
121 self.ui.quiet = False
121 self.ui.quiet = False
122 self.ui.status(message.encode('utf-8'))
122 self.ui.status(message.encode('utf-8'))
123 self.ui.quiet = old
123 self.ui.quiet = old
124
124
125
125
126 class GitMessageWriter(RemoteMessageWriter):
126 class GitMessageWriter(RemoteMessageWriter):
127 """Writer that knows how to send messages to git clients."""
127 """Writer that knows how to send messages to git clients."""
128
128
129 def __init__(self, stdout=None):
129 def __init__(self, stdout=None):
130 self.stdout = stdout or sys.stdout
130 self.stdout = stdout or sys.stdout
131
131
132 def write(self, message: str):
132 def write(self, message: str):
133 self.stdout.write(message)
133 self.stdout.write(message)
134
134
135
135
136 class SvnMessageWriter(RemoteMessageWriter):
136 class SvnMessageWriter(RemoteMessageWriter):
137 """Writer that knows how to send messages to svn clients."""
137 """Writer that knows how to send messages to svn clients."""
138
138
139 def __init__(self, stderr=None):
139 def __init__(self, stderr=None):
140 # SVN needs data sent to stderr for back-to-client messaging
140 # SVN needs data sent to stderr for back-to-client messaging
141 self.stderr = stderr or sys.stderr
141 self.stderr = stderr or sys.stderr
142
142
143 def write(self, message):
143 def write(self, message):
144 self.stderr.write(message.encode('utf-8'))
144 self.stderr.write(message.encode('utf-8'))
145
145
146
146
147 def _handle_exception(result):
147 def _handle_exception(result):
148 exception_class = result.get('exception')
148 exception_class = result.get('exception')
149 exception_traceback = result.get('exception_traceback')
149 exception_traceback = result.get('exception_traceback')
150 log.debug('Handling hook-call exception: %s', exception_class)
150 log.debug('Handling hook-call exception: %s', exception_class)
151
151
152 if exception_traceback:
152 if exception_traceback:
153 log.error('Got traceback from remote call:%s', exception_traceback)
153 log.error('Got traceback from remote call:%s', exception_traceback)
154
154
155 if exception_class == 'HTTPLockedRC':
155 if exception_class == 'HTTPLockedRC':
156 raise exceptions.RepositoryLockedException()(*result['exception_args'])
156 raise exceptions.RepositoryLockedException()(*result['exception_args'])
157 elif exception_class == 'HTTPBranchProtected':
157 elif exception_class == 'HTTPBranchProtected':
158 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
158 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
159 elif exception_class == 'RepositoryError':
159 elif exception_class == 'RepositoryError':
160 raise exceptions.VcsException()(*result['exception_args'])
160 raise exceptions.VcsException()(*result['exception_args'])
161 elif exception_class:
161 elif exception_class:
162 raise Exception(
162 raise Exception(
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
163 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
164 )
164 )
165
165
166
166
167 def _get_hooks_client(extras):
167 def _get_hooks_client(extras):
168 hooks_uri = extras.get('hooks_uri')
168 hooks_uri = extras.get('hooks_uri')
169 is_shadow_repo = extras.get('is_shadow_repo')
169 is_shadow_repo = extras.get('is_shadow_repo')
170
170
171 if hooks_uri:
171 if hooks_uri:
172 return HooksHttpClient(extras['hooks_uri'])
172 return HooksHttpClient(extras['hooks_uri'])
173 elif is_shadow_repo:
173 elif is_shadow_repo:
174 return HooksShadowRepoClient()
174 return HooksShadowRepoClient()
175 else:
175 else:
176 return HooksDummyClient(extras['hooks_module'])
176 try:
177 import_module = extras['hooks_module']
178 except KeyError:
179 log.error('Failed to get "hooks_module" from extras: %s', extras)
180 raise
181 return HooksDummyClient(import_module)
177
182
178
183
179 def _call_hook(hook_name, extras, writer):
184 def _call_hook(hook_name, extras, writer):
180 hooks_client = _get_hooks_client(extras)
185 hooks_client = _get_hooks_client(extras)
181 log.debug('Hooks, using client:%s', hooks_client)
186 log.debug('Hooks, using client:%s', hooks_client)
182 result = hooks_client(hook_name, extras)
187 result = hooks_client(hook_name, extras)
183 log.debug('Hooks got result: %s', result)
188 log.debug('Hooks got result: %s', result)
184 _handle_exception(result)
189 _handle_exception(result)
185 writer.write(result['output'])
190 writer.write(result['output'])
186
191
187 return result['status']
192 return result['status']
188
193
189
194
190 def _extras_from_ui(ui):
195 def _extras_from_ui(ui):
191 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
196 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
192 if not hook_data:
197 if not hook_data:
193 # maybe it's inside environ ?
198 # maybe it's inside environ ?
194 env_hook_data = os.environ.get('RC_SCM_DATA')
199 env_hook_data = os.environ.get('RC_SCM_DATA')
195 if env_hook_data:
200 if env_hook_data:
196 hook_data = env_hook_data
201 hook_data = env_hook_data
197
202
198 extras = {}
203 extras = {}
199 if hook_data:
204 if hook_data:
200 extras = json.loads(hook_data)
205 extras = json.loads(hook_data)
201 return extras
206 return extras
202
207
203
208
204 def _rev_range_hash(repo, node, check_heads=False):
209 def _rev_range_hash(repo, node, check_heads=False):
205 from vcsserver.hgcompat import get_ctx
210 from vcsserver.hgcompat import get_ctx
206
211
207 commits = []
212 commits = []
208 revs = []
213 revs = []
209 start = get_ctx(repo, node).rev()
214 start = get_ctx(repo, node).rev()
210 end = len(repo)
215 end = len(repo)
211 for rev in range(start, end):
216 for rev in range(start, end):
212 revs.append(rev)
217 revs.append(rev)
213 ctx = get_ctx(repo, rev)
218 ctx = get_ctx(repo, rev)
214 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
219 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
215 branch = safe_str(ctx.branch())
220 branch = safe_str(ctx.branch())
216 commits.append((commit_id, branch))
221 commits.append((commit_id, branch))
217
222
218 parent_heads = []
223 parent_heads = []
219 if check_heads:
224 if check_heads:
220 parent_heads = _check_heads(repo, start, end, revs)
225 parent_heads = _check_heads(repo, start, end, revs)
221 return commits, parent_heads
226 return commits, parent_heads
222
227
223
228
224 def _check_heads(repo, start, end, commits):
229 def _check_heads(repo, start, end, commits):
225 from vcsserver.hgcompat import get_ctx
230 from vcsserver.hgcompat import get_ctx
226 changelog = repo.changelog
231 changelog = repo.changelog
227 parents = set()
232 parents = set()
228
233
229 for new_rev in commits:
234 for new_rev in commits:
230 for p in changelog.parentrevs(new_rev):
235 for p in changelog.parentrevs(new_rev):
231 if p == mercurial.node.nullrev:
236 if p == mercurial.node.nullrev:
232 continue
237 continue
233 if p < start:
238 if p < start:
234 parents.add(p)
239 parents.add(p)
235
240
236 for p in parents:
241 for p in parents:
237 branch = get_ctx(repo, p).branch()
242 branch = get_ctx(repo, p).branch()
238 # The heads descending from that parent, on the same branch
243 # The heads descending from that parent, on the same branch
239 parent_heads = {p}
244 parent_heads = {p}
240 reachable = {p}
245 reachable = {p}
241 for x in range(p + 1, end):
246 for x in range(p + 1, end):
242 if get_ctx(repo, x).branch() != branch:
247 if get_ctx(repo, x).branch() != branch:
243 continue
248 continue
244 for pp in changelog.parentrevs(x):
249 for pp in changelog.parentrevs(x):
245 if pp in reachable:
250 if pp in reachable:
246 reachable.add(x)
251 reachable.add(x)
247 parent_heads.discard(pp)
252 parent_heads.discard(pp)
248 parent_heads.add(x)
253 parent_heads.add(x)
249 # More than one head? Suggest merging
254 # More than one head? Suggest merging
250 if len(parent_heads) > 1:
255 if len(parent_heads) > 1:
251 return list(parent_heads)
256 return list(parent_heads)
252
257
253 return []
258 return []
254
259
255
260
256 def _get_git_env():
261 def _get_git_env():
257 env = {}
262 env = {}
258 for k, v in os.environ.items():
263 for k, v in os.environ.items():
259 if k.startswith('GIT'):
264 if k.startswith('GIT'):
260 env[k] = v
265 env[k] = v
261
266
262 # serialized version
267 # serialized version
263 return [(k, v) for k, v in env.items()]
268 return [(k, v) for k, v in env.items()]
264
269
265
270
266 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
271 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
267 env = {}
272 env = {}
268 for k, v in os.environ.items():
273 for k, v in os.environ.items():
269 if k.startswith('HG'):
274 if k.startswith('HG'):
270 env[k] = v
275 env[k] = v
271
276
272 env['HG_NODE'] = old_rev
277 env['HG_NODE'] = old_rev
273 env['HG_NODE_LAST'] = new_rev
278 env['HG_NODE_LAST'] = new_rev
274 env['HG_TXNID'] = txnid
279 env['HG_TXNID'] = txnid
275 env['HG_PENDING'] = repo_path
280 env['HG_PENDING'] = repo_path
276
281
277 return [(k, v) for k, v in env.items()]
282 return [(k, v) for k, v in env.items()]
278
283
279
284
280 def repo_size(ui, repo, **kwargs):
285 def repo_size(ui, repo, **kwargs):
281 extras = _extras_from_ui(ui)
286 extras = _extras_from_ui(ui)
282 return _call_hook('repo_size', extras, HgMessageWriter(ui))
287 return _call_hook('repo_size', extras, HgMessageWriter(ui))
283
288
284
289
285 def pre_pull(ui, repo, **kwargs):
290 def pre_pull(ui, repo, **kwargs):
286 extras = _extras_from_ui(ui)
291 extras = _extras_from_ui(ui)
287 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
292 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
288
293
289
294
290 def pre_pull_ssh(ui, repo, **kwargs):
295 def pre_pull_ssh(ui, repo, **kwargs):
291 extras = _extras_from_ui(ui)
296 extras = _extras_from_ui(ui)
292 if extras and extras.get('SSH'):
297 if extras and extras.get('SSH'):
293 return pre_pull(ui, repo, **kwargs)
298 return pre_pull(ui, repo, **kwargs)
294 return 0
299 return 0
295
300
296
301
297 def post_pull(ui, repo, **kwargs):
302 def post_pull(ui, repo, **kwargs):
298 extras = _extras_from_ui(ui)
303 extras = _extras_from_ui(ui)
299 return _call_hook('post_pull', extras, HgMessageWriter(ui))
304 return _call_hook('post_pull', extras, HgMessageWriter(ui))
300
305
301
306
302 def post_pull_ssh(ui, repo, **kwargs):
307 def post_pull_ssh(ui, repo, **kwargs):
303 extras = _extras_from_ui(ui)
308 extras = _extras_from_ui(ui)
304 if extras and extras.get('SSH'):
309 if extras and extras.get('SSH'):
305 return post_pull(ui, repo, **kwargs)
310 return post_pull(ui, repo, **kwargs)
306 return 0
311 return 0
307
312
308
313
309 def pre_push(ui, repo, node=None, **kwargs):
314 def pre_push(ui, repo, node=None, **kwargs):
310 """
315 """
311 Mercurial pre_push hook
316 Mercurial pre_push hook
312 """
317 """
313 extras = _extras_from_ui(ui)
318 extras = _extras_from_ui(ui)
314 detect_force_push = extras.get('detect_force_push')
319 detect_force_push = extras.get('detect_force_push')
315
320
316 rev_data = []
321 rev_data = []
317 hook_type: str = safe_str(kwargs.get('hooktype'))
322 hook_type: str = safe_str(kwargs.get('hooktype'))
318
323
319 if node and hook_type == 'pretxnchangegroup':
324 if node and hook_type == 'pretxnchangegroup':
320 branches = collections.defaultdict(list)
325 branches = collections.defaultdict(list)
321 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
326 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
322 for commit_id, branch in commits:
327 for commit_id, branch in commits:
323 branches[branch].append(commit_id)
328 branches[branch].append(commit_id)
324
329
325 for branch, commits in branches.items():
330 for branch, commits in branches.items():
326 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
331 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
327 rev_data.append({
332 rev_data.append({
328 'total_commits': len(commits),
333 'total_commits': len(commits),
329 'old_rev': old_rev,
334 'old_rev': old_rev,
330 'new_rev': commits[-1],
335 'new_rev': commits[-1],
331 'ref': '',
336 'ref': '',
332 'type': 'branch',
337 'type': 'branch',
333 'name': branch,
338 'name': branch,
334 })
339 })
335
340
336 for push_ref in rev_data:
341 for push_ref in rev_data:
337 push_ref['multiple_heads'] = _heads
342 push_ref['multiple_heads'] = _heads
338
343
339 repo_path = os.path.join(
344 repo_path = os.path.join(
340 extras.get('repo_store', ''), extras.get('repository', ''))
345 extras.get('repo_store', ''), extras.get('repository', ''))
341 push_ref['hg_env'] = _get_hg_env(
346 push_ref['hg_env'] = _get_hg_env(
342 old_rev=push_ref['old_rev'],
347 old_rev=push_ref['old_rev'],
343 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
348 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
344 repo_path=repo_path)
349 repo_path=repo_path)
345
350
346 extras['hook_type'] = hook_type or 'pre_push'
351 extras['hook_type'] = hook_type or 'pre_push'
347 extras['commit_ids'] = rev_data
352 extras['commit_ids'] = rev_data
348
353
349 return _call_hook('pre_push', extras, HgMessageWriter(ui))
354 return _call_hook('pre_push', extras, HgMessageWriter(ui))
350
355
351
356
352 def pre_push_ssh(ui, repo, node=None, **kwargs):
357 def pre_push_ssh(ui, repo, node=None, **kwargs):
353 extras = _extras_from_ui(ui)
358 extras = _extras_from_ui(ui)
354 if extras.get('SSH'):
359 if extras.get('SSH'):
355 return pre_push(ui, repo, node, **kwargs)
360 return pre_push(ui, repo, node, **kwargs)
356
361
357 return 0
362 return 0
358
363
359
364
360 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
365 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
361 """
366 """
362 Mercurial pre_push hook for SSH
367 Mercurial pre_push hook for SSH
363 """
368 """
364 extras = _extras_from_ui(ui)
369 extras = _extras_from_ui(ui)
365 if extras.get('SSH'):
370 if extras.get('SSH'):
366 permission = extras['SSH_PERMISSIONS']
371 permission = extras['SSH_PERMISSIONS']
367
372
368 if 'repository.write' == permission or 'repository.admin' == permission:
373 if 'repository.write' == permission or 'repository.admin' == permission:
369 return 0
374 return 0
370
375
371 # non-zero ret code
376 # non-zero ret code
372 return 1
377 return 1
373
378
374 return 0
379 return 0
375
380
376
381
377 def post_push(ui, repo, node, **kwargs):
382 def post_push(ui, repo, node, **kwargs):
378 """
383 """
379 Mercurial post_push hook
384 Mercurial post_push hook
380 """
385 """
381 extras = _extras_from_ui(ui)
386 extras = _extras_from_ui(ui)
382
387
383 commit_ids = []
388 commit_ids = []
384 branches = []
389 branches = []
385 bookmarks = []
390 bookmarks = []
386 tags = []
391 tags = []
387 hook_type: str = safe_str(kwargs.get('hooktype'))
392 hook_type: str = safe_str(kwargs.get('hooktype'))
388
393
389 commits, _heads = _rev_range_hash(repo, node)
394 commits, _heads = _rev_range_hash(repo, node)
390 for commit_id, branch in commits:
395 for commit_id, branch in commits:
391 commit_ids.append(commit_id)
396 commit_ids.append(commit_id)
392 if branch not in branches:
397 if branch not in branches:
393 branches.append(branch)
398 branches.append(branch)
394
399
395 if hasattr(ui, '_rc_pushkey_bookmarks'):
400 if hasattr(ui, '_rc_pushkey_bookmarks'):
396 bookmarks = ui._rc_pushkey_bookmarks
401 bookmarks = ui._rc_pushkey_bookmarks
397
402
398 extras['hook_type'] = hook_type or 'post_push'
403 extras['hook_type'] = hook_type or 'post_push'
399 extras['commit_ids'] = commit_ids
404 extras['commit_ids'] = commit_ids
400
405
401 extras['new_refs'] = {
406 extras['new_refs'] = {
402 'branches': branches,
407 'branches': branches,
403 'bookmarks': bookmarks,
408 'bookmarks': bookmarks,
404 'tags': tags
409 'tags': tags
405 }
410 }
406
411
407 return _call_hook('post_push', extras, HgMessageWriter(ui))
412 return _call_hook('post_push', extras, HgMessageWriter(ui))
408
413
409
414
410 def post_push_ssh(ui, repo, node, **kwargs):
415 def post_push_ssh(ui, repo, node, **kwargs):
411 """
416 """
412 Mercurial post_push hook for SSH
417 Mercurial post_push hook for SSH
413 """
418 """
414 if _extras_from_ui(ui).get('SSH'):
419 if _extras_from_ui(ui).get('SSH'):
415 return post_push(ui, repo, node, **kwargs)
420 return post_push(ui, repo, node, **kwargs)
416 return 0
421 return 0
417
422
418
423
419 def key_push(ui, repo, **kwargs):
424 def key_push(ui, repo, **kwargs):
420 from vcsserver.hgcompat import get_ctx
425 from vcsserver.hgcompat import get_ctx
421
426
422 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
427 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
423 # store new bookmarks in our UI object propagated later to post_push
428 # store new bookmarks in our UI object propagated later to post_push
424 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
429 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
425 return
430 return
426
431
427
432
428 # backward compat
433 # backward compat
429 log_pull_action = post_pull
434 log_pull_action = post_pull
430
435
431 # backward compat
436 # backward compat
432 log_push_action = post_push
437 log_push_action = post_push
433
438
434
439
435 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
440 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
436 """
441 """
437 Old hook name: keep here for backward compatibility.
442 Old hook name: keep here for backward compatibility.
438
443
439 This is only required when the installed git hooks are not upgraded.
444 This is only required when the installed git hooks are not upgraded.
440 """
445 """
441 pass
446 pass
442
447
443
448
444 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
449 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
445 """
450 """
446 Old hook name: keep here for backward compatibility.
451 Old hook name: keep here for backward compatibility.
447
452
448 This is only required when the installed git hooks are not upgraded.
453 This is only required when the installed git hooks are not upgraded.
449 """
454 """
450 pass
455 pass
451
456
452
457
453 @dataclasses.dataclass
458 @dataclasses.dataclass
454 class HookResponse:
459 class HookResponse:
455 status: int
460 status: int
456 output: str
461 output: str
457
462
458
463
459 def git_pre_pull(extras) -> HookResponse:
464 def git_pre_pull(extras) -> HookResponse:
460 """
465 """
461 Pre pull hook.
466 Pre pull hook.
462
467
463 :param extras: dictionary containing the keys defined in simplevcs
468 :param extras: dictionary containing the keys defined in simplevcs
464 :type extras: dict
469 :type extras: dict
465
470
466 :return: status code of the hook. 0 for success.
471 :return: status code of the hook. 0 for success.
467 :rtype: int
472 :rtype: int
468 """
473 """
469
474
470 if 'pull' not in extras['hooks']:
475 if 'pull' not in extras['hooks']:
471 return HookResponse(0, '')
476 return HookResponse(0, '')
472
477
473 stdout = io.StringIO()
478 stdout = io.StringIO()
474 try:
479 try:
475 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
480 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
476
481
477 except Exception as error:
482 except Exception as error:
478 log.exception('Failed to call pre_pull hook')
483 log.exception('Failed to call pre_pull hook')
479 status_code = 128
484 status_code = 128
480 stdout.write(f'ERROR: {error}\n')
485 stdout.write(f'ERROR: {error}\n')
481
486
482 return HookResponse(status_code, stdout.getvalue())
487 return HookResponse(status_code, stdout.getvalue())
483
488
484
489
485 def git_post_pull(extras) -> HookResponse:
490 def git_post_pull(extras) -> HookResponse:
486 """
491 """
487 Post pull hook.
492 Post pull hook.
488
493
489 :param extras: dictionary containing the keys defined in simplevcs
494 :param extras: dictionary containing the keys defined in simplevcs
490 :type extras: dict
495 :type extras: dict
491
496
492 :return: status code of the hook. 0 for success.
497 :return: status code of the hook. 0 for success.
493 :rtype: int
498 :rtype: int
494 """
499 """
495 if 'pull' not in extras['hooks']:
500 if 'pull' not in extras['hooks']:
496 return HookResponse(0, '')
501 return HookResponse(0, '')
497
502
498 stdout = io.StringIO()
503 stdout = io.StringIO()
499 try:
504 try:
500 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
505 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
501 except Exception as error:
506 except Exception as error:
502 status = 128
507 status = 128
503 stdout.write(f'ERROR: {error}\n')
508 stdout.write(f'ERROR: {error}\n')
504
509
505 return HookResponse(status, stdout.getvalue())
510 return HookResponse(status, stdout.getvalue())
506
511
507
512
508 def _parse_git_ref_lines(revision_lines):
513 def _parse_git_ref_lines(revision_lines):
509 rev_data = []
514 rev_data = []
510 for revision_line in revision_lines or []:
515 for revision_line in revision_lines or []:
511 old_rev, new_rev, ref = revision_line.strip().split(' ')
516 old_rev, new_rev, ref = revision_line.strip().split(' ')
512 ref_data = ref.split('/', 2)
517 ref_data = ref.split('/', 2)
513 if ref_data[1] in ('tags', 'heads'):
518 if ref_data[1] in ('tags', 'heads'):
514 rev_data.append({
519 rev_data.append({
515 # NOTE(marcink):
520 # NOTE(marcink):
516 # we're unable to tell total_commits for git at this point
521 # we're unable to tell total_commits for git at this point
517 # but we set the variable for consistency with GIT
522 # but we set the variable for consistency with GIT
518 'total_commits': -1,
523 'total_commits': -1,
519 'old_rev': old_rev,
524 'old_rev': old_rev,
520 'new_rev': new_rev,
525 'new_rev': new_rev,
521 'ref': ref,
526 'ref': ref,
522 'type': ref_data[1],
527 'type': ref_data[1],
523 'name': ref_data[2],
528 'name': ref_data[2],
524 })
529 })
525 return rev_data
530 return rev_data
526
531
527
532
528 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
533 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
529 """
534 """
530 Pre push hook.
535 Pre push hook.
531
536
532 :return: status code of the hook. 0 for success.
537 :return: status code of the hook. 0 for success.
533 """
538 """
534 extras = json.loads(env['RC_SCM_DATA'])
539 extras = json.loads(env['RC_SCM_DATA'])
535 rev_data = _parse_git_ref_lines(revision_lines)
540 rev_data = _parse_git_ref_lines(revision_lines)
536 if 'push' not in extras['hooks']:
541 if 'push' not in extras['hooks']:
537 return 0
542 return 0
538 empty_commit_id = '0' * 40
543 empty_commit_id = '0' * 40
539
544
540 detect_force_push = extras.get('detect_force_push')
545 detect_force_push = extras.get('detect_force_push')
541
546
542 for push_ref in rev_data:
547 for push_ref in rev_data:
543 # store our git-env which holds the temp store
548 # store our git-env which holds the temp store
544 push_ref['git_env'] = _get_git_env()
549 push_ref['git_env'] = _get_git_env()
545 push_ref['pruned_sha'] = ''
550 push_ref['pruned_sha'] = ''
546 if not detect_force_push:
551 if not detect_force_push:
547 # don't check for forced-push when we don't need to
552 # don't check for forced-push when we don't need to
548 continue
553 continue
549
554
550 type_ = push_ref['type']
555 type_ = push_ref['type']
551 new_branch = push_ref['old_rev'] == empty_commit_id
556 new_branch = push_ref['old_rev'] == empty_commit_id
552 delete_branch = push_ref['new_rev'] == empty_commit_id
557 delete_branch = push_ref['new_rev'] == empty_commit_id
553 if type_ == 'heads' and not (new_branch or delete_branch):
558 if type_ == 'heads' and not (new_branch or delete_branch):
554 old_rev = push_ref['old_rev']
559 old_rev = push_ref['old_rev']
555 new_rev = push_ref['new_rev']
560 new_rev = push_ref['new_rev']
556 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, f'^{new_rev}']
561 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, f'^{new_rev}']
557 stdout, stderr = subprocessio.run_command(
562 stdout, stderr = subprocessio.run_command(
558 cmd, env=os.environ.copy())
563 cmd, env=os.environ.copy())
559 # a non-empty result means there are non-reachable objects, i.e. a forced push was used
564 # a non-empty result means there are non-reachable objects, i.e. a forced push was used
560 if stdout:
565 if stdout:
561 push_ref['pruned_sha'] = stdout.splitlines()
566 push_ref['pruned_sha'] = stdout.splitlines()
562
567
563 extras['hook_type'] = 'pre_receive'
568 extras['hook_type'] = 'pre_receive'
564 extras['commit_ids'] = rev_data
569 extras['commit_ids'] = rev_data
565
570
566 stdout = sys.stdout
571 stdout = sys.stdout
567 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
572 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
568
573
569 return status_code
574 return status_code
570
575
571
576
572 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
577 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
573 """
578 """
574 Post push hook.
579 Post push hook.
575
580
576 :return: status code of the hook. 0 for success.
581 :return: status code of the hook. 0 for success.
577 """
582 """
578 extras = json.loads(env['RC_SCM_DATA'])
583 extras = json.loads(env['RC_SCM_DATA'])
579 if 'push' not in extras['hooks']:
584 if 'push' not in extras['hooks']:
580 return 0
585 return 0
581
586
582 rev_data = _parse_git_ref_lines(revision_lines)
587 rev_data = _parse_git_ref_lines(revision_lines)
583
588
584 git_revs = []
589 git_revs = []
585
590
586 # N.B.(skreft): it is ok to just call git, as git before calling a
591 # N.B.(skreft): it is ok to just call git, as git before calling a
587 # subcommand sets the PATH environment variable so that it points to the
592 # subcommand sets the PATH environment variable so that it points to the
588 # correct version of the git executable.
593 # correct version of the git executable.
589 empty_commit_id = '0' * 40
594 empty_commit_id = '0' * 40
590 branches = []
595 branches = []
591 tags = []
596 tags = []
592 for push_ref in rev_data:
597 for push_ref in rev_data:
593 type_ = push_ref['type']
598 type_ = push_ref['type']
594
599
595 if type_ == 'heads':
600 if type_ == 'heads':
596 # starting new branch case
601 # starting new branch case
597 if push_ref['old_rev'] == empty_commit_id:
602 if push_ref['old_rev'] == empty_commit_id:
598 push_ref_name = push_ref['name']
603 push_ref_name = push_ref['name']
599
604
600 if push_ref_name not in branches:
605 if push_ref_name not in branches:
601 branches.append(push_ref_name)
606 branches.append(push_ref_name)
602
607
603 need_head_set = ''
608 need_head_set = ''
604 with Repository(os.getcwd()) as repo:
609 with Repository(os.getcwd()) as repo:
605 try:
610 try:
606 repo.head
611 repo.head
607 except pygit2.GitError:
612 except pygit2.GitError:
608 need_head_set = f'refs/heads/{push_ref_name}'
613 need_head_set = f'refs/heads/{push_ref_name}'
609
614
610 if need_head_set:
615 if need_head_set:
611 repo.set_head(need_head_set)
616 repo.set_head(need_head_set)
612 print(f"Setting default branch to {push_ref_name}")
617 print(f"Setting default branch to {push_ref_name}")
613
618
614 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
619 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
615 stdout, stderr = subprocessio.run_command(
620 stdout, stderr = subprocessio.run_command(
616 cmd, env=os.environ.copy())
621 cmd, env=os.environ.copy())
617 heads = safe_str(stdout)
622 heads = safe_str(stdout)
618 heads = heads.replace(push_ref['ref'], '')
623 heads = heads.replace(push_ref['ref'], '')
619 heads = ' '.join(head for head
624 heads = ' '.join(head for head
620 in heads.splitlines() if head) or '.'
625 in heads.splitlines() if head) or '.'
621 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
626 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
622 '--pretty=format:%H', '--', push_ref['new_rev'],
627 '--pretty=format:%H', '--', push_ref['new_rev'],
623 '--not', heads]
628 '--not', heads]
624 stdout, stderr = subprocessio.run_command(
629 stdout, stderr = subprocessio.run_command(
625 cmd, env=os.environ.copy())
630 cmd, env=os.environ.copy())
626 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
631 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
627
632
628 # delete branch case
633 # delete branch case
629 elif push_ref['new_rev'] == empty_commit_id:
634 elif push_ref['new_rev'] == empty_commit_id:
630 git_revs.append(f'delete_branch=>{push_ref["name"]}')
635 git_revs.append(f'delete_branch=>{push_ref["name"]}')
631 else:
636 else:
632 if push_ref['name'] not in branches:
637 if push_ref['name'] not in branches:
633 branches.append(push_ref['name'])
638 branches.append(push_ref['name'])
634
639
635 cmd = [settings.GIT_EXECUTABLE, 'log',
640 cmd = [settings.GIT_EXECUTABLE, 'log',
636 f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
641 f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
637 '--reverse', '--pretty=format:%H']
642 '--reverse', '--pretty=format:%H']
638 stdout, stderr = subprocessio.run_command(
643 stdout, stderr = subprocessio.run_command(
639 cmd, env=os.environ.copy())
644 cmd, env=os.environ.copy())
640 # we get bytes from stdout, we need str to be consistent
645 # we get bytes from stdout, we need str to be consistent
641 log_revs = list(map(ascii_str, stdout.splitlines()))
646 log_revs = list(map(ascii_str, stdout.splitlines()))
642 git_revs.extend(log_revs)
647 git_revs.extend(log_revs)
643
648
644 # Pure pygit2 impl. but still 2-3x slower :/
649 # Pure pygit2 impl. but still 2-3x slower :/
645 # results = []
650 # results = []
646 #
651 #
647 # with Repository(os.getcwd()) as repo:
652 # with Repository(os.getcwd()) as repo:
648 # repo_new_rev = repo[push_ref['new_rev']]
653 # repo_new_rev = repo[push_ref['new_rev']]
649 # repo_old_rev = repo[push_ref['old_rev']]
654 # repo_old_rev = repo[push_ref['old_rev']]
650 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
655 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
651 #
656 #
652 # for commit in walker:
657 # for commit in walker:
653 # if commit.id == repo_old_rev.id:
658 # if commit.id == repo_old_rev.id:
654 # break
659 # break
655 # results.append(commit.id.hex)
660 # results.append(commit.id.hex)
656 # # reverse the order, can't use GIT_SORT_REVERSE
661 # # reverse the order, can't use GIT_SORT_REVERSE
657 # log_revs = results[::-1]
662 # log_revs = results[::-1]
658
663
659 elif type_ == 'tags':
664 elif type_ == 'tags':
660 if push_ref['name'] not in tags:
665 if push_ref['name'] not in tags:
661 tags.append(push_ref['name'])
666 tags.append(push_ref['name'])
662 git_revs.append(f'tag=>{push_ref["name"]}')
667 git_revs.append(f'tag=>{push_ref["name"]}')
663
668
664 extras['hook_type'] = 'post_receive'
669 extras['hook_type'] = 'post_receive'
665 extras['commit_ids'] = git_revs
670 extras['commit_ids'] = git_revs
666 extras['new_refs'] = {
671 extras['new_refs'] = {
667 'branches': branches,
672 'branches': branches,
668 'bookmarks': [],
673 'bookmarks': [],
669 'tags': tags,
674 'tags': tags,
670 }
675 }
671
676
672 stdout = sys.stdout
677 stdout = sys.stdout
673
678
674 if 'repo_size' in extras['hooks']:
679 if 'repo_size' in extras['hooks']:
675 try:
680 try:
676 _call_hook('repo_size', extras, GitMessageWriter(stdout))
681 _call_hook('repo_size', extras, GitMessageWriter(stdout))
677 except Exception:
682 except Exception:
678 pass
683 pass
679
684
680 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
685 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
681 return status_code
686 return status_code
682
687
683
688
684 def _get_extras_from_txn_id(path, txn_id):
689 def _get_extras_from_txn_id(path, txn_id):
685 extras = {}
690 extras = {}
686 try:
691 try:
687 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
692 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
688 '-t', txn_id,
693 '-t', txn_id,
689 '--revprop', path, 'rc-scm-extras']
694 '--revprop', path, 'rc-scm-extras']
690 stdout, stderr = subprocessio.run_command(
695 stdout, stderr = subprocessio.run_command(
691 cmd, env=os.environ.copy())
696 cmd, env=os.environ.copy())
692 extras = json.loads(base64.urlsafe_b64decode(stdout))
697 extras = json.loads(base64.urlsafe_b64decode(stdout))
693 except Exception:
698 except Exception:
694 log.exception('Failed to extract extras info from txn_id')
699 log.exception('Failed to extract extras info from txn_id')
695
700
696 return extras
701 return extras
697
702
698
703
699 def _get_extras_from_commit_id(commit_id, path):
704 def _get_extras_from_commit_id(commit_id, path):
700 extras = {}
705 extras = {}
701 try:
706 try:
702 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
707 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
703 '-r', commit_id,
708 '-r', commit_id,
704 '--revprop', path, 'rc-scm-extras']
709 '--revprop', path, 'rc-scm-extras']
705 stdout, stderr = subprocessio.run_command(
710 stdout, stderr = subprocessio.run_command(
706 cmd, env=os.environ.copy())
711 cmd, env=os.environ.copy())
707 extras = json.loads(base64.urlsafe_b64decode(stdout))
712 extras = json.loads(base64.urlsafe_b64decode(stdout))
708 except Exception:
713 except Exception:
709 log.exception('Failed to extract extras info from commit_id')
714 log.exception('Failed to extract extras info from commit_id')
710
715
711 return extras
716 return extras
712
717
713
718
714 def svn_pre_commit(repo_path, commit_data, env):
719 def svn_pre_commit(repo_path, commit_data, env):
715 path, txn_id = commit_data
720 path, txn_id = commit_data
716 branches = []
721 branches = []
717 tags = []
722 tags = []
718
723
719 if env.get('RC_SCM_DATA'):
724 if env.get('RC_SCM_DATA'):
720 extras = json.loads(env['RC_SCM_DATA'])
725 extras = json.loads(env['RC_SCM_DATA'])
721 else:
726 else:
722 # fallback method to read from TXN-ID stored data
727 # fallback method to read from TXN-ID stored data
723 extras = _get_extras_from_txn_id(path, txn_id)
728 extras = _get_extras_from_txn_id(path, txn_id)
724 if not extras:
729 if not extras:
725 return 0
730 return 0
726
731
727 extras['hook_type'] = 'pre_commit'
732 extras['hook_type'] = 'pre_commit'
728 extras['commit_ids'] = [txn_id]
733 extras['commit_ids'] = [txn_id]
729 extras['txn_id'] = txn_id
734 extras['txn_id'] = txn_id
730 extras['new_refs'] = {
735 extras['new_refs'] = {
731 'total_commits': 1,
736 'total_commits': 1,
732 'branches': branches,
737 'branches': branches,
733 'bookmarks': [],
738 'bookmarks': [],
734 'tags': tags,
739 'tags': tags,
735 }
740 }
736
741
737 return _call_hook('pre_push', extras, SvnMessageWriter())
742 return _call_hook('pre_push', extras, SvnMessageWriter())
738
743
739
744
740 def svn_post_commit(repo_path, commit_data, env):
745 def svn_post_commit(repo_path, commit_data, env):
741 """
746 """
742 commit_data is path, rev, txn_id
747 commit_data is path, rev, txn_id
743 """
748 """
744 if len(commit_data) == 3:
749 if len(commit_data) == 3:
745 path, commit_id, txn_id = commit_data
750 path, commit_id, txn_id = commit_data
746 elif len(commit_data) == 2:
751 elif len(commit_data) == 2:
747 log.error('Failed to extract txn_id from commit_data using legacy method. '
752 log.error('Failed to extract txn_id from commit_data using legacy method. '
748 'Some functionality might be limited')
753 'Some functionality might be limited')
749 path, commit_id = commit_data
754 path, commit_id = commit_data
750 txn_id = None
755 txn_id = None
751
756
752 branches = []
757 branches = []
753 tags = []
758 tags = []
754
759
755 if env.get('RC_SCM_DATA'):
760 if env.get('RC_SCM_DATA'):
756 extras = json.loads(env['RC_SCM_DATA'])
761 extras = json.loads(env['RC_SCM_DATA'])
757 else:
762 else:
758 # fallback method to read from TXN-ID stored data
763 # fallback method to read from TXN-ID stored data
759 extras = _get_extras_from_commit_id(commit_id, path)
764 extras = _get_extras_from_commit_id(commit_id, path)
760 if not extras:
765 if not extras:
761 return 0
766 return 0
762
767
763 extras['hook_type'] = 'post_commit'
768 extras['hook_type'] = 'post_commit'
764 extras['commit_ids'] = [commit_id]
769 extras['commit_ids'] = [commit_id]
765 extras['txn_id'] = txn_id
770 extras['txn_id'] = txn_id
766 extras['new_refs'] = {
771 extras['new_refs'] = {
767 'branches': branches,
772 'branches': branches,
768 'bookmarks': [],
773 'bookmarks': [],
769 'tags': tags,
774 'tags': tags,
770 'total_commits': 1,
775 'total_commits': 1,
771 }
776 }
772
777
773 if 'repo_size' in extras['hooks']:
778 if 'repo_size' in extras['hooks']:
774 try:
779 try:
775 _call_hook('repo_size', extras, SvnMessageWriter())
780 _call_hook('repo_size', extras, SvnMessageWriter())
776 except Exception:
781 except Exception:
777 pass
782 pass
778
783
779 return _call_hook('post_push', extras, SvnMessageWriter())
784 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,942 +1,941 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
31
32 import svn.client # noqa
32 import svn.client # noqa
33 import svn.core # noqa
33 import svn.core # noqa
34 import svn.delta # noqa
34 import svn.delta # noqa
35 import svn.diff # noqa
35 import svn.diff # noqa
36 import svn.fs # noqa
36 import svn.fs # noqa
37 import svn.repos # noqa
37 import svn.repos # noqa
38
38
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import (
40 from vcsserver.base import (
41 RepoFactory,
41 RepoFactory,
42 raise_from_original,
42 raise_from_original,
43 ArchiveNode,
43 ArchiveNode,
44 store_archive_in_cache,
44 store_archive_in_cache,
45 BytesEnvelope,
45 BytesEnvelope,
46 BinaryEnvelope,
46 BinaryEnvelope,
47 )
47 )
48 from vcsserver.exceptions import NoContentException
48 from vcsserver.exceptions import NoContentException
49 from vcsserver.str_utils import safe_str, safe_bytes
49 from vcsserver.str_utils import safe_str, safe_bytes
50 from vcsserver.type_utils import assert_bytes
50 from vcsserver.type_utils import assert_bytes
51 from vcsserver.vcs_base import RemoteBase
51 from vcsserver.vcs_base import RemoteBase
52 from vcsserver.lib.svnremoterepo import svnremoterepo
52 from vcsserver.lib.svnremoterepo import svnremoterepo
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 svn_compatible_versions_map = {
57 svn_compatible_versions_map = {
58 'pre-1.4-compatible': '1.3',
58 'pre-1.4-compatible': '1.3',
59 'pre-1.5-compatible': '1.4',
59 'pre-1.5-compatible': '1.4',
60 'pre-1.6-compatible': '1.5',
60 'pre-1.6-compatible': '1.5',
61 'pre-1.8-compatible': '1.7',
61 'pre-1.8-compatible': '1.7',
62 'pre-1.9-compatible': '1.8',
62 'pre-1.9-compatible': '1.8',
63 }
63 }
64
64
65 current_compatible_version = '1.14'
65 current_compatible_version = '1.14'
66
66
67
67
68 def reraise_safe_exceptions(func):
68 def reraise_safe_exceptions(func):
69 """Decorator for converting svn exceptions to something neutral."""
69 """Decorator for converting svn exceptions to something neutral."""
70 def wrapper(*args, **kwargs):
70 def wrapper(*args, **kwargs):
71 try:
71 try:
72 return func(*args, **kwargs)
72 return func(*args, **kwargs)
73 except Exception as e:
73 except Exception as e:
74 if not hasattr(e, '_vcs_kind'):
74 if not hasattr(e, '_vcs_kind'):
75 log.exception("Unhandled exception in svn remote call")
75 log.exception("Unhandled exception in svn remote call")
76 raise_from_original(exceptions.UnhandledException(e), e)
76 raise_from_original(exceptions.UnhandledException(e), e)
77 raise
77 raise
78 return wrapper
78 return wrapper
79
79
80
80
81 class SubversionFactory(RepoFactory):
81 class SubversionFactory(RepoFactory):
82 repo_type = 'svn'
82 repo_type = 'svn'
83
83
84 def _create_repo(self, wire, create, compatible_version):
84 def _create_repo(self, wire, create, compatible_version):
85 path = svn.core.svn_path_canonicalize(wire['path'])
85 path = svn.core.svn_path_canonicalize(wire['path'])
86 if create:
86 if create:
87 fs_config = {'compatible-version': current_compatible_version}
87 fs_config = {'compatible-version': current_compatible_version}
88 if compatible_version:
88 if compatible_version:
89
89
90 compatible_version_string = \
90 compatible_version_string = \
91 svn_compatible_versions_map.get(compatible_version) \
91 svn_compatible_versions_map.get(compatible_version) \
92 or compatible_version
92 or compatible_version
93 fs_config['compatible-version'] = compatible_version_string
93 fs_config['compatible-version'] = compatible_version_string
94
94
95 log.debug('Create SVN repo with config `%s`', fs_config)
95 log.debug('Create SVN repo with config `%s`', fs_config)
96 repo = svn.repos.create(path, "", "", None, fs_config)
96 repo = svn.repos.create(path, "", "", None, fs_config)
97 else:
97 else:
98 repo = svn.repos.open(path)
98 repo = svn.repos.open(path)
99
99
100 log.debug('repository created: got SVN object: %s', repo)
100 log.debug('repository created: got SVN object: %s', repo)
101 return repo
101 return repo
102
102
103 def repo(self, wire, create=False, compatible_version=None):
103 def repo(self, wire, create=False, compatible_version=None):
104 """
104 """
105 Get a repository instance for the given path.
105 Get a repository instance for the given path.
106 """
106 """
107 return self._create_repo(wire, create, compatible_version)
107 return self._create_repo(wire, create, compatible_version)
108
108
109
109
110 NODE_TYPE_MAPPING = {
110 NODE_TYPE_MAPPING = {
111 svn.core.svn_node_file: 'file',
111 svn.core.svn_node_file: 'file',
112 svn.core.svn_node_dir: 'dir',
112 svn.core.svn_node_dir: 'dir',
113 }
113 }
114
114
115
115
116 class SvnRemote(RemoteBase):
116 class SvnRemote(RemoteBase):
117
117
118 def __init__(self, factory, hg_factory=None):
118 def __init__(self, factory, hg_factory=None):
119 self._factory = factory
119 self._factory = factory
120
120
121 self._bulk_methods = {
121 self._bulk_methods = {
122 # NOT supported in SVN ATM...
122 # NOT supported in SVN ATM...
123 }
123 }
124 self._bulk_file_methods = {
124 self._bulk_file_methods = {
125 "size": self.get_file_size,
125 "size": self.get_file_size,
126 "data": self.get_file_content,
126 "data": self.get_file_content,
127 "flags": self.get_node_type,
127 "flags": self.get_node_type,
128 "is_binary": self.is_binary,
128 "is_binary": self.is_binary,
129 "md5": self.md5_hash
129 "md5": self.md5_hash
130 }
130 }
131
131
132 @reraise_safe_exceptions
132 @reraise_safe_exceptions
133 def bulk_file_request(self, wire, commit_id, path, pre_load):
133 def bulk_file_request(self, wire, commit_id, path, pre_load):
134 cache_on, context_uid, repo_id = self._cache_on(wire)
134 cache_on, context_uid, repo_id = self._cache_on(wire)
135 region = self._region(wire)
135 region = self._region(wire)
136
136
137 # since we use a unified API, we need to cast from str to int for SVN
137 # since we use a unified API, we need to cast from str to int for SVN
138 commit_id = int(commit_id)
138 commit_id = int(commit_id)
139
139
140 @region.conditional_cache_on_arguments(condition=cache_on)
140 @region.conditional_cache_on_arguments(condition=cache_on)
141 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
141 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
142 result = {}
142 result = {}
143 for attr in pre_load:
143 for attr in pre_load:
144 try:
144 try:
145 method = self._bulk_file_methods[attr]
145 method = self._bulk_file_methods[attr]
146 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
146 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
147 result[attr] = method(wire, _commit_id, _path)
147 result[attr] = method(wire, _commit_id, _path)
148 except KeyError as e:
148 except KeyError as e:
149 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
149 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
150 return result
150 return result
151
151
152 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
152 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
153
153
154 @reraise_safe_exceptions
154 @reraise_safe_exceptions
155 def discover_svn_version(self):
155 def discover_svn_version(self):
156 try:
156 try:
157 import svn.core
157 import svn.core
158 svn_ver = svn.core.SVN_VERSION
158 svn_ver = svn.core.SVN_VERSION
159 except ImportError:
159 except ImportError:
160 svn_ver = None
160 svn_ver = None
161 return safe_str(svn_ver)
161 return safe_str(svn_ver)
162
162
163 @reraise_safe_exceptions
163 @reraise_safe_exceptions
164 def is_empty(self, wire):
164 def is_empty(self, wire):
165 try:
165 try:
166 return self.lookup(wire, -1) == 0
166 return self.lookup(wire, -1) == 0
167 except Exception:
167 except Exception:
168 log.exception("failed to read object_store")
168 log.exception("failed to read object_store")
169 return False
169 return False
170
170
171 def check_url(self, url, config):
171 def check_url(self, url, config):
172
172
173 # the uuid function returns a valid UUID only from a proper repo, else
173 # the uuid function returns a valid UUID only from a proper repo, else
174 # it throws an exception
174 # it throws an exception
175 username, password, src_url = self.get_url_and_credentials(url)
175 username, password, src_url = self.get_url_and_credentials(url)
176 try:
176 try:
177 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
177 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
178 except Exception:
178 except Exception:
179 tb = traceback.format_exc()
179 tb = traceback.format_exc()
180 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
180 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
181 raise URLError(f'"{url}" is not a valid Subversion source url.')
181 raise URLError(f'"{url}" is not a valid Subversion source url.')
182 return True
182 return True
183
183
184 def is_path_valid_repository(self, wire, path):
184 def is_path_valid_repository(self, wire, path):
185
185
186 # NOTE(marcink): short circuit the check for SVN repo
186 # NOTE(marcink): short circuit the check for SVN repo
187 # the repos.open might be expensive to check, but we have one cheap
187 # the repos.open might be expensive to check, but we have one cheap
188 # precondition that we can use, to check for the 'format' file
188 # precondition that we can use, to check for the 'format' file
189
190 if not os.path.isfile(os.path.join(path, 'format')):
189 if not os.path.isfile(os.path.join(path, 'format')):
191 return False
190 return False
192
191
193 try:
192 try:
194 svn.repos.open(path)
193 svn.repos.open(path)
195 except svn.core.SubversionException:
194 except svn.core.SubversionException:
196 tb = traceback.format_exc()
195 tb = traceback.format_exc()
197 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
196 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
198 return False
197 return False
199 return True
198 return True
200
199
201 @reraise_safe_exceptions
200 @reraise_safe_exceptions
202 def verify(self, wire,):
201 def verify(self, wire,):
203 repo_path = wire['path']
202 repo_path = wire['path']
204 if not self.is_path_valid_repository(wire, repo_path):
203 if not self.is_path_valid_repository(wire, repo_path):
205 raise Exception(
204 raise Exception(
206 f"Path {repo_path} is not a valid Subversion repository.")
205 f"Path {repo_path} is not a valid Subversion repository.")
207
206
208 cmd = ['svnadmin', 'info', repo_path]
207 cmd = ['svnadmin', 'info', repo_path]
209 stdout, stderr = subprocessio.run_command(cmd)
208 stdout, stderr = subprocessio.run_command(cmd)
210 return stdout
209 return stdout
211
210
212 @reraise_safe_exceptions
211 @reraise_safe_exceptions
213 def lookup(self, wire, revision):
212 def lookup(self, wire, revision):
214 if revision not in [-1, None, 'HEAD']:
213 if revision not in [-1, None, 'HEAD']:
215 raise NotImplementedError
214 raise NotImplementedError
216 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
217 fs_ptr = svn.repos.fs(repo)
216 fs_ptr = svn.repos.fs(repo)
218 head = svn.fs.youngest_rev(fs_ptr)
217 head = svn.fs.youngest_rev(fs_ptr)
219 return head
218 return head
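# Note: only HEAD lookups are supported here (revision must be -1, None or
# 'HEAD'); the youngest revision number of the repository filesystem is returned.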
220
219
221 @reraise_safe_exceptions
220 @reraise_safe_exceptions
222 def lookup_interval(self, wire, start_ts, end_ts):
221 def lookup_interval(self, wire, start_ts, end_ts):
223 repo = self._factory.repo(wire)
222 repo = self._factory.repo(wire)
224 fsobj = svn.repos.fs(repo)
223 fsobj = svn.repos.fs(repo)
225 start_rev = None
224 start_rev = None
226 end_rev = None
225 end_rev = None
227 if start_ts:
226 if start_ts:
228 start_ts_svn = apr_time_t(start_ts)
227 start_ts_svn = apr_time_t(start_ts)
229 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
228 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
230 else:
229 else:
231 start_rev = 1
230 start_rev = 1
232 if end_ts:
231 if end_ts:
233 end_ts_svn = apr_time_t(end_ts)
232 end_ts_svn = apr_time_t(end_ts)
234 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
233 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
235 else:
234 else:
236 end_rev = svn.fs.youngest_rev(fsobj)
235 end_rev = svn.fs.youngest_rev(fsobj)
237 return start_rev, end_rev
236 return start_rev, end_rev
238
237
239 @reraise_safe_exceptions
238 @reraise_safe_exceptions
240 def revision_properties(self, wire, revision):
239 def revision_properties(self, wire, revision):
241
240
242 cache_on, context_uid, repo_id = self._cache_on(wire)
241 cache_on, context_uid, repo_id = self._cache_on(wire)
243 region = self._region(wire)
242 region = self._region(wire)
244
243
245 @region.conditional_cache_on_arguments(condition=cache_on)
244 @region.conditional_cache_on_arguments(condition=cache_on)
246 def _revision_properties(_repo_id, _revision):
245 def _revision_properties(_repo_id, _revision):
247 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
248 fs_ptr = svn.repos.fs(repo)
247 fs_ptr = svn.repos.fs(repo)
249 return svn.fs.revision_proplist(fs_ptr, revision)
248 return svn.fs.revision_proplist(fs_ptr, revision)
250 return _revision_properties(repo_id, revision)
249 return _revision_properties(repo_id, revision)
251
250
252 def revision_changes(self, wire, revision):
251 def revision_changes(self, wire, revision):
253
252
254 repo = self._factory.repo(wire)
253 repo = self._factory.repo(wire)
255 fsobj = svn.repos.fs(repo)
254 fsobj = svn.repos.fs(repo)
256 rev_root = svn.fs.revision_root(fsobj, revision)
255 rev_root = svn.fs.revision_root(fsobj, revision)
257
256
258 editor = svn.repos.ChangeCollector(fsobj, rev_root)
257 editor = svn.repos.ChangeCollector(fsobj, rev_root)
259 editor_ptr, editor_baton = svn.delta.make_editor(editor)
258 editor_ptr, editor_baton = svn.delta.make_editor(editor)
260 base_dir = ""
259 base_dir = ""
261 send_deltas = False
260 send_deltas = False
262 svn.repos.replay2(
261 svn.repos.replay2(
263 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
262 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
264 editor_ptr, editor_baton, None)
263 editor_ptr, editor_baton, None)
265
264
266 added = []
265 added = []
267 changed = []
266 changed = []
268 removed = []
267 removed = []
269
268
270 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
269 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
271 for path, change in editor.changes.items():
270 for path, change in editor.changes.items():
272 # TODO: Decide what to do with directory nodes. Subversion can add
271 # TODO: Decide what to do with directory nodes. Subversion can add
273 # empty directories.
272 # empty directories.
274
273
275 if change.item_kind == svn.core.svn_node_dir:
274 if change.item_kind == svn.core.svn_node_dir:
276 continue
275 continue
277 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
276 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
278 added.append(path)
277 added.append(path)
279 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
278 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
280 svn.repos.CHANGE_ACTION_REPLACE]:
279 svn.repos.CHANGE_ACTION_REPLACE]:
281 changed.append(path)
280 changed.append(path)
282 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
281 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
283 removed.append(path)
282 removed.append(path)
284 else:
283 else:
285 raise NotImplementedError(
284 raise NotImplementedError(
286 "Action {} not supported on path {}".format(
285 "Action {} not supported on path {}".format(
287 change.action, path))
286 change.action, path))
288
287
289 changes = {
288 changes = {
290 'added': added,
289 'added': added,
291 'changed': changed,
290 'changed': changed,
292 'removed': removed,
291 'removed': removed,
293 }
292 }
294 return changes
293 return changes
295
294
296 @reraise_safe_exceptions
295 @reraise_safe_exceptions
297 def node_history(self, wire, path, revision, limit):
296 def node_history(self, wire, path, revision, limit):
298 cache_on, context_uid, repo_id = self._cache_on(wire)
297 cache_on, context_uid, repo_id = self._cache_on(wire)
299 region = self._region(wire)
298 region = self._region(wire)
300
299
301 @region.conditional_cache_on_arguments(condition=cache_on)
300 @region.conditional_cache_on_arguments(condition=cache_on)
302 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
301 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
303 cross_copies = False
302 cross_copies = False
304 repo = self._factory.repo(wire)
303 repo = self._factory.repo(wire)
305 fsobj = svn.repos.fs(repo)
304 fsobj = svn.repos.fs(repo)
306 rev_root = svn.fs.revision_root(fsobj, revision)
305 rev_root = svn.fs.revision_root(fsobj, revision)
307
306
308 history_revisions = []
307 history_revisions = []
309 history = svn.fs.node_history(rev_root, path)
308 history = svn.fs.node_history(rev_root, path)
310 history = svn.fs.history_prev(history, cross_copies)
309 history = svn.fs.history_prev(history, cross_copies)
311 while history:
310 while history:
312 __, node_revision = svn.fs.history_location(history)
311 __, node_revision = svn.fs.history_location(history)
313 history_revisions.append(node_revision)
312 history_revisions.append(node_revision)
314 if limit and len(history_revisions) >= limit:
313 if limit and len(history_revisions) >= limit:
315 break
314 break
316 history = svn.fs.history_prev(history, cross_copies)
315 history = svn.fs.history_prev(history, cross_copies)
317 return history_revisions
316 return history_revisions
318 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
317 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
319
318
320 @reraise_safe_exceptions
319 @reraise_safe_exceptions
321 def node_properties(self, wire, path, revision):
320 def node_properties(self, wire, path, revision):
322 cache_on, context_uid, repo_id = self._cache_on(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
323 region = self._region(wire)
322 region = self._region(wire)
324
323
325 @region.conditional_cache_on_arguments(condition=cache_on)
324 @region.conditional_cache_on_arguments(condition=cache_on)
326 def _node_properties(_repo_id, _path, _revision):
325 def _node_properties(_repo_id, _path, _revision):
327 repo = self._factory.repo(wire)
326 repo = self._factory.repo(wire)
328 fsobj = svn.repos.fs(repo)
327 fsobj = svn.repos.fs(repo)
329 rev_root = svn.fs.revision_root(fsobj, revision)
328 rev_root = svn.fs.revision_root(fsobj, revision)
330 return svn.fs.node_proplist(rev_root, path)
329 return svn.fs.node_proplist(rev_root, path)
331 return _node_properties(repo_id, path, revision)
330 return _node_properties(repo_id, path, revision)
332
331
333 def file_annotate(self, wire, path, revision):
332 def file_annotate(self, wire, path, revision):
334 abs_path = 'file://' + urllib.request.pathname2url(
333 abs_path = 'file://' + urllib.request.pathname2url(
335 vcspath.join(wire['path'], path))
334 vcspath.join(wire['path'], path))
336 file_uri = svn.core.svn_path_canonicalize(abs_path)
335 file_uri = svn.core.svn_path_canonicalize(abs_path)
337
336
338 start_rev = svn_opt_revision_value_t(0)
337 start_rev = svn_opt_revision_value_t(0)
339 peg_rev = svn_opt_revision_value_t(revision)
338 peg_rev = svn_opt_revision_value_t(revision)
340 end_rev = peg_rev
339 end_rev = peg_rev
341
340
342 annotations = []
341 annotations = []
343
342
344 def receiver(line_no, revision, author, date, line, pool):
343 def receiver(line_no, revision, author, date, line, pool):
345 annotations.append((line_no, revision, line))
344 annotations.append((line_no, revision, line))
346
345
347 # TODO: Cannot use blame5, missing typemap function in the swig code
346 # TODO: Cannot use blame5, missing typemap function in the swig code
348 try:
347 try:
349 svn.client.blame2(
348 svn.client.blame2(
350 file_uri, peg_rev, start_rev, end_rev,
349 file_uri, peg_rev, start_rev, end_rev,
351 receiver, svn.client.create_context())
350 receiver, svn.client.create_context())
352 except svn.core.SubversionException as exc:
351 except svn.core.SubversionException as exc:
353 log.exception("Error during blame operation.")
352 log.exception("Error during blame operation.")
354 raise Exception(
353 raise Exception(
355 f"Blame not supported or file does not exist at path {path}. "
354 f"Blame not supported or file does not exist at path {path}. "
356 f"Error {exc}.")
355 f"Error {exc}.")
357
356
358 return BinaryEnvelope(annotations)
357 return BinaryEnvelope(annotations)
359
358
360 @reraise_safe_exceptions
359 @reraise_safe_exceptions
361 def get_node_type(self, wire, revision=None, path=''):
360 def get_node_type(self, wire, revision=None, path=''):
362
361
363 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
364 region = self._region(wire)
363 region = self._region(wire)
365
364
366 @region.conditional_cache_on_arguments(condition=cache_on)
365 @region.conditional_cache_on_arguments(condition=cache_on)
367 def _get_node_type(_repo_id, _revision, _path):
366 def _get_node_type(_repo_id, _revision, _path):
368 repo = self._factory.repo(wire)
367 repo = self._factory.repo(wire)
369 fs_ptr = svn.repos.fs(repo)
368 fs_ptr = svn.repos.fs(repo)
370 if _revision is None:
369 if _revision is None:
371 _revision = svn.fs.youngest_rev(fs_ptr)
370 _revision = svn.fs.youngest_rev(fs_ptr)
372 root = svn.fs.revision_root(fs_ptr, _revision)
371 root = svn.fs.revision_root(fs_ptr, _revision)
373 node = svn.fs.check_path(root, path)
372 node = svn.fs.check_path(root, path)
374 return NODE_TYPE_MAPPING.get(node, None)
373 return NODE_TYPE_MAPPING.get(node, None)
375 return _get_node_type(repo_id, revision, path)
374 return _get_node_type(repo_id, revision, path)
376
375
377 @reraise_safe_exceptions
376 @reraise_safe_exceptions
378 def get_nodes(self, wire, revision=None, path=''):
377 def get_nodes(self, wire, revision=None, path=''):
379
378
380 cache_on, context_uid, repo_id = self._cache_on(wire)
379 cache_on, context_uid, repo_id = self._cache_on(wire)
381 region = self._region(wire)
380 region = self._region(wire)
382
381
383 @region.conditional_cache_on_arguments(condition=cache_on)
382 @region.conditional_cache_on_arguments(condition=cache_on)
384 def _get_nodes(_repo_id, _path, _revision):
383 def _get_nodes(_repo_id, _path, _revision):
385 repo = self._factory.repo(wire)
384 repo = self._factory.repo(wire)
386 fsobj = svn.repos.fs(repo)
385 fsobj = svn.repos.fs(repo)
387 if _revision is None:
386 if _revision is None:
388 _revision = svn.fs.youngest_rev(fsobj)
387 _revision = svn.fs.youngest_rev(fsobj)
389 root = svn.fs.revision_root(fsobj, _revision)
388 root = svn.fs.revision_root(fsobj, _revision)
390 entries = svn.fs.dir_entries(root, path)
389 entries = svn.fs.dir_entries(root, path)
391 result = []
390 result = []
392 for entry_path, entry_info in entries.items():
391 for entry_path, entry_info in entries.items():
393 result.append(
392 result.append(
394 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
393 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
395 return result
394 return result
396 return _get_nodes(repo_id, path, revision)
395 return _get_nodes(repo_id, path, revision)
397
396
398 @reraise_safe_exceptions
397 @reraise_safe_exceptions
399 def get_file_content(self, wire, rev=None, path=''):
398 def get_file_content(self, wire, rev=None, path=''):
400 repo = self._factory.repo(wire)
399 repo = self._factory.repo(wire)
401 fsobj = svn.repos.fs(repo)
400 fsobj = svn.repos.fs(repo)
402
401
403 if rev is None:
402 if rev is None:
404 rev = svn.fs.youngest_rev(fsobj)
403 rev = svn.fs.youngest_rev(fsobj)
405
404
406 root = svn.fs.revision_root(fsobj, rev)
405 root = svn.fs.revision_root(fsobj, rev)
407 content = svn.core.Stream(svn.fs.file_contents(root, path))
406 content = svn.core.Stream(svn.fs.file_contents(root, path))
408 return BytesEnvelope(content.read())
407 return BytesEnvelope(content.read())
409
408
410 @reraise_safe_exceptions
409 @reraise_safe_exceptions
411 def get_file_size(self, wire, revision=None, path=''):
410 def get_file_size(self, wire, revision=None, path=''):
412
411
413 cache_on, context_uid, repo_id = self._cache_on(wire)
412 cache_on, context_uid, repo_id = self._cache_on(wire)
414 region = self._region(wire)
413 region = self._region(wire)
415
414
416 @region.conditional_cache_on_arguments(condition=cache_on)
415 @region.conditional_cache_on_arguments(condition=cache_on)
417 def _get_file_size(_repo_id, _revision, _path):
416 def _get_file_size(_repo_id, _revision, _path):
418 repo = self._factory.repo(wire)
417 repo = self._factory.repo(wire)
419 fsobj = svn.repos.fs(repo)
418 fsobj = svn.repos.fs(repo)
420 if _revision is None:
419 if _revision is None:
421 _revision = svn.fs.youngest_rev(fsobj)
420 _revision = svn.fs.youngest_rev(fsobj)
422 root = svn.fs.revision_root(fsobj, _revision)
421 root = svn.fs.revision_root(fsobj, _revision)
423 size = svn.fs.file_length(root, path)
422 size = svn.fs.file_length(root, path)
424 return size
423 return size
425 return _get_file_size(repo_id, revision, path)
424 return _get_file_size(repo_id, revision, path)
426
425
427 def create_repository(self, wire, compatible_version=None):
426 def create_repository(self, wire, compatible_version=None):
428 log.info('Creating Subversion repository in path "%s"', wire['path'])
427 log.info('Creating Subversion repository in path "%s"', wire['path'])
429 self._factory.repo(wire, create=True,
428 self._factory.repo(wire, create=True,
430 compatible_version=compatible_version)
429 compatible_version=compatible_version)
431
430
432 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
431 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
433 obj = urllib.parse.urlparse(src_url)
432 obj = urllib.parse.urlparse(src_url)
434 username = obj.username or ''
433 username = obj.username or ''
435 password = obj.password or ''
434 password = obj.password or ''
436 return username, password, src_url
435 return username, password, src_url
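# Illustrative only (hypothetical URL, editor's sketch): credentials embedded
# in the URL are split out via urllib.parse, e.g.
#
#   get_url_and_credentials('https://alice:secret@svn.example.com/repo')
#   -> ('alice', 'secret', 'https://alice:secret@svn.example.com/repo')
#
# a URL without credentials yields empty username/password strings, and the
# source URL itself is returned unchanged.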
437
436
438 def import_remote_repository(self, wire, src_url):
437 def import_remote_repository(self, wire, src_url):
439 repo_path = wire['path']
438 repo_path = wire['path']
440 if not self.is_path_valid_repository(wire, repo_path):
439 if not self.is_path_valid_repository(wire, repo_path):
441 raise Exception(
440 raise Exception(
442 f"Path {repo_path} is not a valid Subversion repository.")
441 f"Path {repo_path} is not a valid Subversion repository.")
443
442
444 username, password, src_url = self.get_url_and_credentials(src_url)
443 username, password, src_url = self.get_url_and_credentials(src_url)
445 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
444 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
446 '--trust-server-cert-failures=unknown-ca']
445 '--trust-server-cert-failures=unknown-ca']
447 if username and password:
446 if username and password:
448 rdump_cmd += ['--username', username, '--password', password]
447 rdump_cmd += ['--username', username, '--password', password]
449 rdump_cmd += [src_url]
448 rdump_cmd += [src_url]
450
449
451 rdump = subprocess.Popen(
450 rdump = subprocess.Popen(
452 rdump_cmd,
451 rdump_cmd,
453 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
452 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
454 load = subprocess.Popen(
453 load = subprocess.Popen(
455 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
454 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
456
455
457 # TODO: johbo: This can be a very long operation, might be better
456 # TODO: johbo: This can be a very long operation, might be better
458 # to track some kind of status and provide an api to check if the
457 # to track some kind of status and provide an api to check if the
459 # import is done.
458 # import is done.
460 rdump.wait()
459 rdump.wait()
461 load.wait()
460 load.wait()
462
461
463 log.debug('svnrdump process ended with code: %s', rdump.returncode)
462 log.debug('svnrdump process ended with code: %s', rdump.returncode)
464 if rdump.returncode != 0:
463 if rdump.returncode != 0:
465 errors = rdump.stderr.read()
464 errors = rdump.stderr.read()
466 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
465 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
467
466
468 reason = 'UNKNOWN'
467 reason = 'UNKNOWN'
469 if b'svnrdump: E230001:' in errors:
468 if b'svnrdump: E230001:' in errors:
470 reason = 'INVALID_CERTIFICATE'
469 reason = 'INVALID_CERTIFICATE'
471
470
472 if reason == 'UNKNOWN':
471 if reason == 'UNKNOWN':
473 reason = f'UNKNOWN:{safe_str(errors)}'
472 reason = f'UNKNOWN:{safe_str(errors)}'
474
473
475 raise Exception(
474 raise Exception(
476 'Failed to dump the remote repository from {}. Reason:{}'.format(
475 'Failed to dump the remote repository from {}. Reason:{}'.format(
477 src_url, reason))
476 src_url, reason))
478 if load.returncode != 0:
477 if load.returncode != 0:
479 raise Exception(
478 raise Exception(
480 f'Failed to load the dump of remote repository from {src_url}.')
479 f'Failed to load the dump of remote repository from {src_url}.')
481
480
482 def commit(self, wire, message, author, timestamp, updated, removed):
481 def commit(self, wire, message, author, timestamp, updated, removed):
483
482
484 message = safe_bytes(message)
483 message = safe_bytes(message)
485 author = safe_bytes(author)
484 author = safe_bytes(author)
486
485
487 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
488 fsobj = svn.repos.fs(repo)
487 fsobj = svn.repos.fs(repo)
489
488
490 rev = svn.fs.youngest_rev(fsobj)
489 rev = svn.fs.youngest_rev(fsobj)
491 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
490 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
492 txn_root = svn.fs.txn_root(txn)
491 txn_root = svn.fs.txn_root(txn)
493
492
494 for node in updated:
493 for node in updated:
495 TxnNodeProcessor(node, txn_root).update()
494 TxnNodeProcessor(node, txn_root).update()
496 for node in removed:
495 for node in removed:
497 TxnNodeProcessor(node, txn_root).remove()
496 TxnNodeProcessor(node, txn_root).remove()
498
497
499 commit_id = svn.repos.fs_commit_txn(repo, txn)
498 commit_id = svn.repos.fs_commit_txn(repo, txn)
500
499
501 if timestamp:
500 if timestamp:
502 apr_time = apr_time_t(timestamp)
501 apr_time = apr_time_t(timestamp)
503 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
502 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
504 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
503 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
505
504
506 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
505 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
507 return commit_id
506 return commit_id
508
507
509 @reraise_safe_exceptions
508 @reraise_safe_exceptions
510 def diff(self, wire, rev1, rev2, path1=None, path2=None,
509 def diff(self, wire, rev1, rev2, path1=None, path2=None,
511 ignore_whitespace=False, context=3):
510 ignore_whitespace=False, context=3):
512
511
513 wire.update(cache=False)
512 wire.update(cache=False)
514 repo = self._factory.repo(wire)
513 repo = self._factory.repo(wire)
515 diff_creator = SvnDiffer(
514 diff_creator = SvnDiffer(
516 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
515 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
517 try:
516 try:
518 return BytesEnvelope(diff_creator.generate_diff())
517 return BytesEnvelope(diff_creator.generate_diff())
519 except svn.core.SubversionException as e:
518 except svn.core.SubversionException as e:
520 log.exception(
519 log.exception(
521 "Error during diff operation. "
520 "Error during diff operation. "
522 "Path might not exist %s, %s", path1, path2)
521 "Path might not exist %s, %s", path1, path2)
523 return BytesEnvelope(b'')
522 return BytesEnvelope(b'')
524
523
525 @reraise_safe_exceptions
524 @reraise_safe_exceptions
526 def is_large_file(self, wire, path):
525 def is_large_file(self, wire, path):
527 return False
526 return False
528
527
529 @reraise_safe_exceptions
528 @reraise_safe_exceptions
530 def is_binary(self, wire, rev, path):
529 def is_binary(self, wire, rev, path):
531 cache_on, context_uid, repo_id = self._cache_on(wire)
530 cache_on, context_uid, repo_id = self._cache_on(wire)
532 region = self._region(wire)
531 region = self._region(wire)
533
532
534 @region.conditional_cache_on_arguments(condition=cache_on)
533 @region.conditional_cache_on_arguments(condition=cache_on)
535 def _is_binary(_repo_id, _rev, _path):
534 def _is_binary(_repo_id, _rev, _path):
536 raw_bytes = self.get_file_content(wire, rev, path)
535 raw_bytes = self.get_file_content(wire, rev, path)
537 if not raw_bytes:
536 if not raw_bytes:
538 return False
537 return False
539 return b'\0' in raw_bytes
538 return b'\0' in raw_bytes
540
539
541 return _is_binary(repo_id, rev, path)
540 return _is_binary(repo_id, rev, path)
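# The check above mirrors the common binary-detection heuristic (as used by
# git): any NUL byte in the file content marks it as binary, while empty
# content is treated as text.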
542
541
543 @reraise_safe_exceptions
542 @reraise_safe_exceptions
544 def md5_hash(self, wire, rev, path):
543 def md5_hash(self, wire, rev, path):
545 cache_on, context_uid, repo_id = self._cache_on(wire)
544 cache_on, context_uid, repo_id = self._cache_on(wire)
546 region = self._region(wire)
545 region = self._region(wire)
547
546
548 @region.conditional_cache_on_arguments(condition=cache_on)
547 @region.conditional_cache_on_arguments(condition=cache_on)
549 def _md5_hash(_repo_id, _rev, _path):
548 def _md5_hash(_repo_id, _rev, _path):
550 return ''
549 return ''
551
550
552 return _md5_hash(repo_id, rev, path)
551 return _md5_hash(repo_id, rev, path)
553
552
554 @reraise_safe_exceptions
553 @reraise_safe_exceptions
555 def run_svn_command(self, wire, cmd, **opts):
554 def run_svn_command(self, wire, cmd, **opts):
556 path = wire.get('path', None)
555 path = wire.get('path', None)
557
556
558 if path and os.path.isdir(path):
557 if path and os.path.isdir(path):
559 opts['cwd'] = path
558 opts['cwd'] = path
560
559
561 safe_call = opts.pop('_safe', False)
560 safe_call = opts.pop('_safe', False)
562
561
563 svnenv = os.environ.copy()
562 svnenv = os.environ.copy()
564 svnenv.update(opts.pop('extra_env', {}))
563 svnenv.update(opts.pop('extra_env', {}))
565
564
566 _opts = {'env': svnenv, 'shell': False}
565 _opts = {'env': svnenv, 'shell': False}
567
566
568 try:
567 try:
569 _opts.update(opts)
568 _opts.update(opts)
570 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
569 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
571
570
572 return b''.join(proc), b''.join(proc.stderr)
571 return b''.join(proc), b''.join(proc.stderr)
573 except OSError as err:
572 except OSError as err:
574 if safe_call:
573 if safe_call:
575 return '', safe_str(err).strip()
574 return '', safe_str(err).strip()
576 else:
575 else:
577 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
576 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
578 tb_err = ("Couldn't run svn command (%s).\n"
577 tb_err = ("Couldn't run svn command (%s).\n"
579 "Original error was:%s\n"
578 "Original error was:%s\n"
580 "Call options:%s\n"
579 "Call options:%s\n"
581 % (cmd, err, _opts))
580 % (cmd, err, _opts))
582 log.exception(tb_err)
581 log.exception(tb_err)
583 raise exceptions.VcsException()(tb_err)
582 raise exceptions.VcsException()(tb_err)
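# Illustrative only (editor's sketch; `remote` and the command are hypothetical):
#
#   stdout, stderr = remote.run_svn_command(wire, ['svn', '--version'], _safe=True)
#
# With _safe=True an OSError (e.g. a missing svn binary) is returned as
# ('', '<error text>') instead of raising; without it the failure is wrapped
# into exceptions.VcsException.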
584
583
585 @reraise_safe_exceptions
584 @reraise_safe_exceptions
586 def install_hooks(self, wire, force=False):
585 def install_hooks(self, wire, force=False):
587 from vcsserver.hook_utils import install_svn_hooks
586 from vcsserver.hook_utils import install_svn_hooks
588 repo_path = wire['path']
587 repo_path = wire['path']
589 binary_dir = settings.BINARY_DIR
588 binary_dir = settings.BINARY_DIR
590 executable = None
589 executable = None
591 if binary_dir:
590 if binary_dir:
592 executable = os.path.join(binary_dir, 'python3')
591 executable = os.path.join(binary_dir, 'python3')
593 return install_svn_hooks(repo_path, force_create=force)
592 return install_svn_hooks(repo_path, force_create=force)
594
593
595 @reraise_safe_exceptions
594 @reraise_safe_exceptions
596 def get_hooks_info(self, wire):
595 def get_hooks_info(self, wire):
597 from vcsserver.hook_utils import (
596 from vcsserver.hook_utils import (
598 get_svn_pre_hook_version, get_svn_post_hook_version)
597 get_svn_pre_hook_version, get_svn_post_hook_version)
599 repo_path = wire['path']
598 repo_path = wire['path']
600 return {
599 return {
601 'pre_version': get_svn_pre_hook_version(repo_path),
600 'pre_version': get_svn_pre_hook_version(repo_path),
602 'post_version': get_svn_post_hook_version(repo_path),
601 'post_version': get_svn_post_hook_version(repo_path),
603 }
602 }
604
603
605 @reraise_safe_exceptions
604 @reraise_safe_exceptions
606 def set_head_ref(self, wire, head_name):
605 def set_head_ref(self, wire, head_name):
607 pass
606 pass
608
607
609 @reraise_safe_exceptions
608 @reraise_safe_exceptions
610 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
609 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
611 archive_dir_name, commit_id, cache_config):
610 archive_dir_name, commit_id, cache_config):
612
611
613 def walk_tree(root, root_dir, _commit_id):
612 def walk_tree(root, root_dir, _commit_id):
614 """
613 """
615 Special recursive svn repo walker
614 Special recursive svn repo walker
616 """
615 """
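# Yields (path, data, node_type) tuples: a directory is emitted first with the
# default file mode and then walked recursively; files carry their mode, a
# symlink flag and a lazy content_stream reader.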
617 root_dir = safe_bytes(root_dir)
616 root_dir = safe_bytes(root_dir)
618
617
619 filemode_default = 0o100644
618 filemode_default = 0o100644
620 filemode_executable = 0o100755
619 filemode_executable = 0o100755
621
620
622 file_iter = svn.fs.dir_entries(root, root_dir)
621 file_iter = svn.fs.dir_entries(root, root_dir)
623 for f_name in file_iter:
622 for f_name in file_iter:
624 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
623 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
625
624
626 if f_type == 'dir':
625 if f_type == 'dir':
627 # yield the directory entry itself first, then all entries in that dir
626 # yield the directory entry itself first, then all entries in that dir
628 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
627 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
629 new_root = os.path.join(root_dir, f_name)
628 new_root = os.path.join(root_dir, f_name)
630 yield from walk_tree(root, new_root, _commit_id)
629 yield from walk_tree(root, new_root, _commit_id)
631 else:
630 else:
632
631
633 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
632 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
634 prop_list = svn.fs.node_proplist(root, f_path)
633 prop_list = svn.fs.node_proplist(root, f_path)
635
634
636 f_mode = filemode_default
635 f_mode = filemode_default
637 if prop_list.get('svn:executable'):
636 if prop_list.get('svn:executable'):
638 f_mode = filemode_executable
637 f_mode = filemode_executable
639
638
640 f_is_link = False
639 f_is_link = False
641 if prop_list.get('svn:special'):
640 if prop_list.get('svn:special'):
642 f_is_link = True
641 f_is_link = True
643
642
644 data = {
643 data = {
645 'is_link': f_is_link,
644 'is_link': f_is_link,
646 'mode': f_mode,
645 'mode': f_mode,
647 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
646 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
648 }
647 }
649
648
650 yield f_path, data, f_type
649 yield f_path, data, f_type
651
650
652 def file_walker(_commit_id, path):
651 def file_walker(_commit_id, path):
653 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
654 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
653 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
655
654
656 def no_content():
655 def no_content():
657 raise NoContentException()
656 raise NoContentException()
658
657
659 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
658 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
660 file_path = f_name
659 file_path = f_name
661
660
662 if f_type == 'dir':
661 if f_type == 'dir':
663 mode = f_data['mode']
662 mode = f_data['mode']
664 yield ArchiveNode(file_path, mode, False, no_content)
663 yield ArchiveNode(file_path, mode, False, no_content)
665 else:
664 else:
666 mode = f_data['mode']
665 mode = f_data['mode']
667 is_link = f_data['is_link']
666 is_link = f_data['is_link']
668 data_stream = f_data['content_stream']
667 data_stream = f_data['content_stream']
669 yield ArchiveNode(file_path, mode, is_link, data_stream)
668 yield ArchiveNode(file_path, mode, is_link, data_stream)
670
669
671 return store_archive_in_cache(
670 return store_archive_in_cache(
672 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
671 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
673
672
674
673
675 class SvnDiffer:
674 class SvnDiffer:
676 """
675 """
677 Utility to create diffs based on difflib and the Subversion api
676 Utility to create diffs based on difflib and the Subversion api
678 """
677 """
679
678
680 binary_content = False
679 binary_content = False
681
680
682 def __init__(
681 def __init__(
683 self, repo, src_rev, src_path, tgt_rev, tgt_path,
682 self, repo, src_rev, src_path, tgt_rev, tgt_path,
684 ignore_whitespace, context):
683 ignore_whitespace, context):
685 self.repo = repo
684 self.repo = repo
686 self.ignore_whitespace = ignore_whitespace
685 self.ignore_whitespace = ignore_whitespace
687 self.context = context
686 self.context = context
688
687
689 fsobj = svn.repos.fs(repo)
688 fsobj = svn.repos.fs(repo)
690
689
691 self.tgt_rev = tgt_rev
690 self.tgt_rev = tgt_rev
692 self.tgt_path = tgt_path or ''
691 self.tgt_path = tgt_path or ''
693 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
692 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
694 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
693 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
695
694
696 self.src_rev = src_rev
695 self.src_rev = src_rev
697 self.src_path = src_path or self.tgt_path
696 self.src_path = src_path or self.tgt_path
698 self.src_root = svn.fs.revision_root(fsobj, src_rev)
697 self.src_root = svn.fs.revision_root(fsobj, src_rev)
699 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
698 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
700
699
701 self._validate()
700 self._validate()
702
701
703 def _validate(self):
702 def _validate(self):
704 if (self.tgt_kind != svn.core.svn_node_none and
703 if (self.tgt_kind != svn.core.svn_node_none and
705 self.src_kind != svn.core.svn_node_none and
704 self.src_kind != svn.core.svn_node_none and
706 self.src_kind != self.tgt_kind):
705 self.src_kind != self.tgt_kind):
707 # TODO: johbo: proper error handling
706 # TODO: johbo: proper error handling
708 raise Exception(
707 raise Exception(
709 "Source and target are not compatible for diff generation. "
708 "Source and target are not compatible for diff generation. "
710 "Source type: %s, target type: %s" %
709 "Source type: %s, target type: %s" %
711 (self.src_kind, self.tgt_kind))
710 (self.src_kind, self.tgt_kind))
712
711
713 def generate_diff(self) -> bytes:
712 def generate_diff(self) -> bytes:
714 buf = io.BytesIO()
713 buf = io.BytesIO()
715 if self.tgt_kind == svn.core.svn_node_dir:
714 if self.tgt_kind == svn.core.svn_node_dir:
716 self._generate_dir_diff(buf)
715 self._generate_dir_diff(buf)
717 else:
716 else:
718 self._generate_file_diff(buf)
717 self._generate_file_diff(buf)
719 return buf.getvalue()
718 return buf.getvalue()
720
719
721 def _generate_dir_diff(self, buf: io.BytesIO):
720 def _generate_dir_diff(self, buf: io.BytesIO):
722 editor = DiffChangeEditor()
721 editor = DiffChangeEditor()
723 editor_ptr, editor_baton = svn.delta.make_editor(editor)
722 editor_ptr, editor_baton = svn.delta.make_editor(editor)
724 svn.repos.dir_delta2(
723 svn.repos.dir_delta2(
725 self.src_root,
724 self.src_root,
726 self.src_path,
725 self.src_path,
727 '', # src_entry
726 '', # src_entry
728 self.tgt_root,
727 self.tgt_root,
729 self.tgt_path,
728 self.tgt_path,
730 editor_ptr, editor_baton,
729 editor_ptr, editor_baton,
731 authorization_callback_allow_all,
730 authorization_callback_allow_all,
732 False, # text_deltas
731 False, # text_deltas
733 svn.core.svn_depth_infinity, # depth
732 svn.core.svn_depth_infinity, # depth
734 False, # entry_props
733 False, # entry_props
735 False, # ignore_ancestry
734 False, # ignore_ancestry
736 )
735 )
737
736
738 for path, __, change in sorted(editor.changes):
737 for path, __, change in sorted(editor.changes):
739 self._generate_node_diff(
738 self._generate_node_diff(
740 buf, change, path, self.tgt_path, path, self.src_path)
739 buf, change, path, self.tgt_path, path, self.src_path)
741
740
742 def _generate_file_diff(self, buf: io.BytesIO):
741 def _generate_file_diff(self, buf: io.BytesIO):
743 change = None
742 change = None
744 if self.src_kind == svn.core.svn_node_none:
743 if self.src_kind == svn.core.svn_node_none:
745 change = "add"
744 change = "add"
746 elif self.tgt_kind == svn.core.svn_node_none:
745 elif self.tgt_kind == svn.core.svn_node_none:
747 change = "delete"
746 change = "delete"
748 tgt_base, tgt_path = vcspath.split(self.tgt_path)
747 tgt_base, tgt_path = vcspath.split(self.tgt_path)
749 src_base, src_path = vcspath.split(self.src_path)
748 src_base, src_path = vcspath.split(self.src_path)
750 self._generate_node_diff(
749 self._generate_node_diff(
751 buf, change, tgt_path, tgt_base, src_path, src_base)
750 buf, change, tgt_path, tgt_base, src_path, src_base)
752
751
753 def _generate_node_diff(
752 def _generate_node_diff(
754 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
753 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
755
754
756 tgt_path_bytes = safe_bytes(tgt_path)
755 tgt_path_bytes = safe_bytes(tgt_path)
757 tgt_path = safe_str(tgt_path)
756 tgt_path = safe_str(tgt_path)
758
757
759 src_path_bytes = safe_bytes(src_path)
758 src_path_bytes = safe_bytes(src_path)
760 src_path = safe_str(src_path)
759 src_path = safe_str(src_path)
761
760
762 if self.src_rev == self.tgt_rev and tgt_base == src_base:
761 if self.src_rev == self.tgt_rev and tgt_base == src_base:
763 # for consistency with git/hg, return an empty diff when comparing
762 # for consistency with git/hg, return an empty diff when comparing
764 # the same revisions
763 # the same revisions
765 return
764 return
766
765
767 tgt_full_path = vcspath.join(tgt_base, tgt_path)
766 tgt_full_path = vcspath.join(tgt_base, tgt_path)
768 src_full_path = vcspath.join(src_base, src_path)
767 src_full_path = vcspath.join(src_base, src_path)
769
768
770 self.binary_content = False
769 self.binary_content = False
771 mime_type = self._get_mime_type(tgt_full_path)
770 mime_type = self._get_mime_type(tgt_full_path)
772
771
773 if mime_type and not mime_type.startswith(b'text'):
772 if mime_type and not mime_type.startswith(b'text'):
774 self.binary_content = True
773 self.binary_content = True
775 buf.write(b"=" * 67 + b'\n')
774 buf.write(b"=" * 67 + b'\n')
776 buf.write(b"Cannot display: file marked as a binary type.\n")
775 buf.write(b"Cannot display: file marked as a binary type.\n")
777 buf.write(b"svn:mime-type = %s\n" % mime_type)
776 buf.write(b"svn:mime-type = %s\n" % mime_type)
778 buf.write(b"Index: %b\n" % tgt_path_bytes)
777 buf.write(b"Index: %b\n" % tgt_path_bytes)
779 buf.write(b"=" * 67 + b'\n')
778 buf.write(b"=" * 67 + b'\n')
780 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
779 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
781
780
782 if change == 'add':
781 if change == 'add':
783 # TODO: johbo: SVN is missing a zero here compared to git
782 # TODO: johbo: SVN is missing a zero here compared to git
784 buf.write(b"new file mode 10644\n")
783 buf.write(b"new file mode 10644\n")
785
784
786 # TODO(marcink): intro to binary detection of svn patches
785 # TODO(marcink): intro to binary detection of svn patches
787 # if self.binary_content:
786 # if self.binary_content:
788 # buf.write(b'GIT binary patch\n')
787 # buf.write(b'GIT binary patch\n')
789
788
790 buf.write(b"--- /dev/null\t(revision 0)\n")
789 buf.write(b"--- /dev/null\t(revision 0)\n")
791 src_lines = []
790 src_lines = []
792 else:
791 else:
793 if change == 'delete':
792 if change == 'delete':
794 buf.write(b"deleted file mode 10644\n")
793 buf.write(b"deleted file mode 10644\n")
795
794
796 # TODO(marcink): intro to binary detection of svn patches
795 # TODO(marcink): intro to binary detection of svn patches
797 # if self.binary_content:
796 # if self.binary_content:
798 # buf.write('GIT binary patch\n')
797 # buf.write('GIT binary patch\n')
799
798
800 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
799 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
801 src_lines = self._svn_readlines(self.src_root, src_full_path)
800 src_lines = self._svn_readlines(self.src_root, src_full_path)
802
801
803 if change == 'delete':
802 if change == 'delete':
804 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
803 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
805 tgt_lines = []
804 tgt_lines = []
806 else:
805 else:
807 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
806 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
808 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
807 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
809
808
810 # the diff header is written; now generate the diff content into the buffer
809 # the diff header is written; now generate the diff content into the buffer
811
810
812 if not self.binary_content:
811 if not self.binary_content:
813 udiff = svn_diff.unified_diff(
812 udiff = svn_diff.unified_diff(
814 src_lines, tgt_lines, context=self.context,
813 src_lines, tgt_lines, context=self.context,
815 ignore_blank_lines=self.ignore_whitespace,
814 ignore_blank_lines=self.ignore_whitespace,
816 ignore_case=False,
815 ignore_case=False,
817 ignore_space_changes=self.ignore_whitespace)
816 ignore_space_changes=self.ignore_whitespace)
818
817
819 buf.writelines(udiff)
818 buf.writelines(udiff)
820
819
821 def _get_mime_type(self, path) -> bytes:
820 def _get_mime_type(self, path) -> bytes:
822 try:
821 try:
823 mime_type = svn.fs.node_prop(
822 mime_type = svn.fs.node_prop(
824 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
823 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
825 except svn.core.SubversionException:
824 except svn.core.SubversionException:
826 mime_type = svn.fs.node_prop(
825 mime_type = svn.fs.node_prop(
827 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
826 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
828 return mime_type
827 return mime_type
829
828
830 def _svn_readlines(self, fs_root, node_path):
829 def _svn_readlines(self, fs_root, node_path):
831 if self.binary_content:
830 if self.binary_content:
832 return []
831 return []
833 node_kind = svn.fs.check_path(fs_root, node_path)
832 node_kind = svn.fs.check_path(fs_root, node_path)
834 if node_kind not in (
833 if node_kind not in (
835 svn.core.svn_node_file, svn.core.svn_node_symlink):
834 svn.core.svn_node_file, svn.core.svn_node_symlink):
836 return []
835 return []
837 content = svn.core.Stream(
836 content = svn.core.Stream(
838 svn.fs.file_contents(fs_root, node_path)).read()
837 svn.fs.file_contents(fs_root, node_path)).read()
839
838
840 return content.splitlines(True)
839 return content.splitlines(True)
841
840
842
841
843 class DiffChangeEditor(svn.delta.Editor):
842 class DiffChangeEditor(svn.delta.Editor):
844 """
843 """
845 Records changes between two given revisions
844 Records changes between two given revisions
846 """
845 """
847
846
848 def __init__(self):
847 def __init__(self):
849 self.changes = []
848 self.changes = []
850
849
851 def delete_entry(self, path, revision, parent_baton, pool=None):
850 def delete_entry(self, path, revision, parent_baton, pool=None):
852 self.changes.append((path, None, 'delete'))
851 self.changes.append((path, None, 'delete'))
853
852
854 def add_file(
853 def add_file(
855 self, path, parent_baton, copyfrom_path, copyfrom_revision,
854 self, path, parent_baton, copyfrom_path, copyfrom_revision,
856 file_pool=None):
855 file_pool=None):
857 self.changes.append((path, 'file', 'add'))
856 self.changes.append((path, 'file', 'add'))
858
857
859 def open_file(self, path, parent_baton, base_revision, file_pool=None):
858 def open_file(self, path, parent_baton, base_revision, file_pool=None):
860 self.changes.append((path, 'file', 'change'))
859 self.changes.append((path, 'file', 'change'))
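# The collected tuples have the shape (path, node_kind, action); SvnDiffer
# consumes them in _generate_dir_diff after driving this editor through
# svn.delta.make_editor and svn.repos.dir_delta2.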
861
860
862
861
863 def authorization_callback_allow_all(root, path, pool):
862 def authorization_callback_allow_all(root, path, pool):
864 return True
863 return True
865
864
866
865
867 class TxnNodeProcessor:
866 class TxnNodeProcessor:
868 """
867 """
869 Utility to process the change of one node within a transaction root.
868 Utility to process the change of one node within a transaction root.
870
869
871 It encapsulates the knowledge of how to add, update or remove
870 It encapsulates the knowledge of how to add, update or remove
872 a node for a given transaction root. The purpose is to support the method
871 a node for a given transaction root. The purpose is to support the method
873 `SvnRemote.commit`.
872 `SvnRemote.commit`.
874 """
873 """
875
874
876 def __init__(self, node, txn_root):
875 def __init__(self, node, txn_root):
877 assert_bytes(node['path'])
876 assert_bytes(node['path'])
878
877
879 self.node = node
878 self.node = node
880 self.txn_root = txn_root
879 self.txn_root = txn_root
881
880
882 def update(self):
881 def update(self):
883 self._ensure_parent_dirs()
882 self._ensure_parent_dirs()
884 self._add_file_if_node_does_not_exist()
883 self._add_file_if_node_does_not_exist()
885 self._update_file_content()
884 self._update_file_content()
886 self._update_file_properties()
885 self._update_file_properties()
887
886
888 def remove(self):
887 def remove(self):
889 svn.fs.delete(self.txn_root, self.node['path'])
888 svn.fs.delete(self.txn_root, self.node['path'])
890 # TODO: Clean up directory if empty
889 # TODO: Clean up directory if empty
891
890
892 def _ensure_parent_dirs(self):
891 def _ensure_parent_dirs(self):
893 curdir = vcspath.dirname(self.node['path'])
892 curdir = vcspath.dirname(self.node['path'])
894 dirs_to_create = []
893 dirs_to_create = []
895 while not self._svn_path_exists(curdir):
894 while not self._svn_path_exists(curdir):
896 dirs_to_create.append(curdir)
895 dirs_to_create.append(curdir)
897 curdir = vcspath.dirname(curdir)
896 curdir = vcspath.dirname(curdir)
898
897
899 for curdir in reversed(dirs_to_create):
898 for curdir in reversed(dirs_to_create):
900 log.debug('Creating missing directory "%s"', curdir)
899 log.debug('Creating missing directory "%s"', curdir)
901 svn.fs.make_dir(self.txn_root, curdir)
900 svn.fs.make_dir(self.txn_root, curdir)
902
901
903 def _svn_path_exists(self, path):
902 def _svn_path_exists(self, path):
904 path_status = svn.fs.check_path(self.txn_root, path)
903 path_status = svn.fs.check_path(self.txn_root, path)
905 return path_status != svn.core.svn_node_none
904 return path_status != svn.core.svn_node_none
906
905
907 def _add_file_if_node_does_not_exist(self):
906 def _add_file_if_node_does_not_exist(self):
908 kind = svn.fs.check_path(self.txn_root, self.node['path'])
907 kind = svn.fs.check_path(self.txn_root, self.node['path'])
909 if kind == svn.core.svn_node_none:
908 if kind == svn.core.svn_node_none:
910 svn.fs.make_file(self.txn_root, self.node['path'])
909 svn.fs.make_file(self.txn_root, self.node['path'])
911
910
912 def _update_file_content(self):
911 def _update_file_content(self):
913 assert_bytes(self.node['content'])
912 assert_bytes(self.node['content'])
914
913
915 handler, baton = svn.fs.apply_textdelta(
914 handler, baton = svn.fs.apply_textdelta(
916 self.txn_root, self.node['path'], None, None)
915 self.txn_root, self.node['path'], None, None)
917 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
916 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
918
917
919 def _update_file_properties(self):
918 def _update_file_properties(self):
920 properties = self.node.get('properties', {})
919 properties = self.node.get('properties', {})
921 for key, value in properties.items():
920 for key, value in properties.items():
922 svn.fs.change_node_prop(
921 svn.fs.change_node_prop(
923 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
922 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
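# Illustrative node layout (hypothetical values, editor's sketch):
#
#   node = {
#       'path': b'docs/readme.rst',                  # must be bytes
#       'content': b'new file content',              # must be bytes, used by update()
#       'properties': {'svn:eol-style': 'native'},   # optional
#   }
#   TxnNodeProcessor(node, txn_root).update()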
924
923
925
924
926 def apr_time_t(timestamp):
925 def apr_time_t(timestamp):
927 """
926 """
928 Convert a Python timestamp into APR timestamp type apr_time_t
927 Convert a Python timestamp into APR timestamp type apr_time_t
929 """
928 """
930 return int(timestamp * 1E6)
929 return int(timestamp * 1E6)
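# Example: apr_time_t(3.5) == 3500000, i.e. seconds are converted to
# microseconds as expected by the APR/Subversion APIs.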
931
930
932
931
933 def svn_opt_revision_value_t(num):
932 def svn_opt_revision_value_t(num):
934 """
933 """
935 Put `num` into a `svn_opt_revision_value_t` structure.
934 Put `num` into a `svn_opt_revision_value_t` structure.
936 """
935 """
937 value = svn.core.svn_opt_revision_value_t()
936 value = svn.core.svn_opt_revision_value_t()
938 value.number = num
937 value.number = num
939 revision = svn.core.svn_opt_revision_t()
938 revision = svn.core.svn_opt_revision_t()
940 revision.kind = svn.core.svn_opt_revision_number
939 revision.kind = svn.core.svn_opt_revision_number
941 revision.value = value
940 revision.value = value
942 return revision
941 return revision
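# Example: svn_opt_revision_value_t(42) returns a revision struct with
# kind == svn_opt_revision_number and value.number == 42, suitable for the
# peg/start/end revision arguments of svn.client calls such as blame2 above.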