##// END OF EJS Templates
subprocess: use subprocessio helper to run various subprocess commands.
marcink -
r370:79380b7f default
parent child Browse files
Show More
@@ -1,484 +1,467 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import json
23 import json
24 import logging
24 import logging
25 import collections
25 import collections
26 import importlib
26 import importlib
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
# Module-level logger; all hook functions log through this.
log = logging.getLogger(__name__)
class HooksHttpClient(object):
    """Hook client that forwards each hook call to a remote HTTP endpoint.

    The endpoint address (host:port) is given by ``hooks_uri``; every call
    performs one ``POST /`` request carrying a JSON payload and expects a
    JSON-encoded result back.
    """

    connection = None

    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __call__(self, method, extras):
        # One fresh connection per hook invocation.
        conn = HTTPConnection(self.hooks_uri)
        conn.request('POST', '/', self._serialize(method, extras))
        return json.loads(conn.getresponse().read())

    def _serialize(self, hook_name, extras):
        # Wire format understood by the hooks HTTP daemon.
        return json.dumps({'method': hook_name, 'extras': extras})
59
59
60
60
class HooksDummyClient(object):
    """Hook client that dispatches in-process to an importable hooks module."""

    def __init__(self, hooks_module):
        self._hooks_module = importlib.import_module(hooks_module)

    def __call__(self, hook_name, extras):
        # The module is expected to expose a ``Hooks`` context manager with
        # one method per hook name.
        with self._hooks_module.Hooks() as hooks:
            hook = getattr(hooks, hook_name)
            return hook(extras)
68
68
69
69
class RemoteMessageWriter(object):
    """Interface for relaying messages back to the vcs client."""

    def write(self, message):
        # Subclasses implement the concrete transport.
        raise NotImplementedError()
74
74
75
75
class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message):
        # TODO: Check why the quiet flag is set by default.
        # Temporarily lift quiet mode so the status line is actually shown,
        # then restore whatever value was there before.
        quiet_before = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = quiet_before
88
88
89
89
class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        # Default to the process stdout when no stream is supplied.
        self.stdout = stdout or sys.stdout

    def write(self, message):
        encoded = message.encode('utf-8')
        self.stdout.write(encoded)
98
98
99
99
def _handle_exception(result):
    """Re-raise, locally, an exception described in a hook ``result`` dict.

    Known remote exception classes are mapped onto local exception types;
    anything else is raised as a generic ``Exception``. No-op when the
    result carries no exception information.
    """
    exception_class = result.get('exception')
    exception_traceback = result.get('exception_traceback')

    if exception_traceback:
        log.error('Got traceback from remote call:%s', exception_traceback)

    if not exception_class:
        return
    if exception_class == 'HTTPLockedRC':
        raise exceptions.RepositoryLockedException(*result['exception_args'])
    if exception_class == 'RepositoryError':
        raise exceptions.VcsException(*result['exception_args'])
    raise Exception('Got remote exception "%s" with args "%s"' %
                    (exception_class, result['exception_args']))
114
114
115
115
def _get_hooks_client(extras):
    """Select the hook transport based on the ``extras`` contents.

    A ``hooks_uri`` entry selects the HTTP client; otherwise hooks are
    dispatched in-process through the module named by ``hooks_module``.

    Note: the original body read ``extras.get('hooks_protocol')`` into an
    unused local; that dead assignment has been removed.
    """
    if 'hooks_uri' in extras:
        return HooksHttpClient(extras['hooks_uri'])
    return HooksDummyClient(extras['hooks_module'])
122
122
123
123
def _call_hook(hook_name, extras, writer):
    """Invoke ``hook_name`` via the configured client and return its status.

    The hook's textual output is relayed through ``writer``; any exception
    reported by the remote side is re-raised locally.
    """
    client = _get_hooks_client(extras)
    result = client(hook_name, extras)
    log.debug('Hooks got result: %s', result)
    writer.write(result['output'])
    _handle_exception(result)
    return result['status']
132
132
133
133
134 def _extras_from_ui(ui):
134 def _extras_from_ui(ui):
135 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
135 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
136 if not hook_data:
136 if not hook_data:
137 # maybe it's inside environ ?
137 # maybe it's inside environ ?
138 env_hook_data = os.environ.get('RC_SCM_DATA')
138 env_hook_data = os.environ.get('RC_SCM_DATA')
139 if env_hook_data:
139 if env_hook_data:
140 hook_data = env_hook_data
140 hook_data = env_hook_data
141
141
142 extras = {}
142 extras = {}
143 if hook_data:
143 if hook_data:
144 extras = json.loads(hook_data)
144 extras = json.loads(hook_data)
145 return extras
145 return extras
146
146
147
147
def _rev_range_hash(repo, node):
    """Collect ``(commit_id, branch)`` pairs from ``node`` up to the repo tip.

    ``node`` marks the first pushed changeset; everything from there to the
    end of the repo belongs to the incoming changegroup.
    """
    commits = []
    for rev in xrange(repo[node], len(repo)):
        ctx = repo[rev]
        commits.append((mercurial.node.hex(ctx.node()), ctx.branch()))
    return commits
158
158
159
159
def repo_size(ui, repo, **kwargs):
    """Mercurial hook: ask RhodeCode to recompute the repository size."""
    return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
163
163
164
164
def pre_pull(ui, repo, **kwargs):
    """Mercurial pre-pull hook: delegate the permission check to RhodeCode."""
    return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
168
168
169
169
def pre_pull_ssh(ui, repo, **kwargs):
    """SSH variant of :func:`pre_pull`; no-op unless extras flag SSH."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return pre_pull(ui, repo, **kwargs)
175
175
176
176
def post_pull(ui, repo, **kwargs):
    """Mercurial post-pull hook: notify RhodeCode a pull happened."""
    return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
180
180
181
181
def post_pull_ssh(ui, repo, **kwargs):
    """SSH variant of :func:`post_pull`; no-op unless extras flag SSH."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return post_pull(ui, repo, **kwargs)
187
187
188
188
def pre_push(ui, repo, node=None, **kwargs):
    """Mercurial pre-push hook: send incoming revision data to RhodeCode.

    Revision data is only collected for ``pretxnchangegroup`` invocations
    (``node`` is the first incoming changeset); other hook types send an
    empty ``commit_ids`` list.
    """
    extras = _extras_from_ui(ui)

    rev_data = []
    if node and kwargs.get('hooktype') == 'pretxnchangegroup':
        # Group the incoming commit ids by branch.
        branches = collections.defaultdict(list)
        for commit_id, branch in _rev_range_hash(repo, node):
            branches[branch].append(commit_id)

        for branch, commits in branches.iteritems():
            # Without an explicit last-node marker, fall back to the first
            # commit of the branch as the "old" revision.
            old_rev = kwargs.get('node_last') or commits[0]
            rev_data.append({
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, HgMessageWriter(ui))
210
210
211
211
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """SSH variant of :func:`pre_push`; no-op unless extras flag SSH."""
    if not _extras_from_ui(ui).get('SSH'):
        return 0
    return pre_push(ui, repo, node, **kwargs)
217
217
218
218
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """Authorize an SSH push: return 0 to allow it, 1 to reject it.

    Non-SSH invocations are always allowed (handled elsewhere).
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code
    return 1
231
231
232
232
def post_push(ui, repo, node, **kwargs):
    """Mercurial post-push hook: report pushed commits and refs to RhodeCode.

    ``node`` is the first pushed changeset; all changesets from there to
    the repo tip are part of this push.
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    bookmarks = []
    tags = []

    for commit_id, branch in _rev_range_hash(repo, node):
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # key_push() stashes freshly pushed bookmarks on the ui object so they
    # can be reported here.
    if hasattr(ui, '_rc_pushkey_branches'):
        bookmarks = ui._rc_pushkey_branches

    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
257
257
258
258
def post_push_ssh(ui, repo, node, **kwargs):
    """SSH variant of :func:`post_push`; no-op unless extras flag SSH."""
    if not _extras_from_ui(ui).get('SSH'):
        return 0
    return post_push(ui, repo, node, **kwargs)
263
263
264
264
def key_push(ui, repo, **kwargs):
    """Mercurial pushkey hook: remember newly pushed bookmarks.

    Always returns ``None``; its only effect is stashing bookmark names on
    the ui object for :func:`post_push` to pick up.
    """
    is_new_bookmark = (
        kwargs['namespace'] == 'bookmarks' and kwargs['new'] != '0')
    if is_new_bookmark:
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
    return
270
270
271
271
# backward compat: older installed hook scripts still reference the log_*
# names, which are plain aliases of the post_* implementations.
log_pull_action = post_pull

# backward compat
log_push_action = post_push
277
277
278
278
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    Intentionally does nothing.
    """
286
286
287
287
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    Intentionally does nothing.
    """
295
295
296
296
# Lightweight (status, output) pair returned by the git pull hook wrappers.
HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
298
298
299
299
def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: hook status and its captured output; status 0 means success.
    :rtype: HookResponse
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    output = io.BytesIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(output))
    except Exception as error:
        # Surface the failure to the git client instead of crashing the hook.
        status = 128
        output.write('ERROR: %s\n' % str(error))

    return HookResponse(status, output.getvalue())
321
321
322
322
def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: hook status and its captured output; status 0 means success.
    :rtype: HookResponse
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    output = io.BytesIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(output))
    except Exception as error:
        # Surface the failure to the git client instead of crashing the hook.
        status = 128
        output.write('ERROR: %s\n' % error)

    return HookResponse(status, output.getvalue())
344
344
345
345
346 def _parse_git_ref_lines(revision_lines):
346 def _parse_git_ref_lines(revision_lines):
347 rev_data = []
347 rev_data = []
348 for revision_line in revision_lines or []:
348 for revision_line in revision_lines or []:
349 old_rev, new_rev, ref = revision_line.strip().split(' ')
349 old_rev, new_rev, ref = revision_line.strip().split(' ')
350 ref_data = ref.split('/', 2)
350 ref_data = ref.split('/', 2)
351 if ref_data[1] in ('tags', 'heads'):
351 if ref_data[1] in ('tags', 'heads'):
352 rev_data.append({
352 rev_data.append({
353 'old_rev': old_rev,
353 'old_rev': old_rev,
354 'new_rev': new_rev,
354 'new_rev': new_rev,
355 'ref': ref,
355 'ref': ref,
356 'type': ref_data[1],
356 'type': ref_data[1],
357 'name': ref_data[2],
357 'name': ref_data[2],
358 })
358 })
359 return rev_data
359 return rev_data
360
360
361
361
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    :param revision_lines: ``old new ref`` lines read from stdin by the
        git pre-receive hook.
    :param env: process environment holding the RC_SCM_DATA JSON blob.

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        # Push hook not enabled for this repo; nothing to check.
        return 0
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())
378
378
379
379
def _run_command(arguments):
    """
    Run the specified command and return the stdout.

    :param arguments: sequence of program arguments (including the program name)
    :type arguments: list[str]
    :raises Exception: when the command cannot be started or executed.
    """
    cmd = arguments
    try:
        gitenv = os.environ.copy()
        _opts = {'env': gitenv, 'shell': False, 'fail_on_stderr': False}
        p = subprocessio.SubprocessIOChunker(cmd, **_opts)
        stdout = ''.join(p)
    except EnvironmentError as err:
        # EnvironmentError already covers OSError (and IOError); the former
        # (EnvironmentError, OSError) tuple was redundant.
        cmd = ' '.join(cmd)  # human friendly CMD
        tb_err = ("Couldn't run git command (%s).\n"
                  "Original error was:%s\n" % (cmd, err))
        log.exception(tb_err)
        raise Exception(tb_err)

    return stdout
402
403
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # Fix up head revision if needed: if `git show HEAD` fails,
                # point HEAD at the newly pushed branch.
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
                           'refs/heads/%s' % push_ref['name']]
                    print("Setting default branch to %s" % push_ref['name'])
                    subprocessio.run_command(cmd, env=os.environ.copy())

                # List every head except the one just pushed, so we can log
                # only the commits that are new to the repository.
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head in heads.splitlines() if head)
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                # update of an existing branch
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    # Repo-size recalculation is best-effort; never fail the push over it.
    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter())
        except:
            pass

    return _call_hook('post_push', extras, GitMessageWriter())
@@ -1,484 +1,511 b''
1 """
1 """
2 Module provides a class allowing to wrap communication over subprocess.Popen
2 Module provides a class allowing to wrap communication over subprocess.Popen
3 input, output, error streams into a meaningfull, non-blocking, concurrent
3 input, output, error streams into a meaningfull, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fitting to be a
4 stream processor exposing the output data as an iterator fitting to be a
5 return value passed by a WSGI applicaiton to a WSGI server per PEP 3333.
5 return value passed by a WSGI applicaiton to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import os
25 import os
26 import logging
26 import logging
27 import subprocess32 as subprocess
27 import subprocess32 as subprocess
28 from collections import deque
28 from collections import deque
29 from threading import Event, Thread
29 from threading import Event, Thread
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class StreamFeeder(Thread):
34 class StreamFeeder(Thread):
35 """
35 """
36 Normal writing into pipe-like is blocking once the buffer is filled.
36 Normal writing into pipe-like is blocking once the buffer is filled.
37 This thread allows a thread to seep data from a file-like into a pipe
37 This thread allows a thread to seep data from a file-like into a pipe
38 without blocking the main thread.
38 without blocking the main thread.
39 We close inpipe once the end of the source stream is reached.
39 We close inpipe once the end of the source stream is reached.
40 """
40 """
41
41
42 def __init__(self, source):
42 def __init__(self, source):
43 super(StreamFeeder, self).__init__()
43 super(StreamFeeder, self).__init__()
44 self.daemon = True
44 self.daemon = True
45 filelike = False
45 filelike = False
46 self.bytes = bytes()
46 self.bytes = bytes()
47 if type(source) in (type(''), bytes, bytearray): # string-like
47 if type(source) in (type(''), bytes, bytearray): # string-like
48 self.bytes = bytes(source)
48 self.bytes = bytes(source)
49 else: # can be either file pointer or file-like
49 else: # can be either file pointer or file-like
50 if type(source) in (int, long): # file pointer it is
50 if type(source) in (int, long): # file pointer it is
51 # converting file descriptor (int) stdin into file-like
51 # converting file descriptor (int) stdin into file-like
52 try:
52 try:
53 source = os.fdopen(source, 'rb', 16384)
53 source = os.fdopen(source, 'rb', 16384)
54 except Exception:
54 except Exception:
55 pass
55 pass
56 # let's see if source is file-like by now
56 # let's see if source is file-like by now
57 try:
57 try:
58 filelike = source.read
58 filelike = source.read
59 except Exception:
59 except Exception:
60 pass
60 pass
61 if not filelike and not self.bytes:
61 if not filelike and not self.bytes:
62 raise TypeError("StreamFeeder's source object must be a readable "
62 raise TypeError("StreamFeeder's source object must be a readable "
63 "file-like, a file descriptor, or a string-like.")
63 "file-like, a file descriptor, or a string-like.")
64 self.source = source
64 self.source = source
65 self.readiface, self.writeiface = os.pipe()
65 self.readiface, self.writeiface = os.pipe()
66
66
67 def run(self):
67 def run(self):
68 t = self.writeiface
68 t = self.writeiface
69 if self.bytes:
69 if self.bytes:
70 os.write(t, self.bytes)
70 os.write(t, self.bytes)
71 else:
71 else:
72 s = self.source
72 s = self.source
73 b = s.read(4096)
73 b = s.read(4096)
74 while b:
74 while b:
75 os.write(t, b)
75 os.write(t, b)
76 b = s.read(4096)
76 b = s.read(4096)
77 os.close(t)
77 os.close(t)
78
78
79 @property
79 @property
80 def output(self):
80 def output(self):
81 return self.readiface
81 return self.readiface
82
82
83
83
84 class InputStreamChunker(Thread):
84 class InputStreamChunker(Thread):
85 def __init__(self, source, target, buffer_size, chunk_size):
85 def __init__(self, source, target, buffer_size, chunk_size):
86
86
87 super(InputStreamChunker, self).__init__()
87 super(InputStreamChunker, self).__init__()
88
88
89 self.daemon = True # die die die.
89 self.daemon = True # die die die.
90
90
91 self.source = source
91 self.source = source
92 self.target = target
92 self.target = target
93 self.chunk_count_max = int(buffer_size / chunk_size) + 1
93 self.chunk_count_max = int(buffer_size / chunk_size) + 1
94 self.chunk_size = chunk_size
94 self.chunk_size = chunk_size
95
95
96 self.data_added = Event()
96 self.data_added = Event()
97 self.data_added.clear()
97 self.data_added.clear()
98
98
99 self.keep_reading = Event()
99 self.keep_reading = Event()
100 self.keep_reading.set()
100 self.keep_reading.set()
101
101
102 self.EOF = Event()
102 self.EOF = Event()
103 self.EOF.clear()
103 self.EOF.clear()
104
104
105 self.go = Event()
105 self.go = Event()
106 self.go.set()
106 self.go.set()
107
107
108 def stop(self):
108 def stop(self):
109 self.go.clear()
109 self.go.clear()
110 self.EOF.set()
110 self.EOF.set()
111 try:
111 try:
112 # this is not proper, but is done to force the reader thread let
112 # this is not proper, but is done to force the reader thread let
113 # go of the input because, if successful, .close() will send EOF
113 # go of the input because, if successful, .close() will send EOF
114 # down the pipe.
114 # down the pipe.
115 self.source.close()
115 self.source.close()
116 except:
116 except:
117 pass
117 pass
118
118
119 def run(self):
119 def run(self):
120 s = self.source
120 s = self.source
121 t = self.target
121 t = self.target
122 cs = self.chunk_size
122 cs = self.chunk_size
123 chunk_count_max = self.chunk_count_max
123 chunk_count_max = self.chunk_count_max
124 keep_reading = self.keep_reading
124 keep_reading = self.keep_reading
125 da = self.data_added
125 da = self.data_added
126 go = self.go
126 go = self.go
127
127
128 try:
128 try:
129 b = s.read(cs)
129 b = s.read(cs)
130 except ValueError:
130 except ValueError:
131 b = ''
131 b = ''
132
132
133 timeout_input = 20
133 timeout_input = 20
134 while b and go.is_set():
134 while b and go.is_set():
135 if len(t) > chunk_count_max:
135 if len(t) > chunk_count_max:
136 keep_reading.clear()
136 keep_reading.clear()
137 keep_reading.wait(timeout_input)
137 keep_reading.wait(timeout_input)
138 if len(t) > chunk_count_max + timeout_input:
138 if len(t) > chunk_count_max + timeout_input:
139 log.error("Timed out while waiting for input from subprocess.")
139 log.error("Timed out while waiting for input from subprocess.")
140 os._exit(-1) # this will cause the worker to recycle itself
140 os._exit(-1) # this will cause the worker to recycle itself
141
141
142 t.append(b)
142 t.append(b)
143 da.set()
143 da.set()
144 b = s.read(cs)
144 b = s.read(cs)
145 self.EOF.set()
145 self.EOF.set()
146 da.set() # for cases when done but there was no input.
146 da.set() # for cases when done but there was no input.
147
147
148
148
149 class BufferedGenerator(object):
149 class BufferedGenerator(object):
150 """
150 """
151 Class behaves as a non-blocking, buffered pipe reader.
151 Class behaves as a non-blocking, buffered pipe reader.
152 Reads chunks of data (through a thread)
152 Reads chunks of data (through a thread)
153 from a blocking pipe, and attaches these to an array (Deque) of chunks.
153 from a blocking pipe, and attaches these to an array (Deque) of chunks.
154 Reading is halted in the thread when max chunks is internally buffered.
154 Reading is halted in the thread when max chunks is internally buffered.
155 The .next() may operate in blocking or non-blocking fashion by yielding
155 The .next() may operate in blocking or non-blocking fashion by yielding
156 '' if no data is ready
156 '' if no data is ready
157 to be sent or by not returning until there is some data to send
157 to be sent or by not returning until there is some data to send
158 When we get EOF from underlying source pipe we raise the marker to raise
158 When we get EOF from underlying source pipe we raise the marker to raise
159 StopIteration after the last chunk of data is yielded.
159 StopIteration after the last chunk of data is yielded.
160 """
160 """
161
161
162 def __init__(self, source, buffer_size=65536, chunk_size=4096,
162 def __init__(self, source, buffer_size=65536, chunk_size=4096,
163 starting_values=None, bottomless=False):
163 starting_values=None, bottomless=False):
164 starting_values = starting_values or []
164 starting_values = starting_values or []
165
165
166 if bottomless:
166 if bottomless:
167 maxlen = int(buffer_size / chunk_size)
167 maxlen = int(buffer_size / chunk_size)
168 else:
168 else:
169 maxlen = None
169 maxlen = None
170
170
171 self.data = deque(starting_values, maxlen)
171 self.data = deque(starting_values, maxlen)
172 self.worker = InputStreamChunker(source, self.data, buffer_size,
172 self.worker = InputStreamChunker(source, self.data, buffer_size,
173 chunk_size)
173 chunk_size)
174 if starting_values:
174 if starting_values:
175 self.worker.data_added.set()
175 self.worker.data_added.set()
176 self.worker.start()
176 self.worker.start()
177
177
178 ####################
178 ####################
179 # Generator's methods
179 # Generator's methods
180 ####################
180 ####################
181
181
182 def __iter__(self):
182 def __iter__(self):
183 return self
183 return self
184
184
185 def next(self):
185 def next(self):
186 while not len(self.data) and not self.worker.EOF.is_set():
186 while not len(self.data) and not self.worker.EOF.is_set():
187 self.worker.data_added.clear()
187 self.worker.data_added.clear()
188 self.worker.data_added.wait(0.2)
188 self.worker.data_added.wait(0.2)
189 if len(self.data):
189 if len(self.data):
190 self.worker.keep_reading.set()
190 self.worker.keep_reading.set()
191 return bytes(self.data.popleft())
191 return bytes(self.data.popleft())
192 elif self.worker.EOF.is_set():
192 elif self.worker.EOF.is_set():
193 raise StopIteration
193 raise StopIteration
194
194
195 def throw(self, exc_type, value=None, traceback=None):
195 def throw(self, exc_type, value=None, traceback=None):
196 if not self.worker.EOF.is_set():
196 if not self.worker.EOF.is_set():
197 raise exc_type(value)
197 raise exc_type(value)
198
198
199 def start(self):
199 def start(self):
200 self.worker.start()
200 self.worker.start()
201
201
202 def stop(self):
202 def stop(self):
203 self.worker.stop()
203 self.worker.stop()
204
204
205 def close(self):
205 def close(self):
206 try:
206 try:
207 self.worker.stop()
207 self.worker.stop()
208 self.throw(GeneratorExit)
208 self.throw(GeneratorExit)
209 except (GeneratorExit, StopIteration):
209 except (GeneratorExit, StopIteration):
210 pass
210 pass
211
211
212 def __del__(self):
212 def __del__(self):
213 self.close()
213 self.close()
214
214
215 ####################
215 ####################
216 # Threaded reader's infrastructure.
216 # Threaded reader's infrastructure.
217 ####################
217 ####################
218 @property
218 @property
219 def input(self):
219 def input(self):
220 return self.worker.w
220 return self.worker.w
221
221
222 @property
222 @property
223 def data_added_event(self):
223 def data_added_event(self):
224 return self.worker.data_added
224 return self.worker.data_added
225
225
226 @property
226 @property
227 def data_added(self):
227 def data_added(self):
228 return self.worker.data_added.is_set()
228 return self.worker.data_added.is_set()
229
229
230 @property
230 @property
231 def reading_paused(self):
231 def reading_paused(self):
232 return not self.worker.keep_reading.is_set()
232 return not self.worker.keep_reading.is_set()
233
233
234 @property
234 @property
235 def done_reading_event(self):
235 def done_reading_event(self):
236 """
236 """
237 Done_reding does not mean that the iterator's buffer is empty.
237 Done_reding does not mean that the iterator's buffer is empty.
238 Iterator might have done reading from underlying source, but the read
238 Iterator might have done reading from underlying source, but the read
239 chunks might still be available for serving through .next() method.
239 chunks might still be available for serving through .next() method.
240
240
241 :returns: An Event class instance.
241 :returns: An Event class instance.
242 """
242 """
243 return self.worker.EOF
243 return self.worker.EOF
244
244
245 @property
245 @property
246 def done_reading(self):
246 def done_reading(self):
247 """
247 """
248 Done_reding does not mean that the iterator's buffer is empty.
248 Done_reding does not mean that the iterator's buffer is empty.
249 Iterator might have done reading from underlying source, but the read
249 Iterator might have done reading from underlying source, but the read
250 chunks might still be available for serving through .next() method.
250 chunks might still be available for serving through .next() method.
251
251
252 :returns: An Bool value.
252 :returns: An Bool value.
253 """
253 """
254 return self.worker.EOF.is_set()
254 return self.worker.EOF.is_set()
255
255
256 @property
256 @property
257 def length(self):
257 def length(self):
258 """
258 """
259 returns int.
259 returns int.
260
260
261 This is the lenght of the que of chunks, not the length of
261 This is the lenght of the que of chunks, not the length of
262 the combined contents in those chunks.
262 the combined contents in those chunks.
263
263
264 __len__() cannot be meaningfully implemented because this
264 __len__() cannot be meaningfully implemented because this
265 reader is just flying throuh a bottomless pit content and
265 reader is just flying throuh a bottomless pit content and
266 can only know the lenght of what it already saw.
266 can only know the lenght of what it already saw.
267
267
268 If __len__() on WSGI server per PEP 3333 returns a value,
268 If __len__() on WSGI server per PEP 3333 returns a value,
269 the responce's length will be set to that. In order not to
269 the responce's length will be set to that. In order not to
270 confuse WSGI PEP3333 servers, we will not implement __len__
270 confuse WSGI PEP3333 servers, we will not implement __len__
271 at all.
271 at all.
272 """
272 """
273 return len(self.data)
273 return len(self.data)
274
274
275 def prepend(self, x):
275 def prepend(self, x):
276 self.data.appendleft(x)
276 self.data.appendleft(x)
277
277
278 def append(self, x):
278 def append(self, x):
279 self.data.append(x)
279 self.data.append(x)
280
280
281 def extend(self, o):
281 def extend(self, o):
282 self.data.extend(o)
282 self.data.extend(o)
283
283
284 def __getitem__(self, i):
284 def __getitem__(self, i):
285 return self.data[i]
285 return self.data[i]
286
286
287
287
288 class SubprocessIOChunker(object):
288 class SubprocessIOChunker(object):
289 """
289 """
290 Processor class wrapping handling of subprocess IO.
290 Processor class wrapping handling of subprocess IO.
291
291
292 .. important::
292 .. important::
293
293
294 Watch out for the method `__del__` on this class. If this object
294 Watch out for the method `__del__` on this class. If this object
295 is deleted, it will kill the subprocess, so avoid to
295 is deleted, it will kill the subprocess, so avoid to
296 return the `output` attribute or usage of it like in the following
296 return the `output` attribute or usage of it like in the following
297 example::
297 example::
298
298
299 # `args` expected to run a program that produces a lot of output
299 # `args` expected to run a program that produces a lot of output
300 output = ''.join(SubprocessIOChunker(
300 output = ''.join(SubprocessIOChunker(
301 args, shell=False, inputstream=inputstream, env=environ).output)
301 args, shell=False, inputstream=inputstream, env=environ).output)
302
302
303 # `output` will not contain all the data, because the __del__ method
303 # `output` will not contain all the data, because the __del__ method
304 # has already killed the subprocess in this case before all output
304 # has already killed the subprocess in this case before all output
305 # has been consumed.
305 # has been consumed.
306
306
307
307
308
308
309 In a way, this is a "communicate()" replacement with a twist.
309 In a way, this is a "communicate()" replacement with a twist.
310
310
311 - We are multithreaded. Writing in and reading out, err are all sep threads.
311 - We are multithreaded. Writing in and reading out, err are all sep threads.
312 - We support concurrent (in and out) stream processing.
312 - We support concurrent (in and out) stream processing.
313 - The output is not a stream. It's a queue of read string (bytes, not unicode)
313 - The output is not a stream. It's a queue of read string (bytes, not unicode)
314 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
314 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
315 - We are non-blocking in more respects than communicate()
315 - We are non-blocking in more respects than communicate()
316 (reading from subprocess out pauses when internal buffer is full, but
316 (reading from subprocess out pauses when internal buffer is full, but
317 does not block the parent calling code. On the flip side, reading from
317 does not block the parent calling code. On the flip side, reading from
318 slow-yielding subprocess may block the iteration until data shows up. This
318 slow-yielding subprocess may block the iteration until data shows up. This
319 does not block the parallel inpipe reading occurring parallel thread.)
319 does not block the parallel inpipe reading occurring parallel thread.)
320
320
321 The purpose of the object is to allow us to wrap subprocess interactions into
321 The purpose of the object is to allow us to wrap subprocess interactions into
322 and interable that can be passed to a WSGI server as the application's return
322 and interable that can be passed to a WSGI server as the application's return
323 value. Because of stream-processing-ability, WSGI does not have to read ALL
323 value. Because of stream-processing-ability, WSGI does not have to read ALL
324 of the subprocess's output and buffer it, before handing it to WSGI server for
324 of the subprocess's output and buffer it, before handing it to WSGI server for
325 HTTP response. Instead, the class initializer reads just a bit of the stream
325 HTTP response. Instead, the class initializer reads just a bit of the stream
326 to figure out if error ocurred or likely to occur and if not, just hands the
326 to figure out if error ocurred or likely to occur and if not, just hands the
327 further iteration over subprocess output to the server for completion of HTTP
327 further iteration over subprocess output to the server for completion of HTTP
328 response.
328 response.
329
329
330 The real or perceived subprocess error is trapped and raised as one of
330 The real or perceived subprocess error is trapped and raised as one of
331 EnvironmentError family of exceptions
331 EnvironmentError family of exceptions
332
332
333 Example usage:
333 Example usage:
334 # try:
334 # try:
335 # answer = SubprocessIOChunker(
335 # answer = SubprocessIOChunker(
336 # cmd,
336 # cmd,
337 # input,
337 # input,
338 # buffer_size = 65536,
338 # buffer_size = 65536,
339 # chunk_size = 4096
339 # chunk_size = 4096
340 # )
340 # )
341 # except (EnvironmentError) as e:
341 # except (EnvironmentError) as e:
342 # print str(e)
342 # print str(e)
343 # raise e
343 # raise e
344 #
344 #
345 # return answer
345 # return answer
346
346
347
347
348 """
348 """
349
349
350 # TODO: johbo: This is used to make sure that the open end of the PIPE
350 # TODO: johbo: This is used to make sure that the open end of the PIPE
351 # is closed in the end. It would be way better to wrap this into an
351 # is closed in the end. It would be way better to wrap this into an
352 # object, so that it is closed automatically once it is consumed or
352 # object, so that it is closed automatically once it is consumed or
353 # something similar.
353 # something similar.
354 _close_input_fd = None
354 _close_input_fd = None
355
355
356 _closed = False
356 _closed = False
357
357
358 def __init__(self, cmd, inputstream=None, buffer_size=65536,
358 def __init__(self, cmd, inputstream=None, buffer_size=65536,
359 chunk_size=4096, starting_values=None, fail_on_stderr=True,
359 chunk_size=4096, starting_values=None, fail_on_stderr=True,
360 fail_on_return_code=True, **kwargs):
360 fail_on_return_code=True, **kwargs):
361 """
361 """
362 Initializes SubprocessIOChunker
362 Initializes SubprocessIOChunker
363
363
364 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
364 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
365 :param inputstream: (Default: None) A file-like, string, or file pointer.
365 :param inputstream: (Default: None) A file-like, string, or file pointer.
366 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
366 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
367 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
367 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
368 :param starting_values: (Default: []) An array of strings to put in front of output que.
368 :param starting_values: (Default: []) An array of strings to put in front of output que.
369 :param fail_on_stderr: (Default: True) Whether to raise an exception in
369 :param fail_on_stderr: (Default: True) Whether to raise an exception in
370 case something is written to stderr.
370 case something is written to stderr.
371 :param fail_on_return_code: (Default: True) Whether to raise an
371 :param fail_on_return_code: (Default: True) Whether to raise an
372 exception if the return code is not 0.
372 exception if the return code is not 0.
373 """
373 """
374
374
375 starting_values = starting_values or []
375 starting_values = starting_values or []
376 if inputstream:
376 if inputstream:
377 input_streamer = StreamFeeder(inputstream)
377 input_streamer = StreamFeeder(inputstream)
378 input_streamer.start()
378 input_streamer.start()
379 inputstream = input_streamer.output
379 inputstream = input_streamer.output
380 self._close_input_fd = inputstream
380 self._close_input_fd = inputstream
381
381
382 self._fail_on_stderr = fail_on_stderr
382 self._fail_on_stderr = fail_on_stderr
383 self._fail_on_return_code = fail_on_return_code
383 self._fail_on_return_code = fail_on_return_code
384
384
385 _shell = kwargs.get('shell', True)
385 _shell = kwargs.get('shell', True)
386 kwargs['shell'] = _shell
386 kwargs['shell'] = _shell
387
387
388 _p = subprocess.Popen(cmd, bufsize=-1,
388 _p = subprocess.Popen(cmd, bufsize=-1,
389 stdin=inputstream,
389 stdin=inputstream,
390 stdout=subprocess.PIPE,
390 stdout=subprocess.PIPE,
391 stderr=subprocess.PIPE,
391 stderr=subprocess.PIPE,
392 **kwargs)
392 **kwargs)
393
393
394 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
394 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
395 starting_values)
395 starting_values)
396 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
396 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
397
397
398 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
398 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
399 # doing this until we reach either end of file, or end of buffer.
399 # doing this until we reach either end of file, or end of buffer.
400 bg_out.data_added_event.wait(1)
400 bg_out.data_added_event.wait(1)
401 bg_out.data_added_event.clear()
401 bg_out.data_added_event.clear()
402
402
403 # at this point it's still ambiguous if we are done reading or just full buffer.
403 # at this point it's still ambiguous if we are done reading or just full buffer.
404 # Either way, if error (returned by ended process, or implied based on
404 # Either way, if error (returned by ended process, or implied based on
405 # presence of stuff in stderr output) we error out.
405 # presence of stuff in stderr output) we error out.
406 # Else, we are happy.
406 # Else, we are happy.
407 _returncode = _p.poll()
407 _returncode = _p.poll()
408
408
409 if ((_returncode and fail_on_return_code) or
409 if ((_returncode and fail_on_return_code) or
410 (fail_on_stderr and _returncode is None and bg_err.length)):
410 (fail_on_stderr and _returncode is None and bg_err.length)):
411 try:
411 try:
412 _p.terminate()
412 _p.terminate()
413 except Exception:
413 except Exception:
414 pass
414 pass
415 bg_out.stop()
415 bg_out.stop()
416 bg_err.stop()
416 bg_err.stop()
417 if fail_on_stderr:
417 if fail_on_stderr:
418 err = ''.join(bg_err)
418 err = ''.join(bg_err)
419 raise EnvironmentError(
419 raise EnvironmentError(
420 "Subprocess exited due to an error:\n" + err)
420 "Subprocess exited due to an error:\n" + err)
421 if _returncode and fail_on_return_code:
421 if _returncode and fail_on_return_code:
422 err = ''.join(bg_err)
422 err = ''.join(bg_err)
423 if not err:
423 if not err:
424 # maybe get empty stderr, try stdout instead
424 # maybe get empty stderr, try stdout instead
425 # in many cases git reports the errors on stdout too
425 # in many cases git reports the errors on stdout too
426 err = ''.join(bg_out)
426 err = ''.join(bg_out)
427 raise EnvironmentError(
427 raise EnvironmentError(
428 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
428 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
429 _returncode, err))
429 _returncode, err))
430
430
431 self.process = _p
431 self.process = _p
432 self.output = bg_out
432 self.output = bg_out
433 self.error = bg_err
433 self.error = bg_err
434
434
435 def __iter__(self):
435 def __iter__(self):
436 return self
436 return self
437
437
438 def next(self):
438 def next(self):
439 # Note: mikhail: We need to be sure that we are checking the return
439 # Note: mikhail: We need to be sure that we are checking the return
440 # code after the stdout stream is closed. Some processes, e.g. git
440 # code after the stdout stream is closed. Some processes, e.g. git
441 # are doing some magic in between closing stdout and terminating the
441 # are doing some magic in between closing stdout and terminating the
442 # process and, as a result, we are not getting return code on "slow"
442 # process and, as a result, we are not getting return code on "slow"
443 # systems.
443 # systems.
444 result = None
444 result = None
445 stop_iteration = None
445 stop_iteration = None
446 try:
446 try:
447 result = self.output.next()
447 result = self.output.next()
448 except StopIteration as e:
448 except StopIteration as e:
449 stop_iteration = e
449 stop_iteration = e
450
450
451 if self.process.poll() and self._fail_on_return_code:
451 if self.process.poll() and self._fail_on_return_code:
452 err = '%s' % ''.join(self.error)
452 err = '%s' % ''.join(self.error)
453 raise EnvironmentError(
453 raise EnvironmentError(
454 "Subprocess exited due to an error:\n" + err)
454 "Subprocess exited due to an error:\n" + err)
455
455
456 if stop_iteration:
456 if stop_iteration:
457 raise stop_iteration
457 raise stop_iteration
458 return result
458 return result
459
459
460 def throw(self, type, value=None, traceback=None):
460 def throw(self, type, value=None, traceback=None):
461 if self.output.length or not self.output.done_reading:
461 if self.output.length or not self.output.done_reading:
462 raise type(value)
462 raise type(value)
463
463
464 def close(self):
464 def close(self):
465 if self._closed:
465 if self._closed:
466 return
466 return
467 self._closed = True
467 self._closed = True
468 try:
468 try:
469 self.process.terminate()
469 self.process.terminate()
470 except:
470 except:
471 pass
471 pass
472 if self._close_input_fd:
472 if self._close_input_fd:
473 os.close(self._close_input_fd)
473 os.close(self._close_input_fd)
474 try:
474 try:
475 self.output.close()
475 self.output.close()
476 except:
476 except:
477 pass
477 pass
478 try:
478 try:
479 self.error.close()
479 self.error.close()
480 except:
480 except:
481 pass
481 pass
482
482
483 def __del__(self):
483 def __del__(self):
484 self.close()
484 self.close()
485
486
487 def run_command(arguments, env=None):
488 """
489 Run the specified command and return the stdout.
490
491 :param arguments: sequence of program arguments (including the program name)
492 :type arguments: list[str]
493 """
494
495 cmd = arguments
496 log.debug('Running subprocessio command %s', cmd)
497 try:
498 _opts = {'shell': False, 'fail_on_stderr': False}
499 if env:
500 _opts.update({'env': env})
501 p = SubprocessIOChunker(cmd, **_opts)
502 stdout = ''.join(p)
503 stderr = ''.join(''.join(p.error))
504 except (EnvironmentError, OSError) as err:
505 cmd = ' '.join(cmd) # human friendly CMD
506 tb_err = ("Couldn't run subprocessio command (%s).\n"
507 "Original error was:%s\n" % (cmd, err))
508 log.exception(tb_err)
509 raise Exception(tb_err)
510
511 return stdout, stderr
@@ -1,679 +1,677 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 from urllib2 import URLError
21 from urllib2 import URLError
22 import logging
22 import logging
23 import posixpath as vcspath
23 import posixpath as vcspath
24 import StringIO
24 import StringIO
25 import subprocess
26 import urllib
25 import urllib
27 import traceback
26 import traceback
28
27
29 import svn.client
28 import svn.client
30 import svn.core
29 import svn.core
31 import svn.delta
30 import svn.delta
32 import svn.diff
31 import svn.diff
33 import svn.fs
32 import svn.fs
34 import svn.repos
33 import svn.repos
35
34
36 from vcsserver import svn_diff
35 from vcsserver import svn_diff, exceptions, subprocessio
37 from vcsserver import exceptions
38 from vcsserver.base import RepoFactory, raise_from_original
36 from vcsserver.base import RepoFactory, raise_from_original
39
37
40
41 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
42
39
43
40
44 # Set of svn compatible version flags.
41 # Set of svn compatible version flags.
45 # Compare with subversion/svnadmin/svnadmin.c
42 # Compare with subversion/svnadmin/svnadmin.c
46 svn_compatible_versions = set([
43 svn_compatible_versions = set([
47 'pre-1.4-compatible',
44 'pre-1.4-compatible',
48 'pre-1.5-compatible',
45 'pre-1.5-compatible',
49 'pre-1.6-compatible',
46 'pre-1.6-compatible',
50 'pre-1.8-compatible',
47 'pre-1.8-compatible',
51 'pre-1.9-compatible',
48 'pre-1.9-compatible',
52 ])
49 ])
53
50
54 svn_compatible_versions_map = {
51 svn_compatible_versions_map = {
55 'pre-1.4-compatible': '1.3',
52 'pre-1.4-compatible': '1.3',
56 'pre-1.5-compatible': '1.4',
53 'pre-1.5-compatible': '1.4',
57 'pre-1.6-compatible': '1.5',
54 'pre-1.6-compatible': '1.5',
58 'pre-1.8-compatible': '1.7',
55 'pre-1.8-compatible': '1.7',
59 'pre-1.9-compatible': '1.8',
56 'pre-1.9-compatible': '1.8',
60 }
57 }
61
58
62
59
63 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
64 """Decorator for converting svn exceptions to something neutral."""
61 """Decorator for converting svn exceptions to something neutral."""
65 def wrapper(*args, **kwargs):
62 def wrapper(*args, **kwargs):
66 try:
63 try:
67 return func(*args, **kwargs)
64 return func(*args, **kwargs)
68 except Exception as e:
65 except Exception as e:
69 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
70 log.exception("Unhandled exception in hg remote call")
67 log.exception("Unhandled exception in hg remote call")
71 raise_from_original(exceptions.UnhandledException)
68 raise_from_original(exceptions.UnhandledException)
72 raise
69 raise
73 return wrapper
70 return wrapper
74
71
75
72
76 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
77
74
78 def _create_repo(self, wire, create, compatible_version):
75 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
76 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
77 if create:
81 fs_config = {'compatible-version': '1.9'}
78 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
79 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
80 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
81 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
82 .format(compatible_version))
86 fs_config['compatible-version'] = \
83 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
84 svn_compatible_versions_map[compatible_version]
88
85
89 log.debug('Create SVN repo with config "%s"', fs_config)
86 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
88 else:
92 repo = svn.repos.open(path)
89 repo = svn.repos.open(path)
93
90
94 log.debug('Got SVN object: %s', repo)
91 log.debug('Got SVN object: %s', repo)
95 return repo
92 return repo
96
93
97 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
98 def create_new_repo():
95 def create_new_repo():
99 return self._create_repo(wire, create, compatible_version)
96 return self._create_repo(wire, create, compatible_version)
100
97
101 return self._repo(wire, create_new_repo)
98 return self._repo(wire, create_new_repo)
102
99
103
100
104 NODE_TYPE_MAPPING = {
101 NODE_TYPE_MAPPING = {
105 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_dir: 'dir',
103 svn.core.svn_node_dir: 'dir',
107 }
104 }
108
105
109
106
110 class SvnRemote(object):
107 class SvnRemote(object):
111
108
112 def __init__(self, factory, hg_factory=None):
109 def __init__(self, factory, hg_factory=None):
113 self._factory = factory
110 self._factory = factory
114 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # for subversion
112 # for subversion
116 self._hg_factory = hg_factory
113 self._hg_factory = hg_factory
117
114
118 @reraise_safe_exceptions
115 @reraise_safe_exceptions
119 def discover_svn_version(self):
116 def discover_svn_version(self):
120 try:
117 try:
121 import svn.core
118 import svn.core
122 svn_ver = svn.core.SVN_VERSION
119 svn_ver = svn.core.SVN_VERSION
123 except ImportError:
120 except ImportError:
124 svn_ver = None
121 svn_ver = None
125 return svn_ver
122 return svn_ver
126
123
127 def check_url(self, url, config_items):
124 def check_url(self, url, config_items):
128 # this can throw exception if not installed, but we detect this
125 # this can throw exception if not installed, but we detect this
129 from hgsubversion import svnrepo
126 from hgsubversion import svnrepo
130
127
131 baseui = self._hg_factory._create_config(config_items)
128 baseui = self._hg_factory._create_config(config_items)
132 # uuid function get's only valid UUID from proper repo, else
129 # uuid function get's only valid UUID from proper repo, else
133 # throws exception
130 # throws exception
134 try:
131 try:
135 svnrepo.svnremoterepo(baseui, url).svn.uuid
132 svnrepo.svnremoterepo(baseui, url).svn.uuid
136 except Exception:
133 except Exception:
137 tb = traceback.format_exc()
134 tb = traceback.format_exc()
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 raise URLError(
136 raise URLError(
140 '"%s" is not a valid Subversion source url.' % (url, ))
137 '"%s" is not a valid Subversion source url.' % (url, ))
141 return True
138 return True
142
139
143 def is_path_valid_repository(self, wire, path):
140 def is_path_valid_repository(self, wire, path):
144
141
145 # NOTE(marcink): short circuit the check for SVN repo
142 # NOTE(marcink): short circuit the check for SVN repo
146 # the repos.open might be expensive to check, but we have one cheap
143 # the repos.open might be expensive to check, but we have one cheap
147 # pre condition that we can use, to check for 'format' file
144 # pre condition that we can use, to check for 'format' file
148
145
149 if not os.path.isfile(os.path.join(path, 'format')):
146 if not os.path.isfile(os.path.join(path, 'format')):
150 return False
147 return False
151
148
152 try:
149 try:
153 svn.repos.open(path)
150 svn.repos.open(path)
154 except svn.core.SubversionException:
151 except svn.core.SubversionException:
155 tb = traceback.format_exc()
152 tb = traceback.format_exc()
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 return False
154 return False
158 return True
155 return True
159
156
160 @reraise_safe_exceptions
157 @reraise_safe_exceptions
161 def verify(self, wire,):
158 def verify(self, wire,):
162 repo_path = wire['path']
159 repo_path = wire['path']
163 if not self.is_path_valid_repository(wire, repo_path):
160 if not self.is_path_valid_repository(wire, repo_path):
164 raise Exception(
161 raise Exception(
165 "Path %s is not a valid Subversion repository." % repo_path)
162 "Path %s is not a valid Subversion repository." % repo_path)
166
163
167 load = subprocess.Popen(
164 cmd = ['svnadmin', 'info', repo_path]
168 ['svnadmin', 'info', repo_path],
165 stdout, stderr = subprocessio.run_command(cmd)
169 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
166 return stdout
170 return ''.join(load.stdout)
171
167
172 def lookup(self, wire, revision):
168 def lookup(self, wire, revision):
173 if revision not in [-1, None, 'HEAD']:
169 if revision not in [-1, None, 'HEAD']:
174 raise NotImplementedError
170 raise NotImplementedError
175 repo = self._factory.repo(wire)
171 repo = self._factory.repo(wire)
176 fs_ptr = svn.repos.fs(repo)
172 fs_ptr = svn.repos.fs(repo)
177 head = svn.fs.youngest_rev(fs_ptr)
173 head = svn.fs.youngest_rev(fs_ptr)
178 return head
174 return head
179
175
180 def lookup_interval(self, wire, start_ts, end_ts):
176 def lookup_interval(self, wire, start_ts, end_ts):
181 repo = self._factory.repo(wire)
177 repo = self._factory.repo(wire)
182 fsobj = svn.repos.fs(repo)
178 fsobj = svn.repos.fs(repo)
183 start_rev = None
179 start_rev = None
184 end_rev = None
180 end_rev = None
185 if start_ts:
181 if start_ts:
186 start_ts_svn = apr_time_t(start_ts)
182 start_ts_svn = apr_time_t(start_ts)
187 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
188 else:
184 else:
189 start_rev = 1
185 start_rev = 1
190 if end_ts:
186 if end_ts:
191 end_ts_svn = apr_time_t(end_ts)
187 end_ts_svn = apr_time_t(end_ts)
192 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
193 else:
189 else:
194 end_rev = svn.fs.youngest_rev(fsobj)
190 end_rev = svn.fs.youngest_rev(fsobj)
195 return start_rev, end_rev
191 return start_rev, end_rev
196
192
197 def revision_properties(self, wire, revision):
193 def revision_properties(self, wire, revision):
198 repo = self._factory.repo(wire)
194 repo = self._factory.repo(wire)
199 fs_ptr = svn.repos.fs(repo)
195 fs_ptr = svn.repos.fs(repo)
200 return svn.fs.revision_proplist(fs_ptr, revision)
196 return svn.fs.revision_proplist(fs_ptr, revision)
201
197
202 def revision_changes(self, wire, revision):
198 def revision_changes(self, wire, revision):
203
199
204 repo = self._factory.repo(wire)
200 repo = self._factory.repo(wire)
205 fsobj = svn.repos.fs(repo)
201 fsobj = svn.repos.fs(repo)
206 rev_root = svn.fs.revision_root(fsobj, revision)
202 rev_root = svn.fs.revision_root(fsobj, revision)
207
203
208 editor = svn.repos.ChangeCollector(fsobj, rev_root)
204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
209 editor_ptr, editor_baton = svn.delta.make_editor(editor)
205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
210 base_dir = ""
206 base_dir = ""
211 send_deltas = False
207 send_deltas = False
212 svn.repos.replay2(
208 svn.repos.replay2(
213 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
214 editor_ptr, editor_baton, None)
210 editor_ptr, editor_baton, None)
215
211
216 added = []
212 added = []
217 changed = []
213 changed = []
218 removed = []
214 removed = []
219
215
220 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
221 for path, change in editor.changes.iteritems():
217 for path, change in editor.changes.iteritems():
222 # TODO: Decide what to do with directory nodes. Subversion can add
218 # TODO: Decide what to do with directory nodes. Subversion can add
223 # empty directories.
219 # empty directories.
224
220
225 if change.item_kind == svn.core.svn_node_dir:
221 if change.item_kind == svn.core.svn_node_dir:
226 continue
222 continue
227 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
228 added.append(path)
224 added.append(path)
229 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
230 svn.repos.CHANGE_ACTION_REPLACE]:
226 svn.repos.CHANGE_ACTION_REPLACE]:
231 changed.append(path)
227 changed.append(path)
232 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
233 removed.append(path)
229 removed.append(path)
234 else:
230 else:
235 raise NotImplementedError(
231 raise NotImplementedError(
236 "Action %s not supported on path %s" % (
232 "Action %s not supported on path %s" % (
237 change.action, path))
233 change.action, path))
238
234
239 changes = {
235 changes = {
240 'added': added,
236 'added': added,
241 'changed': changed,
237 'changed': changed,
242 'removed': removed,
238 'removed': removed,
243 }
239 }
244 return changes
240 return changes
245
241
246 def node_history(self, wire, path, revision, limit):
242 def node_history(self, wire, path, revision, limit):
247 cross_copies = False
243 cross_copies = False
248 repo = self._factory.repo(wire)
244 repo = self._factory.repo(wire)
249 fsobj = svn.repos.fs(repo)
245 fsobj = svn.repos.fs(repo)
250 rev_root = svn.fs.revision_root(fsobj, revision)
246 rev_root = svn.fs.revision_root(fsobj, revision)
251
247
252 history_revisions = []
248 history_revisions = []
253 history = svn.fs.node_history(rev_root, path)
249 history = svn.fs.node_history(rev_root, path)
254 history = svn.fs.history_prev(history, cross_copies)
250 history = svn.fs.history_prev(history, cross_copies)
255 while history:
251 while history:
256 __, node_revision = svn.fs.history_location(history)
252 __, node_revision = svn.fs.history_location(history)
257 history_revisions.append(node_revision)
253 history_revisions.append(node_revision)
258 if limit and len(history_revisions) >= limit:
254 if limit and len(history_revisions) >= limit:
259 break
255 break
260 history = svn.fs.history_prev(history, cross_copies)
256 history = svn.fs.history_prev(history, cross_copies)
261 return history_revisions
257 return history_revisions
262
258
263 def node_properties(self, wire, path, revision):
259 def node_properties(self, wire, path, revision):
264 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
265 fsobj = svn.repos.fs(repo)
261 fsobj = svn.repos.fs(repo)
266 rev_root = svn.fs.revision_root(fsobj, revision)
262 rev_root = svn.fs.revision_root(fsobj, revision)
267 return svn.fs.node_proplist(rev_root, path)
263 return svn.fs.node_proplist(rev_root, path)
268
264
269 def file_annotate(self, wire, path, revision):
265 def file_annotate(self, wire, path, revision):
270 abs_path = 'file://' + urllib.pathname2url(
266 abs_path = 'file://' + urllib.pathname2url(
271 vcspath.join(wire['path'], path))
267 vcspath.join(wire['path'], path))
272 file_uri = svn.core.svn_path_canonicalize(abs_path)
268 file_uri = svn.core.svn_path_canonicalize(abs_path)
273
269
274 start_rev = svn_opt_revision_value_t(0)
270 start_rev = svn_opt_revision_value_t(0)
275 peg_rev = svn_opt_revision_value_t(revision)
271 peg_rev = svn_opt_revision_value_t(revision)
276 end_rev = peg_rev
272 end_rev = peg_rev
277
273
278 annotations = []
274 annotations = []
279
275
280 def receiver(line_no, revision, author, date, line, pool):
276 def receiver(line_no, revision, author, date, line, pool):
281 annotations.append((line_no, revision, line))
277 annotations.append((line_no, revision, line))
282
278
283 # TODO: Cannot use blame5, missing typemap function in the swig code
279 # TODO: Cannot use blame5, missing typemap function in the swig code
284 try:
280 try:
285 svn.client.blame2(
281 svn.client.blame2(
286 file_uri, peg_rev, start_rev, end_rev,
282 file_uri, peg_rev, start_rev, end_rev,
287 receiver, svn.client.create_context())
283 receiver, svn.client.create_context())
288 except svn.core.SubversionException as exc:
284 except svn.core.SubversionException as exc:
289 log.exception("Error during blame operation.")
285 log.exception("Error during blame operation.")
290 raise Exception(
286 raise Exception(
291 "Blame not supported or file does not exist at path %s. "
287 "Blame not supported or file does not exist at path %s. "
292 "Error %s." % (path, exc))
288 "Error %s." % (path, exc))
293
289
294 return annotations
290 return annotations
295
291
296 def get_node_type(self, wire, path, rev=None):
292 def get_node_type(self, wire, path, rev=None):
297 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
298 fs_ptr = svn.repos.fs(repo)
294 fs_ptr = svn.repos.fs(repo)
299 if rev is None:
295 if rev is None:
300 rev = svn.fs.youngest_rev(fs_ptr)
296 rev = svn.fs.youngest_rev(fs_ptr)
301 root = svn.fs.revision_root(fs_ptr, rev)
297 root = svn.fs.revision_root(fs_ptr, rev)
302 node = svn.fs.check_path(root, path)
298 node = svn.fs.check_path(root, path)
303 return NODE_TYPE_MAPPING.get(node, None)
299 return NODE_TYPE_MAPPING.get(node, None)
304
300
305 def get_nodes(self, wire, path, revision=None):
301 def get_nodes(self, wire, path, revision=None):
306 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
307 fsobj = svn.repos.fs(repo)
303 fsobj = svn.repos.fs(repo)
308 if revision is None:
304 if revision is None:
309 revision = svn.fs.youngest_rev(fsobj)
305 revision = svn.fs.youngest_rev(fsobj)
310 root = svn.fs.revision_root(fsobj, revision)
306 root = svn.fs.revision_root(fsobj, revision)
311 entries = svn.fs.dir_entries(root, path)
307 entries = svn.fs.dir_entries(root, path)
312 result = []
308 result = []
313 for entry_path, entry_info in entries.iteritems():
309 for entry_path, entry_info in entries.iteritems():
314 result.append(
310 result.append(
315 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
316 return result
312 return result
317
313
318 def get_file_content(self, wire, path, rev=None):
314 def get_file_content(self, wire, path, rev=None):
319 repo = self._factory.repo(wire)
315 repo = self._factory.repo(wire)
320 fsobj = svn.repos.fs(repo)
316 fsobj = svn.repos.fs(repo)
321 if rev is None:
317 if rev is None:
322 rev = svn.fs.youngest_revision(fsobj)
318 rev = svn.fs.youngest_revision(fsobj)
323 root = svn.fs.revision_root(fsobj, rev)
319 root = svn.fs.revision_root(fsobj, rev)
324 content = svn.core.Stream(svn.fs.file_contents(root, path))
320 content = svn.core.Stream(svn.fs.file_contents(root, path))
325 return content.read()
321 return content.read()
326
322
327 def get_file_size(self, wire, path, revision=None):
323 def get_file_size(self, wire, path, revision=None):
328 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
329 fsobj = svn.repos.fs(repo)
325 fsobj = svn.repos.fs(repo)
330 if revision is None:
326 if revision is None:
331 revision = svn.fs.youngest_revision(fsobj)
327 revision = svn.fs.youngest_revision(fsobj)
332 root = svn.fs.revision_root(fsobj, revision)
328 root = svn.fs.revision_root(fsobj, revision)
333 size = svn.fs.file_length(root, path)
329 size = svn.fs.file_length(root, path)
334 return size
330 return size
335
331
336 def create_repository(self, wire, compatible_version=None):
332 def create_repository(self, wire, compatible_version=None):
337 log.info('Creating Subversion repository in path "%s"', wire['path'])
333 log.info('Creating Subversion repository in path "%s"', wire['path'])
338 self._factory.repo(wire, create=True,
334 self._factory.repo(wire, create=True,
339 compatible_version=compatible_version)
335 compatible_version=compatible_version)
340
336
341 def import_remote_repository(self, wire, src_url):
337 def import_remote_repository(self, wire, src_url):
342 repo_path = wire['path']
338 repo_path = wire['path']
343 if not self.is_path_valid_repository(wire, repo_path):
339 if not self.is_path_valid_repository(wire, repo_path):
344 raise Exception(
340 raise Exception(
345 "Path %s is not a valid Subversion repository." % repo_path)
341 "Path %s is not a valid Subversion repository." % repo_path)
342
346 # TODO: johbo: URL checks ?
343 # TODO: johbo: URL checks ?
344 import subprocess
347 rdump = subprocess.Popen(
345 rdump = subprocess.Popen(
348 ['svnrdump', 'dump', '--non-interactive', src_url],
346 ['svnrdump', 'dump', '--non-interactive', src_url],
349 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
350 load = subprocess.Popen(
348 load = subprocess.Popen(
351 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
352
350
353 # TODO: johbo: This can be a very long operation, might be better
351 # TODO: johbo: This can be a very long operation, might be better
354 # to track some kind of status and provide an api to check if the
352 # to track some kind of status and provide an api to check if the
355 # import is done.
353 # import is done.
356 rdump.wait()
354 rdump.wait()
357 load.wait()
355 load.wait()
358
356
359 if rdump.returncode != 0:
357 if rdump.returncode != 0:
360 errors = rdump.stderr.read()
358 errors = rdump.stderr.read()
361 log.error('svnrdump dump failed: statuscode %s: message: %s',
359 log.error('svnrdump dump failed: statuscode %s: message: %s',
362 rdump.returncode, errors)
360 rdump.returncode, errors)
363 reason = 'UNKNOWN'
361 reason = 'UNKNOWN'
364 if 'svnrdump: E230001:' in errors:
362 if 'svnrdump: E230001:' in errors:
365 reason = 'INVALID_CERTIFICATE'
363 reason = 'INVALID_CERTIFICATE'
366 raise Exception(
364 raise Exception(
367 'Failed to dump the remote repository from %s.' % src_url,
365 'Failed to dump the remote repository from %s.' % src_url,
368 reason)
366 reason)
369 if load.returncode != 0:
367 if load.returncode != 0:
370 raise Exception(
368 raise Exception(
371 'Failed to load the dump of remote repository from %s.' %
369 'Failed to load the dump of remote repository from %s.' %
372 (src_url, ))
370 (src_url, ))
373
371
374 def commit(self, wire, message, author, timestamp, updated, removed):
372 def commit(self, wire, message, author, timestamp, updated, removed):
375 assert isinstance(message, str)
373 assert isinstance(message, str)
376 assert isinstance(author, str)
374 assert isinstance(author, str)
377
375
378 repo = self._factory.repo(wire)
376 repo = self._factory.repo(wire)
379 fsobj = svn.repos.fs(repo)
377 fsobj = svn.repos.fs(repo)
380
378
381 rev = svn.fs.youngest_rev(fsobj)
379 rev = svn.fs.youngest_rev(fsobj)
382 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
383 txn_root = svn.fs.txn_root(txn)
381 txn_root = svn.fs.txn_root(txn)
384
382
385 for node in updated:
383 for node in updated:
386 TxnNodeProcessor(node, txn_root).update()
384 TxnNodeProcessor(node, txn_root).update()
387 for node in removed:
385 for node in removed:
388 TxnNodeProcessor(node, txn_root).remove()
386 TxnNodeProcessor(node, txn_root).remove()
389
387
390 commit_id = svn.repos.fs_commit_txn(repo, txn)
388 commit_id = svn.repos.fs_commit_txn(repo, txn)
391
389
392 if timestamp:
390 if timestamp:
393 apr_time = apr_time_t(timestamp)
391 apr_time = apr_time_t(timestamp)
394 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
395 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
396
394
397 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
398 return commit_id
396 return commit_id
399
397
400 def diff(self, wire, rev1, rev2, path1=None, path2=None,
398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
401 ignore_whitespace=False, context=3):
399 ignore_whitespace=False, context=3):
402
400
403 wire.update(cache=False)
401 wire.update(cache=False)
404 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
405 diff_creator = SvnDiffer(
403 diff_creator = SvnDiffer(
406 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
407 try:
405 try:
408 return diff_creator.generate_diff()
406 return diff_creator.generate_diff()
409 except svn.core.SubversionException as e:
407 except svn.core.SubversionException as e:
410 log.exception(
408 log.exception(
411 "Error during diff operation operation. "
409 "Error during diff operation operation. "
412 "Path might not exist %s, %s" % (path1, path2))
410 "Path might not exist %s, %s" % (path1, path2))
413 return ""
411 return ""
414
412
415 @reraise_safe_exceptions
413 @reraise_safe_exceptions
416 def is_large_file(self, wire, path):
414 def is_large_file(self, wire, path):
417 return False
415 return False
418
416
419
417
420 class SvnDiffer(object):
418 class SvnDiffer(object):
421 """
419 """
422 Utility to create diffs based on difflib and the Subversion api
420 Utility to create diffs based on difflib and the Subversion api
423 """
421 """
424
422
425 binary_content = False
423 binary_content = False
426
424
427 def __init__(
425 def __init__(
428 self, repo, src_rev, src_path, tgt_rev, tgt_path,
426 self, repo, src_rev, src_path, tgt_rev, tgt_path,
429 ignore_whitespace, context):
427 ignore_whitespace, context):
430 self.repo = repo
428 self.repo = repo
431 self.ignore_whitespace = ignore_whitespace
429 self.ignore_whitespace = ignore_whitespace
432 self.context = context
430 self.context = context
433
431
434 fsobj = svn.repos.fs(repo)
432 fsobj = svn.repos.fs(repo)
435
433
436 self.tgt_rev = tgt_rev
434 self.tgt_rev = tgt_rev
437 self.tgt_path = tgt_path or ''
435 self.tgt_path = tgt_path or ''
438 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
436 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
439 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
437 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
440
438
441 self.src_rev = src_rev
439 self.src_rev = src_rev
442 self.src_path = src_path or self.tgt_path
440 self.src_path = src_path or self.tgt_path
443 self.src_root = svn.fs.revision_root(fsobj, src_rev)
441 self.src_root = svn.fs.revision_root(fsobj, src_rev)
444 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
442 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
445
443
446 self._validate()
444 self._validate()
447
445
448 def _validate(self):
446 def _validate(self):
449 if (self.tgt_kind != svn.core.svn_node_none and
447 if (self.tgt_kind != svn.core.svn_node_none and
450 self.src_kind != svn.core.svn_node_none and
448 self.src_kind != svn.core.svn_node_none and
451 self.src_kind != self.tgt_kind):
449 self.src_kind != self.tgt_kind):
452 # TODO: johbo: proper error handling
450 # TODO: johbo: proper error handling
453 raise Exception(
451 raise Exception(
454 "Source and target are not compatible for diff generation. "
452 "Source and target are not compatible for diff generation. "
455 "Source type: %s, target type: %s" %
453 "Source type: %s, target type: %s" %
456 (self.src_kind, self.tgt_kind))
454 (self.src_kind, self.tgt_kind))
457
455
458 def generate_diff(self):
456 def generate_diff(self):
459 buf = StringIO.StringIO()
457 buf = StringIO.StringIO()
460 if self.tgt_kind == svn.core.svn_node_dir:
458 if self.tgt_kind == svn.core.svn_node_dir:
461 self._generate_dir_diff(buf)
459 self._generate_dir_diff(buf)
462 else:
460 else:
463 self._generate_file_diff(buf)
461 self._generate_file_diff(buf)
464 return buf.getvalue()
462 return buf.getvalue()
465
463
466 def _generate_dir_diff(self, buf):
464 def _generate_dir_diff(self, buf):
467 editor = DiffChangeEditor()
465 editor = DiffChangeEditor()
468 editor_ptr, editor_baton = svn.delta.make_editor(editor)
466 editor_ptr, editor_baton = svn.delta.make_editor(editor)
469 svn.repos.dir_delta2(
467 svn.repos.dir_delta2(
470 self.src_root,
468 self.src_root,
471 self.src_path,
469 self.src_path,
472 '', # src_entry
470 '', # src_entry
473 self.tgt_root,
471 self.tgt_root,
474 self.tgt_path,
472 self.tgt_path,
475 editor_ptr, editor_baton,
473 editor_ptr, editor_baton,
476 authorization_callback_allow_all,
474 authorization_callback_allow_all,
477 False, # text_deltas
475 False, # text_deltas
478 svn.core.svn_depth_infinity, # depth
476 svn.core.svn_depth_infinity, # depth
479 False, # entry_props
477 False, # entry_props
480 False, # ignore_ancestry
478 False, # ignore_ancestry
481 )
479 )
482
480
483 for path, __, change in sorted(editor.changes):
481 for path, __, change in sorted(editor.changes):
484 self._generate_node_diff(
482 self._generate_node_diff(
485 buf, change, path, self.tgt_path, path, self.src_path)
483 buf, change, path, self.tgt_path, path, self.src_path)
486
484
487 def _generate_file_diff(self, buf):
485 def _generate_file_diff(self, buf):
488 change = None
486 change = None
489 if self.src_kind == svn.core.svn_node_none:
487 if self.src_kind == svn.core.svn_node_none:
490 change = "add"
488 change = "add"
491 elif self.tgt_kind == svn.core.svn_node_none:
489 elif self.tgt_kind == svn.core.svn_node_none:
492 change = "delete"
490 change = "delete"
493 tgt_base, tgt_path = vcspath.split(self.tgt_path)
491 tgt_base, tgt_path = vcspath.split(self.tgt_path)
494 src_base, src_path = vcspath.split(self.src_path)
492 src_base, src_path = vcspath.split(self.src_path)
495 self._generate_node_diff(
493 self._generate_node_diff(
496 buf, change, tgt_path, tgt_base, src_path, src_base)
494 buf, change, tgt_path, tgt_base, src_path, src_base)
497
495
    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """
        Write a git-style diff (header plus unified hunks) for one node
        into `buf`.

        :param buf: file-like object the diff text is written to
        :param change: "add", "delete" or None for a plain modification
        :param tgt_path: node path relative to `tgt_base`
        :param tgt_base: base directory of the target side
        :param src_path: node path relative to `src_base`
        :param src_base: base directory of the source side
        """

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        # Binary detection is based solely on the svn:mime-type property;
        # the flag is also read later by _svn_readlines.
        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            # Non-text mime type: emit a placeholder banner instead of hunks.
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            # Added file: no source side, so the "---" line points at
            # /dev/null and no source lines are read.
            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            # Deleted file: the "+++" side points at /dev/null.
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)
559
557
560 def _get_mime_type(self, path):
558 def _get_mime_type(self, path):
561 try:
559 try:
562 mime_type = svn.fs.node_prop(
560 mime_type = svn.fs.node_prop(
563 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
561 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
564 except svn.core.SubversionException:
562 except svn.core.SubversionException:
565 mime_type = svn.fs.node_prop(
563 mime_type = svn.fs.node_prop(
566 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
564 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
567 return mime_type
565 return mime_type
568
566
569 def _svn_readlines(self, fs_root, node_path):
567 def _svn_readlines(self, fs_root, node_path):
570 if self.binary_content:
568 if self.binary_content:
571 return []
569 return []
572 node_kind = svn.fs.check_path(fs_root, node_path)
570 node_kind = svn.fs.check_path(fs_root, node_path)
573 if node_kind not in (
571 if node_kind not in (
574 svn.core.svn_node_file, svn.core.svn_node_symlink):
572 svn.core.svn_node_file, svn.core.svn_node_symlink):
575 return []
573 return []
576 content = svn.core.Stream(
574 content = svn.core.Stream(
577 svn.fs.file_contents(fs_root, node_path)).read()
575 svn.fs.file_contents(fs_root, node_path)).read()
578 return content.splitlines(True)
576 return content.splitlines(True)
579
577
580
578
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions.

    Drive this editor over a delta to collect the touched paths in
    `changes` as tuples of ``(path, kind, action)`` where kind is
    ``'file'`` (or None for deletions) and action is one of ``'add'``,
    ``'change'`` or ``'delete'``.  The method signatures follow the
    svn.delta.Editor callback protocol.
    """

    def __init__(self):
        # Accumulated (path, kind, action) tuples, in visit order.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        # Node kind is not known for deletions, hence None.
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        # open_file is invoked for modifications of existing files.
        self.changes.append((path, 'file', 'change'))
599
597
600
598
def authorization_callback_allow_all(root, path, pool):
    """
    Authorization callback that grants access to every path.

    Matches the svn_repos authz callback signature; the arguments are
    ignored on purpose.
    """
    return True
603
601
604
602
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        """
        :param node: dict describing the node; must contain at least
            'path' (a native str), and for updates 'content' and
            optionally 'properties'.
        :param txn_root: svn transaction root the change is applied to
        """
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node if needed and apply content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk upwards collecting missing ancestor directories, then
        # create them top-down so each parent exists before its child.
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        # Any node kind other than svn_node_none means the path exists.
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        # Use items() instead of the Python 2-only iteritems() so the
        # code also runs under Python 3; behaviour is identical here.
        for key, value in properties.items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
661
659
662
660
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t,
    i.e. microseconds since the epoch (as a float).
    """
    microseconds_per_second = 1e6
    return timestamp * microseconds_per_second
668
666
669
667
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    Returns a `svn_opt_revision_t` of kind `svn_opt_revision_number`
    whose value wraps `num`.
    """
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    wrapped = svn.core.svn_opt_revision_value_t()
    wrapped.number = num
    revision.value = wrapped
    return revision
General Comments 0
You need to be logged in to leave comments. Login now