##// END OF EJS Templates
exceptions: use new wrapper that store the org exception inside the newly generated exceptions....
marcink -
r490:2961b1db default
parent child Browse files
Show More
@@ -1,70 +1,106 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 import functools
28 28 from pyramid.httpexceptions import HTTPLocked
29 29
30 30
31 def _make_exception(kind, *args):
31 def _make_exception(kind, org_exc, *args):
32 32 """
33 33 Prepares a base `Exception` instance to be sent over the wire.
34 34
35 35 To give our caller a hint what this is about, it will attach an attribute
36 36 `_vcs_kind` to the exception.
37 37 """
38 38 exc = Exception(*args)
39 39 exc._vcs_kind = kind
40 exc._org_exc = org_exc
40 41 return exc
41 42
42 43
43 AbortException = functools.partial(_make_exception, 'abort')
44 def AbortException(org_exc=None):
45 def _make_exception_wrapper(*args):
46 return _make_exception('abort', org_exc, *args)
47 return _make_exception_wrapper
48
44 49
45 ArchiveException = functools.partial(_make_exception, 'archive')
50 def ArchiveException(org_exc=None):
51 def _make_exception_wrapper(*args):
52 return _make_exception('archive', org_exc, *args)
53 return _make_exception_wrapper
54
46 55
47 LookupException = functools.partial(_make_exception, 'lookup')
56 def LookupException(org_exc=None):
57 def _make_exception_wrapper(*args):
58 return _make_exception('lookup', org_exc, *args)
59 return _make_exception_wrapper
60
48 61
49 VcsException = functools.partial(_make_exception, 'error')
62 def VcsException(org_exc=None):
63 def _make_exception_wrapper(*args):
64 return _make_exception('error', org_exc, *args)
65 return _make_exception_wrapper
66
50 67
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
68 def RepositoryLockedException(org_exc=None):
69 def _make_exception_wrapper(*args):
70 return _make_exception('repo_locked', org_exc, *args)
71 return _make_exception_wrapper
72
52 73
53 RequirementException = functools.partial(_make_exception, 'requirement')
74 def RequirementException(org_exc=None):
75 def _make_exception_wrapper(*args):
76 return _make_exception('requirement', org_exc, *args)
77 return _make_exception_wrapper
78
54 79
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
80 def UnhandledException(org_exc=None):
81 def _make_exception_wrapper(*args):
82 return _make_exception('unhandled', org_exc, *args)
83 return _make_exception_wrapper
84
56 85
57 URLError = functools.partial(_make_exception, 'url_error')
86 def URLError(org_exc=None):
87 def _make_exception_wrapper(*args):
88 return _make_exception('url_error', org_exc, *args)
89 return _make_exception_wrapper
58 90
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
91
92 def SubrepoMergeException(org_exc=None):
93 def _make_exception_wrapper(*args):
94 return _make_exception('subrepo_merge_error', org_exc, *args)
95 return _make_exception_wrapper
60 96
61 97
62 98 class HTTPRepoLocked(HTTPLocked):
63 99 """
64 100 Subclass of HTTPLocked response that allows to set the title and status
65 101 code via constructor arguments.
66 102 """
67 103 def __init__(self, title, status_code=None, **kwargs):
68 104 self.code = status_code or HTTPLocked.code
69 105 self.title = title
70 106 super(HTTPRepoLocked, self).__init__(**kwargs)
@@ -1,671 +1,671 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e)(e.message)
60 60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e)(e.message)
62 62 except Exception as e:
63 63 # NOTE(marcink): becuase of how dulwich handles some exceptions
64 64 # (KeyError on empty repos), we cannot track this and catch all
65 65 # exceptions, it's an exceptions from other handlers
66 66 #if not hasattr(e, '_vcs_kind'):
67 67 #log.exception("Unhandled exception in git remote call")
68 68 #raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class Repo(DulwichRepo):
74 74 """
75 75 A wrapper for dulwich Repo class.
76 76
77 77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 78 "Too many open files" error. We need to close all opened file descriptors
79 79 once the repo object is destroyed.
80 80
81 81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 82 to 0.12.0 +
83 83 """
84 84 def __del__(self):
85 85 if hasattr(self, 'object_store'):
86 86 self.close()
87 87
88 88
89 89 class GitFactory(RepoFactory):
90 90 repo_type = 'git'
91 91
92 92 def _create_repo(self, wire, create):
93 93 repo_path = str_to_dulwich(wire['path'])
94 94 return Repo(repo_path)
95 95
96 96
97 97 class GitRemote(object):
98 98
99 99 def __init__(self, factory):
100 100 self._factory = factory
101 101
102 102 self._bulk_methods = {
103 103 "author": self.commit_attribute,
104 104 "date": self.get_object_attrs,
105 105 "message": self.commit_attribute,
106 106 "parents": self.commit_attribute,
107 107 "_commit": self.revision,
108 108 }
109 109
110 110 def _wire_to_config(self, wire):
111 111 if 'config' in wire:
112 112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
113 113 return {}
114 114
115 115 def _assign_ref(self, wire, ref, commit_id):
116 116 repo = self._factory.repo(wire)
117 117 repo[ref] = commit_id
118 118
119 119 @reraise_safe_exceptions
120 120 def add_object(self, wire, content):
121 121 repo = self._factory.repo(wire)
122 122 blob = objects.Blob()
123 123 blob.set_raw_string(content)
124 124 repo.object_store.add_object(blob)
125 125 return blob.id
126 126
127 127 @reraise_safe_exceptions
128 128 def assert_correct_path(self, wire):
129 129 path = wire.get('path')
130 130 try:
131 131 self._factory.repo(wire)
132 132 except NotGitRepository as e:
133 133 tb = traceback.format_exc()
134 134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
135 135 return False
136 136
137 137 return True
138 138
139 139 @reraise_safe_exceptions
140 140 def bare(self, wire):
141 141 repo = self._factory.repo(wire)
142 142 return repo.bare
143 143
144 144 @reraise_safe_exceptions
145 145 def blob_as_pretty_string(self, wire, sha):
146 146 repo = self._factory.repo(wire)
147 147 return repo[sha].as_pretty_string()
148 148
149 149 @reraise_safe_exceptions
150 150 def blob_raw_length(self, wire, sha):
151 151 repo = self._factory.repo(wire)
152 152 blob = repo[sha]
153 153 return blob.raw_length()
154 154
155 155 def _parse_lfs_pointer(self, raw_content):
156 156
157 157 spec_string = 'version https://git-lfs.github.com/spec'
158 158 if raw_content and raw_content.startswith(spec_string):
159 159 pattern = re.compile(r"""
160 160 (?:\n)?
161 161 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
162 162 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
163 163 ^size[ ](?P<oid_size>[0-9]+)\n
164 164 (?:\n)?
165 165 """, re.VERBOSE | re.MULTILINE)
166 166 match = pattern.match(raw_content)
167 167 if match:
168 168 return match.groupdict()
169 169
170 170 return {}
171 171
172 172 @reraise_safe_exceptions
173 173 def is_large_file(self, wire, sha):
174 174 repo = self._factory.repo(wire)
175 175 blob = repo[sha]
176 176 return self._parse_lfs_pointer(blob.as_raw_string())
177 177
178 178 @reraise_safe_exceptions
179 179 def in_largefiles_store(self, wire, oid):
180 180 repo = self._factory.repo(wire)
181 181 conf = self._wire_to_config(wire)
182 182
183 183 store_location = conf.get('vcs_git_lfs_store_location')
184 184 if store_location:
185 185 repo_name = repo.path
186 186 store = LFSOidStore(
187 187 oid=oid, repo=repo_name, store_location=store_location)
188 188 return store.has_oid()
189 189
190 190 return False
191 191
192 192 @reraise_safe_exceptions
193 193 def store_path(self, wire, oid):
194 194 repo = self._factory.repo(wire)
195 195 conf = self._wire_to_config(wire)
196 196
197 197 store_location = conf.get('vcs_git_lfs_store_location')
198 198 if store_location:
199 199 repo_name = repo.path
200 200 store = LFSOidStore(
201 201 oid=oid, repo=repo_name, store_location=store_location)
202 202 return store.oid_path
203 203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
204 204
205 205 @reraise_safe_exceptions
206 206 def bulk_request(self, wire, rev, pre_load):
207 207 result = {}
208 208 for attr in pre_load:
209 209 try:
210 210 method = self._bulk_methods[attr]
211 211 args = [wire, rev]
212 212 if attr == "date":
213 213 args.extend(["commit_time", "commit_timezone"])
214 214 elif attr in ["author", "message", "parents"]:
215 215 args.append(attr)
216 216 result[attr] = method(*args)
217 except KeyError:
218 raise exceptions.VcsException(
217 except KeyError as e:
218 raise exceptions.VcsException(e)(
219 219 "Unknown bulk attribute: %s" % attr)
220 220 return result
221 221
222 222 def _build_opener(self, url):
223 223 handlers = []
224 224 url_obj = url_parser(url)
225 225 _, authinfo = url_obj.authinfo()
226 226
227 227 if authinfo:
228 228 # create a password manager
229 229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
230 230 passmgr.add_password(*authinfo)
231 231
232 232 handlers.extend((httpbasicauthhandler(passmgr),
233 233 httpdigestauthhandler(passmgr)))
234 234
235 235 return urllib2.build_opener(*handlers)
236 236
237 237 @reraise_safe_exceptions
238 238 def check_url(self, url, config):
239 239 url_obj = url_parser(url)
240 240 test_uri, _ = url_obj.authinfo()
241 241 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
242 242 url_obj.query = obfuscate_qs(url_obj.query)
243 243 cleaned_uri = str(url_obj)
244 244 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
245 245
246 246 if not test_uri.endswith('info/refs'):
247 247 test_uri = test_uri.rstrip('/') + '/info/refs'
248 248
249 249 o = self._build_opener(url)
250 250 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
251 251
252 252 q = {"service": 'git-upload-pack'}
253 253 qs = '?%s' % urllib.urlencode(q)
254 254 cu = "%s%s" % (test_uri, qs)
255 255 req = urllib2.Request(cu, None, {})
256 256
257 257 try:
258 258 log.debug("Trying to open URL %s", cleaned_uri)
259 259 resp = o.open(req)
260 260 if resp.code != 200:
261 raise exceptions.URLError('Return Code is not 200')
261 raise exceptions.URLError()('Return Code is not 200')
262 262 except Exception as e:
263 263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
264 264 # means it cannot be cloned
265 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
266 266
267 267 # now detect if it's proper git repo
268 268 gitdata = resp.read()
269 269 if 'service=git-upload-pack' in gitdata:
270 270 pass
271 271 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
272 272 # old style git can return some other format !
273 273 pass
274 274 else:
275 raise exceptions.URLError(
275 raise exceptions.URLError()(
276 276 "url [%s] does not look like an git" % (cleaned_uri,))
277 277
278 278 return True
279 279
280 280 @reraise_safe_exceptions
281 281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
282 282 remote_refs = self.fetch(wire, url, apply_refs=False)
283 283 repo = self._factory.repo(wire)
284 284 if isinstance(valid_refs, list):
285 285 valid_refs = tuple(valid_refs)
286 286
287 287 for k in remote_refs:
288 288 # only parse heads/tags and skip so called deferred tags
289 289 if k.startswith(valid_refs) and not k.endswith(deferred):
290 290 repo[k] = remote_refs[k]
291 291
292 292 if update_after_clone:
293 293 # we want to checkout HEAD
294 294 repo["HEAD"] = remote_refs["HEAD"]
295 295 index.build_index_from_tree(repo.path, repo.index_path(),
296 296 repo.object_store, repo["HEAD"].tree)
297 297
298 298 # TODO: this is quite complex, check if that can be simplified
299 299 @reraise_safe_exceptions
300 300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
301 301 repo = self._factory.repo(wire)
302 302 object_store = repo.object_store
303 303
304 304 # Create tree and populates it with blobs
305 305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
306 306
307 307 for node in updated:
308 308 # Compute subdirs if needed
309 309 dirpath, nodename = vcspath.split(node['path'])
310 310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
311 311 parent = commit_tree
312 312 ancestors = [('', parent)]
313 313
314 314 # Tries to dig for the deepest existing tree
315 315 while dirnames:
316 316 curdir = dirnames.pop(0)
317 317 try:
318 318 dir_id = parent[curdir][1]
319 319 except KeyError:
320 320 # put curdir back into dirnames and stops
321 321 dirnames.insert(0, curdir)
322 322 break
323 323 else:
324 324 # If found, updates parent
325 325 parent = repo[dir_id]
326 326 ancestors.append((curdir, parent))
327 327 # Now parent is deepest existing tree and we need to create
328 328 # subtrees for dirnames (in reverse order)
329 329 # [this only applies for nodes from added]
330 330 new_trees = []
331 331
332 332 blob = objects.Blob.from_string(node['content'])
333 333
334 334 if dirnames:
335 335 # If there are trees which should be created we need to build
336 336 # them now (in reverse order)
337 337 reversed_dirnames = list(reversed(dirnames))
338 338 curtree = objects.Tree()
339 339 curtree[node['node_path']] = node['mode'], blob.id
340 340 new_trees.append(curtree)
341 341 for dirname in reversed_dirnames[:-1]:
342 342 newtree = objects.Tree()
343 343 newtree[dirname] = (DIR_STAT, curtree.id)
344 344 new_trees.append(newtree)
345 345 curtree = newtree
346 346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
347 347 else:
348 348 parent.add(
349 349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
350 350
351 351 new_trees.append(parent)
352 352 # Update ancestors
353 353 reversed_ancestors = reversed(
354 354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
355 355 for parent, tree, path in reversed_ancestors:
356 356 parent[path] = (DIR_STAT, tree.id)
357 357 object_store.add_object(tree)
358 358
359 359 object_store.add_object(blob)
360 360 for tree in new_trees:
361 361 object_store.add_object(tree)
362 362
363 363 for node_path in removed:
364 364 paths = node_path.split('/')
365 365 tree = commit_tree
366 366 trees = [tree]
367 367 # Traverse deep into the forest...
368 368 for path in paths:
369 369 try:
370 370 obj = repo[tree[path][1]]
371 371 if isinstance(obj, objects.Tree):
372 372 trees.append(obj)
373 373 tree = obj
374 374 except KeyError:
375 375 break
376 376 # Cut down the blob and all rotten trees on the way back...
377 377 for path, tree in reversed(zip(paths, trees)):
378 378 del tree[path]
379 379 if tree:
380 380 # This tree still has elements - don't remove it or any
381 381 # of it's parents
382 382 break
383 383
384 384 object_store.add_object(commit_tree)
385 385
386 386 # Create commit
387 387 commit = objects.Commit()
388 388 commit.tree = commit_tree.id
389 389 for k, v in commit_data.iteritems():
390 390 setattr(commit, k, v)
391 391 object_store.add_object(commit)
392 392
393 393 ref = 'refs/heads/%s' % branch
394 394 repo.refs[ref] = commit.id
395 395
396 396 return commit.id
397 397
398 398 @reraise_safe_exceptions
399 399 def fetch(self, wire, url, apply_refs=True, refs=None):
400 400 if url != 'default' and '://' not in url:
401 401 client = LocalGitClient(url)
402 402 else:
403 403 url_obj = url_parser(url)
404 404 o = self._build_opener(url)
405 405 url, _ = url_obj.authinfo()
406 406 client = HttpGitClient(base_url=url, opener=o)
407 407 repo = self._factory.repo(wire)
408 408
409 409 determine_wants = repo.object_store.determine_wants_all
410 410 if refs:
411 411 def determine_wants_requested(references):
412 412 return [references[r] for r in references if r in refs]
413 413 determine_wants = determine_wants_requested
414 414
415 415 try:
416 416 remote_refs = client.fetch(
417 417 path=url, target=repo, determine_wants=determine_wants)
418 418 except NotGitRepository as e:
419 419 log.warning(
420 420 'Trying to fetch from "%s" failed, not a Git repository.', url)
421 421 # Exception can contain unicode which we convert
422 raise exceptions.AbortException(repr(e))
422 raise exceptions.AbortException(e)(repr(e))
423 423
424 424 # mikhail: client.fetch() returns all the remote refs, but fetches only
425 425 # refs filtered by `determine_wants` function. We need to filter result
426 426 # as well
427 427 if refs:
428 428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
429 429
430 430 if apply_refs:
431 431 # TODO: johbo: Needs proper test coverage with a git repository
432 432 # that contains a tag object, so that we would end up with
433 433 # a peeled ref at this point.
434 434 PEELED_REF_MARKER = '^{}'
435 435 for k in remote_refs:
436 436 if k.endswith(PEELED_REF_MARKER):
437 437 log.info("Skipping peeled reference %s", k)
438 438 continue
439 439 repo[k] = remote_refs[k]
440 440
441 441 if refs:
442 442 # mikhail: explicitly set the head to the last ref.
443 443 repo['HEAD'] = remote_refs[refs[-1]]
444 444
445 445 # TODO: mikhail: should we return remote_refs here to be
446 446 # consistent?
447 447 else:
448 448 return remote_refs
449 449
450 450 @reraise_safe_exceptions
451 451 def sync_push(self, wire, url, refs=None):
452 452 if self.check_url(url, wire):
453 453 repo = self._factory.repo(wire)
454 454 self.run_git_command(
455 455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
456 456 _copts=['-c', 'core.askpass=""'],
457 457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
458 458
459 459 @reraise_safe_exceptions
460 460 def get_remote_refs(self, wire, url):
461 461 repo = Repo(url)
462 462 return repo.get_refs()
463 463
464 464 @reraise_safe_exceptions
465 465 def get_description(self, wire):
466 466 repo = self._factory.repo(wire)
467 467 return repo.get_description()
468 468
469 469 @reraise_safe_exceptions
470 470 def get_file_history(self, wire, file_path, commit_id, limit):
471 471 repo = self._factory.repo(wire)
472 472 include = [commit_id]
473 473 paths = [file_path]
474 474
475 475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
476 476 return [x.commit.id for x in walker]
477 477
478 478 @reraise_safe_exceptions
479 479 def get_missing_revs(self, wire, rev1, rev2, path2):
480 480 repo = self._factory.repo(wire)
481 481 LocalGitClient(thin_packs=False).fetch(path2, repo)
482 482
483 483 wire_remote = wire.copy()
484 484 wire_remote['path'] = path2
485 485 repo_remote = self._factory.repo(wire_remote)
486 486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
487 487
488 488 revs = [
489 489 x.commit.id
490 490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
491 491 return revs
492 492
493 493 @reraise_safe_exceptions
494 494 def get_object(self, wire, sha):
495 495 repo = self._factory.repo(wire)
496 496 obj = repo.get_object(sha)
497 497 commit_id = obj.id
498 498
499 499 if isinstance(obj, Tag):
500 500 commit_id = obj.object[1]
501 501
502 502 return {
503 503 'id': obj.id,
504 504 'type': obj.type_name,
505 505 'commit_id': commit_id
506 506 }
507 507
508 508 @reraise_safe_exceptions
509 509 def get_object_attrs(self, wire, sha, *attrs):
510 510 repo = self._factory.repo(wire)
511 511 obj = repo.get_object(sha)
512 512 return list(getattr(obj, a) for a in attrs)
513 513
514 514 @reraise_safe_exceptions
515 515 def get_refs(self, wire):
516 516 repo = self._factory.repo(wire)
517 517 result = {}
518 518 for ref, sha in repo.refs.as_dict().items():
519 519 peeled_sha = repo.get_peeled(ref)
520 520 result[ref] = peeled_sha
521 521 return result
522 522
523 523 @reraise_safe_exceptions
524 524 def get_refs_path(self, wire):
525 525 repo = self._factory.repo(wire)
526 526 return repo.refs.path
527 527
528 528 @reraise_safe_exceptions
529 529 def head(self, wire):
530 530 repo = self._factory.repo(wire)
531 531 return repo.head()
532 532
533 533 @reraise_safe_exceptions
534 534 def init(self, wire):
535 535 repo_path = str_to_dulwich(wire['path'])
536 536 self.repo = Repo.init(repo_path)
537 537
538 538 @reraise_safe_exceptions
539 539 def init_bare(self, wire):
540 540 repo_path = str_to_dulwich(wire['path'])
541 541 self.repo = Repo.init_bare(repo_path)
542 542
543 543 @reraise_safe_exceptions
544 544 def revision(self, wire, rev):
545 545 repo = self._factory.repo(wire)
546 546 obj = repo[rev]
547 547 obj_data = {
548 548 'id': obj.id,
549 549 }
550 550 try:
551 551 obj_data['tree'] = obj.tree
552 552 except AttributeError:
553 553 pass
554 554 return obj_data
555 555
556 556 @reraise_safe_exceptions
557 557 def commit_attribute(self, wire, rev, attr):
558 558 repo = self._factory.repo(wire)
559 559 obj = repo[rev]
560 560 return getattr(obj, attr)
561 561
562 562 @reraise_safe_exceptions
563 563 def set_refs(self, wire, key, value):
564 564 repo = self._factory.repo(wire)
565 565 repo.refs[key] = value
566 566
567 567 @reraise_safe_exceptions
568 568 def remove_ref(self, wire, key):
569 569 repo = self._factory.repo(wire)
570 570 del repo.refs[key]
571 571
572 572 @reraise_safe_exceptions
573 573 def tree_changes(self, wire, source_id, target_id):
574 574 repo = self._factory.repo(wire)
575 575 source = repo[source_id].tree if source_id else None
576 576 target = repo[target_id].tree
577 577 result = repo.object_store.tree_changes(source, target)
578 578 return list(result)
579 579
580 580 @reraise_safe_exceptions
581 581 def tree_items(self, wire, tree_id):
582 582 repo = self._factory.repo(wire)
583 583 tree = repo[tree_id]
584 584
585 585 result = []
586 586 for item in tree.iteritems():
587 587 item_sha = item.sha
588 588 item_mode = item.mode
589 589
590 590 if FILE_MODE(item_mode) == GIT_LINK:
591 591 item_type = "link"
592 592 else:
593 593 item_type = repo[item_sha].type_name
594 594
595 595 result.append((item.path, item_mode, item_sha, item_type))
596 596 return result
597 597
598 598 @reraise_safe_exceptions
599 599 def update_server_info(self, wire):
600 600 repo = self._factory.repo(wire)
601 601 update_server_info(repo)
602 602
603 603 @reraise_safe_exceptions
604 604 def discover_git_version(self):
605 605 stdout, _ = self.run_git_command(
606 606 {}, ['--version'], _bare=True, _safe=True)
607 607 prefix = 'git version'
608 608 if stdout.startswith(prefix):
609 609 stdout = stdout[len(prefix):]
610 610 return stdout.strip()
611 611
612 612 @reraise_safe_exceptions
613 613 def run_git_command(self, wire, cmd, **opts):
614 614 path = wire.get('path', None)
615 615
616 616 if path and os.path.isdir(path):
617 617 opts['cwd'] = path
618 618
619 619 if '_bare' in opts:
620 620 _copts = []
621 621 del opts['_bare']
622 622 else:
623 623 _copts = ['-c', 'core.quotepath=false', ]
624 624 safe_call = False
625 625 if '_safe' in opts:
626 626 # no exc on failure
627 627 del opts['_safe']
628 628 safe_call = True
629 629
630 630 if '_copts' in opts:
631 631 _copts.extend(opts['_copts'] or [])
632 632 del opts['_copts']
633 633
634 634 gitenv = os.environ.copy()
635 635 gitenv.update(opts.pop('extra_env', {}))
636 636 # need to clean fix GIT_DIR !
637 637 if 'GIT_DIR' in gitenv:
638 638 del gitenv['GIT_DIR']
639 639 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
640 640 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
641 641
642 642 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
643 643
644 644 try:
645 645 _opts = {'env': gitenv, 'shell': False}
646 646 _opts.update(opts)
647 647 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
648 648
649 649 return ''.join(p), ''.join(p.error)
650 650 except (EnvironmentError, OSError) as err:
651 651 cmd = ' '.join(cmd) # human friendly CMD
652 652 tb_err = ("Couldn't run git command (%s).\n"
653 653 "Original error was:%s\n" % (cmd, err))
654 654 log.exception(tb_err)
655 655 if safe_call:
656 656 return '', err
657 657 else:
658 raise exceptions.VcsException(tb_err)
658 raise exceptions.VcsException()(tb_err)
659 659
660 660 @reraise_safe_exceptions
661 661 def install_hooks(self, wire, force=False):
662 662 from vcsserver.hook_utils import install_git_hooks
663 663 repo = self._factory.repo(wire)
664 664 return install_git_hooks(repo.path, repo.bare, force_create=force)
665 665
666 666
667 667 def str_to_dulwich(value):
668 668 """
669 669 Dulwich 0.10.1a requires `unicode` objects to be passed in.
670 670 """
671 671 return value.decode(settings.WIRE_ENCODING)
@@ -1,792 +1,793 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from repositories hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
78 raise_from_original(exceptions.AbortException)
79 except RepoLookupError:
80 raise_from_original(exceptions.LookupException)
81 except RequirementError:
82 raise_from_original(exceptions.RequirementException)
83 except RepoError:
84 raise_from_original(exceptions.VcsException)
85 except LookupError:
86 raise_from_original(exceptions.LookupException)
77 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException(e))
81 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException(e))
83 except RepoError as e:
84 raise_from_original(exceptions.VcsException(e))
85 except LookupError as e:
86 raise_from_original(exceptions.LookupException(e))
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException(e))
91
91 92 raise
92 93 return wrapper
93 94
94 95
95 96 class MercurialFactory(RepoFactory):
96 97 repo_type = 'hg'
97 98
98 99 def _create_config(self, config, hooks=True):
99 100 if not hooks:
100 101 hooks_to_clean = frozenset((
101 102 'changegroup.repo_size', 'preoutgoing.pre_pull',
102 103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
103 104 new_config = []
104 105 for section, option, value in config:
105 106 if section == 'hooks' and option in hooks_to_clean:
106 107 continue
107 108 new_config.append((section, option, value))
108 109 config = new_config
109 110
110 111 baseui = make_ui_from_config(config)
111 112 return baseui
112 113
113 114 def _create_repo(self, wire, create):
114 115 baseui = self._create_config(wire["config"])
115 116 return localrepository(baseui, wire["path"], create)
116 117
117 118
118 119 class HgRemote(object):
119 120
120 121 def __init__(self, factory):
121 122 self._factory = factory
122 123
123 124 self._bulk_methods = {
124 125 "affected_files": self.ctx_files,
125 126 "author": self.ctx_user,
126 127 "branch": self.ctx_branch,
127 128 "children": self.ctx_children,
128 129 "date": self.ctx_date,
129 130 "message": self.ctx_description,
130 131 "parents": self.ctx_parents,
131 132 "status": self.ctx_status,
132 133 "obsolete": self.ctx_obsolete,
133 134 "phase": self.ctx_phase,
134 135 "hidden": self.ctx_hidden,
135 136 "_file_paths": self.ctx_list,
136 137 }
137 138
138 139 @reraise_safe_exceptions
139 140 def discover_hg_version(self):
140 141 from mercurial import util
141 142 return util.version()
142 143
143 144 @reraise_safe_exceptions
144 145 def archive_repo(self, archive_path, mtime, file_info, kind):
145 146 if kind == "tgz":
146 147 archiver = archival.tarit(archive_path, mtime, "gz")
147 148 elif kind == "tbz2":
148 149 archiver = archival.tarit(archive_path, mtime, "bz2")
149 150 elif kind == 'zip':
150 151 archiver = archival.zipit(archive_path, mtime)
151 152 else:
152 raise exceptions.ArchiveException(
153 raise exceptions.ArchiveException()(
153 154 'Remote does not support: "%s".' % kind)
154 155
155 156 for f_path, f_mode, f_is_link, f_content in file_info:
156 157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
157 158 archiver.done()
158 159
159 160 @reraise_safe_exceptions
160 161 def bookmarks(self, wire):
161 162 repo = self._factory.repo(wire)
162 163 return dict(repo._bookmarks)
163 164
164 165 @reraise_safe_exceptions
165 166 def branches(self, wire, normal, closed):
166 167 repo = self._factory.repo(wire)
167 168 iter_branches = repo.branchmap().iterbranches()
168 169 bt = {}
169 170 for branch_name, _heads, tip, is_closed in iter_branches:
170 171 if normal and not is_closed:
171 172 bt[branch_name] = tip
172 173 if closed and is_closed:
173 174 bt[branch_name] = tip
174 175
175 176 return bt
176 177
177 178 @reraise_safe_exceptions
178 179 def bulk_request(self, wire, rev, pre_load):
179 180 result = {}
180 181 for attr in pre_load:
181 182 try:
182 183 method = self._bulk_methods[attr]
183 184 result[attr] = method(wire, rev)
184 except KeyError:
185 raise exceptions.VcsException(
185 except KeyError as e:
186 raise exceptions.VcsException(e)(
186 187 'Unknown bulk attribute: "%s"' % attr)
187 188 return result
188 189
189 190 @reraise_safe_exceptions
190 191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
191 192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
192 193 clone(baseui, source, dest, noupdate=not update_after_clone)
193 194
194 195 @reraise_safe_exceptions
195 196 def commitctx(
196 197 self, wire, message, parents, commit_time, commit_timezone,
197 198 user, files, extra, removed, updated):
198 199
199 200 def _filectxfn(_repo, memctx, path):
200 201 """
201 202 Marks given path as added/changed/removed in a given _repo. This is
202 203 for internal mercurial commit function.
203 204 """
204 205
205 206 # check if this path is removed
206 207 if path in removed:
207 208 # returning None is a way to mark node for removal
208 209 return None
209 210
210 211 # check if this path is added
211 212 for node in updated:
212 213 if node['path'] == path:
213 214 return memfilectx(
214 215 _repo,
215 216 changectx=memctx,
216 217 path=node['path'],
217 218 data=node['content'],
218 219 islink=False,
219 220 isexec=bool(node['mode'] & stat.S_IXUSR),
220 221 copied=False)
221 222
222 raise exceptions.AbortException(
223 raise exceptions.AbortException()(
223 224 "Given path haven't been marked as added, "
224 225 "changed or removed (%s)" % path)
225 226
226 227 repo = self._factory.repo(wire)
227 228
228 229 commit_ctx = memctx(
229 230 repo=repo,
230 231 parents=parents,
231 232 text=message,
232 233 files=files,
233 234 filectxfn=_filectxfn,
234 235 user=user,
235 236 date=(commit_time, commit_timezone),
236 237 extra=extra)
237 238
238 239 n = repo.commitctx(commit_ctx)
239 240 new_id = hex(n)
240 241
241 242 return new_id
242 243
243 244 @reraise_safe_exceptions
244 245 def ctx_branch(self, wire, revision):
245 246 repo = self._factory.repo(wire)
246 247 ctx = repo[revision]
247 248 return ctx.branch()
248 249
249 250 @reraise_safe_exceptions
250 251 def ctx_children(self, wire, revision):
251 252 repo = self._factory.repo(wire)
252 253 ctx = repo[revision]
253 254 return [child.rev() for child in ctx.children()]
254 255
255 256 @reraise_safe_exceptions
256 257 def ctx_date(self, wire, revision):
257 258 repo = self._factory.repo(wire)
258 259 ctx = repo[revision]
259 260 return ctx.date()
260 261
261 262 @reraise_safe_exceptions
262 263 def ctx_description(self, wire, revision):
263 264 repo = self._factory.repo(wire)
264 265 ctx = repo[revision]
265 266 return ctx.description()
266 267
267 268 @reraise_safe_exceptions
268 269 def ctx_diff(
269 270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
270 271 repo = self._factory.repo(wire)
271 272 ctx = repo[revision]
272 273 result = ctx.diff(
273 274 git=git, ignore_whitespace=ignore_whitespace, context=context)
274 275 return list(result)
275 276
276 277 @reraise_safe_exceptions
277 278 def ctx_files(self, wire, revision):
278 279 repo = self._factory.repo(wire)
279 280 ctx = repo[revision]
280 281 return ctx.files()
281 282
282 283 @reraise_safe_exceptions
283 284 def ctx_list(self, path, revision):
284 285 repo = self._factory.repo(path)
285 286 ctx = repo[revision]
286 287 return list(ctx)
287 288
288 289 @reraise_safe_exceptions
289 290 def ctx_parents(self, wire, revision):
290 291 repo = self._factory.repo(wire)
291 292 ctx = repo[revision]
292 293 return [parent.rev() for parent in ctx.parents()]
293 294
294 295 @reraise_safe_exceptions
295 296 def ctx_phase(self, wire, revision):
296 297 repo = self._factory.repo(wire)
297 298 ctx = repo[revision]
298 299 # public=0, draft=1, secret=3
299 300 return ctx.phase()
300 301
301 302 @reraise_safe_exceptions
302 303 def ctx_obsolete(self, wire, revision):
303 304 repo = self._factory.repo(wire)
304 305 ctx = repo[revision]
305 306 return ctx.obsolete()
306 307
307 308 @reraise_safe_exceptions
308 309 def ctx_hidden(self, wire, revision):
309 310 repo = self._factory.repo(wire)
310 311 ctx = repo[revision]
311 312 return ctx.hidden()
312 313
313 314 @reraise_safe_exceptions
314 315 def ctx_substate(self, wire, revision):
315 316 repo = self._factory.repo(wire)
316 317 ctx = repo[revision]
317 318 return ctx.substate
318 319
319 320 @reraise_safe_exceptions
320 321 def ctx_status(self, wire, revision):
321 322 repo = self._factory.repo(wire)
322 323 ctx = repo[revision]
323 324 status = repo[ctx.p1().node()].status(other=ctx.node())
324 325 # object of status (odd, custom named tuple in mercurial) is not
325 326 # correctly serializable, we make it a list, as the underling
326 327 # API expects this to be a list
327 328 return list(status)
328 329
329 330 @reraise_safe_exceptions
330 331 def ctx_user(self, wire, revision):
331 332 repo = self._factory.repo(wire)
332 333 ctx = repo[revision]
333 334 return ctx.user()
334 335
335 336 @reraise_safe_exceptions
336 337 def check_url(self, url, config):
337 338 _proto = None
338 339 if '+' in url[:url.find('://')]:
339 340 _proto = url[0:url.find('+')]
340 341 url = url[url.find('+') + 1:]
341 342 handlers = []
342 343 url_obj = url_parser(url)
343 344 test_uri, authinfo = url_obj.authinfo()
344 345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
345 346 url_obj.query = obfuscate_qs(url_obj.query)
346 347
347 348 cleaned_uri = str(url_obj)
348 349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
349 350
350 351 if authinfo:
351 352 # create a password manager
352 353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
353 354 passmgr.add_password(*authinfo)
354 355
355 356 handlers.extend((httpbasicauthhandler(passmgr),
356 357 httpdigestauthhandler(passmgr)))
357 358
358 359 o = urllib2.build_opener(*handlers)
359 360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
360 361 ('Accept', 'application/mercurial-0.1')]
361 362
362 363 q = {"cmd": 'between'}
363 364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
364 365 qs = '?%s' % urllib.urlencode(q)
365 366 cu = "%s%s" % (test_uri, qs)
366 367 req = urllib2.Request(cu, None, {})
367 368
368 369 try:
369 370 log.debug("Trying to open URL %s", cleaned_uri)
370 371 resp = o.open(req)
371 372 if resp.code != 200:
372 raise exceptions.URLError('Return Code is not 200')
373 raise exceptions.URLError()('Return Code is not 200')
373 374 except Exception as e:
374 375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 376 # means it cannot be cloned
376 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
377 378
378 379 # now check if it's a proper hg repo, but don't do it for svn
379 380 try:
380 381 if _proto == 'svn':
381 382 pass
382 383 else:
383 384 # check for pure hg repos
384 385 log.debug(
385 386 "Verifying if URL is a Mercurial repository: %s",
386 387 cleaned_uri)
387 388 ui = make_ui_from_config(config)
388 389 peer_checker = makepeer(ui, url)
389 390 peer_checker.lookup('tip')
390 391 except Exception as e:
391 392 log.warning("URL is not a valid Mercurial repository: %s",
392 393 cleaned_uri)
393 raise exceptions.URLError(
394 raise exceptions.URLError(e)(
394 395 "url [%s] does not look like an hg repo org_exc: %s"
395 396 % (cleaned_uri, e))
396 397
397 398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
398 399 return True
399 400
400 401 @reraise_safe_exceptions
401 402 def diff(
402 403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
403 404 context):
404 405 repo = self._factory.repo(wire)
405 406
406 407 if file_filter:
407 408 match_filter = match(file_filter[0], '', [file_filter[1]])
408 409 else:
409 410 match_filter = file_filter
410 411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
411 412
412 413 try:
413 414 return "".join(patch.diff(
414 415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
415 except RepoLookupError:
416 raise exceptions.LookupException()
416 except RepoLookupError as e:
417 raise exceptions.LookupException(e)()
417 418
418 419 @reraise_safe_exceptions
419 420 def file_history(self, wire, revision, path, limit):
420 421 repo = self._factory.repo(wire)
421 422
422 423 ctx = repo[revision]
423 424 fctx = ctx.filectx(path)
424 425
425 426 def history_iter():
426 427 limit_rev = fctx.rev()
427 428 for obj in reversed(list(fctx.filelog())):
428 429 obj = fctx.filectx(obj)
429 430 if limit_rev >= obj.rev():
430 431 yield obj
431 432
432 433 history = []
433 434 for cnt, obj in enumerate(history_iter()):
434 435 if limit and cnt >= limit:
435 436 break
436 437 history.append(hex(obj.node()))
437 438
438 439 return [x for x in history]
439 440
440 441 @reraise_safe_exceptions
441 442 def file_history_untill(self, wire, revision, path, limit):
442 443 repo = self._factory.repo(wire)
443 444 ctx = repo[revision]
444 445 fctx = ctx.filectx(path)
445 446
446 447 file_log = list(fctx.filelog())
447 448 if limit:
448 449 # Limit to the last n items
449 450 file_log = file_log[-limit:]
450 451
451 452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
452 453
453 454 @reraise_safe_exceptions
454 455 def fctx_annotate(self, wire, revision, path):
455 456 repo = self._factory.repo(wire)
456 457 ctx = repo[revision]
457 458 fctx = ctx.filectx(path)
458 459
459 460 result = []
460 461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
461 462 ln_no = i
462 463 sha = hex(annotate_obj.fctx.node())
463 464 content = annotate_obj.text
464 465 result.append((ln_no, sha, content))
465 466 return result
466 467
467 468 @reraise_safe_exceptions
468 469 def fctx_data(self, wire, revision, path):
469 470 repo = self._factory.repo(wire)
470 471 ctx = repo[revision]
471 472 fctx = ctx.filectx(path)
472 473 return fctx.data()
473 474
474 475 @reraise_safe_exceptions
475 476 def fctx_flags(self, wire, revision, path):
476 477 repo = self._factory.repo(wire)
477 478 ctx = repo[revision]
478 479 fctx = ctx.filectx(path)
479 480 return fctx.flags()
480 481
481 482 @reraise_safe_exceptions
482 483 def fctx_size(self, wire, revision, path):
483 484 repo = self._factory.repo(wire)
484 485 ctx = repo[revision]
485 486 fctx = ctx.filectx(path)
486 487 return fctx.size()
487 488
488 489 @reraise_safe_exceptions
489 490 def get_all_commit_ids(self, wire, name):
490 491 repo = self._factory.repo(wire)
491 492 revs = repo.filtered(name).changelog.index
492 493 return map(lambda x: hex(x[7]), revs)[:-1]
493 494
494 495 @reraise_safe_exceptions
495 496 def get_config_value(self, wire, section, name, untrusted=False):
496 497 repo = self._factory.repo(wire)
497 498 return repo.ui.config(section, name, untrusted=untrusted)
498 499
499 500 @reraise_safe_exceptions
500 501 def get_config_bool(self, wire, section, name, untrusted=False):
501 502 repo = self._factory.repo(wire)
502 503 return repo.ui.configbool(section, name, untrusted=untrusted)
503 504
504 505 @reraise_safe_exceptions
505 506 def get_config_list(self, wire, section, name, untrusted=False):
506 507 repo = self._factory.repo(wire)
507 508 return repo.ui.configlist(section, name, untrusted=untrusted)
508 509
509 510 @reraise_safe_exceptions
510 511 def is_large_file(self, wire, path):
511 512 return largefiles.lfutil.isstandin(path)
512 513
513 514 @reraise_safe_exceptions
514 515 def in_largefiles_store(self, wire, sha):
515 516 repo = self._factory.repo(wire)
516 517 return largefiles.lfutil.instore(repo, sha)
517 518
518 519 @reraise_safe_exceptions
519 520 def in_user_cache(self, wire, sha):
520 521 repo = self._factory.repo(wire)
521 522 return largefiles.lfutil.inusercache(repo.ui, sha)
522 523
523 524 @reraise_safe_exceptions
524 525 def store_path(self, wire, sha):
525 526 repo = self._factory.repo(wire)
526 527 return largefiles.lfutil.storepath(repo, sha)
527 528
528 529 @reraise_safe_exceptions
529 530 def link(self, wire, sha, path):
530 531 repo = self._factory.repo(wire)
531 532 largefiles.lfutil.link(
532 533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
533 534
534 535 @reraise_safe_exceptions
535 536 def localrepository(self, wire, create=False):
536 537 self._factory.repo(wire, create=create)
537 538
538 539 @reraise_safe_exceptions
539 540 def lookup(self, wire, revision, both):
540 541
541 542 repo = self._factory.repo(wire)
542 543
543 544 if isinstance(revision, int):
544 545 # NOTE(marcink):
545 546 # since Mercurial doesn't support indexes properly
546 547 # we need to shift accordingly by one to get proper index, e.g
547 548 # repo[-1] => repo[-2]
548 549 # repo[0] => repo[-1]
549 550 # repo[1] => repo[2] we also never call repo[0] because
550 551 # it's actually second commit
551 552 if revision <= 0:
552 553 revision = revision + -1
553 554 else:
554 555 revision = revision + 1
555 556
556 557 try:
557 558 ctx = repo[revision]
558 except RepoLookupError:
559 raise exceptions.LookupException(revision)
559 except RepoLookupError as e:
560 raise exceptions.LookupException(e)(revision)
560 561 except LookupError as e:
561 raise exceptions.LookupException(e.name)
562 raise exceptions.LookupException(e)(e.name)
562 563
563 564 if not both:
564 565 return ctx.hex()
565 566
566 567 ctx = repo[ctx.hex()]
567 568 return ctx.hex(), ctx.rev()
568 569
569 570 @reraise_safe_exceptions
570 571 def pull(self, wire, url, commit_ids=None):
571 572 repo = self._factory.repo(wire)
572 573 # Disable any prompts for this repo
573 574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
574 575
575 576 remote = peer(repo, {}, url)
576 577 # Disable any prompts for this remote
577 578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
578 579
579 580 if commit_ids:
580 581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
581 582
582 583 return exchange.pull(
583 584 repo, remote, heads=commit_ids, force=None).cgresult
584 585
585 586 @reraise_safe_exceptions
586 587 def sync_push(self, wire, url):
587 588 if self.check_url(url, wire['config']):
588 589 repo = self._factory.repo(wire)
589 590
590 591 # Disable any prompts for this repo
591 592 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
592 593
593 594 bookmarks = dict(repo._bookmarks).keys()
594 595 remote = peer(repo, {}, url)
595 596 # Disable any prompts for this remote
596 597 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
597 598
598 599 return exchange.push(
599 600 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
600 601
601 602 @reraise_safe_exceptions
602 603 def revision(self, wire, rev):
603 604 repo = self._factory.repo(wire)
604 605 ctx = repo[rev]
605 606 return ctx.rev()
606 607
607 608 @reraise_safe_exceptions
608 609 def rev_range(self, wire, filter):
609 610 repo = self._factory.repo(wire)
610 611 revisions = [rev for rev in revrange(repo, filter)]
611 612 return revisions
612 613
613 614 @reraise_safe_exceptions
614 615 def rev_range_hash(self, wire, node):
615 616 repo = self._factory.repo(wire)
616 617
617 618 def get_revs(repo, rev_opt):
618 619 if rev_opt:
619 620 revs = revrange(repo, rev_opt)
620 621 if len(revs) == 0:
621 622 return (nullrev, nullrev)
622 623 return max(revs), min(revs)
623 624 else:
624 625 return len(repo) - 1, 0
625 626
626 627 stop, start = get_revs(repo, [node + ':'])
627 628 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
628 629 return revs
629 630
630 631 @reraise_safe_exceptions
631 632 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
632 633 other_path = kwargs.pop('other_path', None)
633 634
634 635 # case when we want to compare two independent repositories
635 636 if other_path and other_path != wire["path"]:
636 637 baseui = self._factory._create_config(wire["config"])
637 638 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
638 639 else:
639 640 repo = self._factory.repo(wire)
640 641 return list(repo.revs(rev_spec, *args))
641 642
642 643 @reraise_safe_exceptions
643 644 def strip(self, wire, revision, update, backup):
644 645 repo = self._factory.repo(wire)
645 646 ctx = repo[revision]
646 647 hgext_strip(
647 648 repo.baseui, repo, ctx.node(), update=update, backup=backup)
648 649
649 650 @reraise_safe_exceptions
650 651 def verify(self, wire,):
651 652 repo = self._factory.repo(wire)
652 653 baseui = self._factory._create_config(wire['config'])
653 654 baseui.setconfig('ui', 'quiet', 'false')
654 655 output = io.BytesIO()
655 656
656 657 def write(data, **unused_kwargs):
657 658 output.write(data)
658 659 baseui.write = write
659 660
660 661 repo.ui = baseui
661 662 verify.verify(repo)
662 663 return output.getvalue()
663 664
664 665 @reraise_safe_exceptions
665 666 def tag(self, wire, name, revision, message, local, user,
666 667 tag_time, tag_timezone):
667 668 repo = self._factory.repo(wire)
668 669 ctx = repo[revision]
669 670 node = ctx.node()
670 671
671 672 date = (tag_time, tag_timezone)
672 673 try:
673 674 hg_tag.tag(repo, name, node, message, local, user, date)
674 675 except Abort as e:
675 676 log.exception("Tag operation aborted")
676 677 # Exception can contain unicode which we convert
677 raise exceptions.AbortException(repr(e))
678 raise exceptions.AbortException(e)(repr(e))
678 679
679 680 @reraise_safe_exceptions
680 681 def tags(self, wire):
681 682 repo = self._factory.repo(wire)
682 683 return repo.tags()
683 684
684 685 @reraise_safe_exceptions
685 686 def update(self, wire, node=None, clean=False):
686 687 repo = self._factory.repo(wire)
687 688 baseui = self._factory._create_config(wire['config'])
688 689 commands.update(baseui, repo, node=node, clean=clean)
689 690
690 691 @reraise_safe_exceptions
691 692 def identify(self, wire):
692 693 repo = self._factory.repo(wire)
693 694 baseui = self._factory._create_config(wire['config'])
694 695 output = io.BytesIO()
695 696 baseui.write = output.write
696 697 # This is required to get a full node id
697 698 baseui.debugflag = True
698 699 commands.identify(baseui, repo, id=True)
699 700
700 701 return output.getvalue()
701 702
702 703 @reraise_safe_exceptions
703 704 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
704 705 hooks=True):
705 706 repo = self._factory.repo(wire)
706 707 baseui = self._factory._create_config(wire['config'], hooks=hooks)
707 708
708 709 # Mercurial internally has a lot of logic that checks ONLY if
709 710 # option is defined, we just pass those if they are defined then
710 711 opts = {}
711 712 if bookmark:
712 713 opts['bookmark'] = bookmark
713 714 if branch:
714 715 opts['branch'] = branch
715 716 if revision:
716 717 opts['rev'] = revision
717 718
718 719 commands.pull(baseui, repo, source, **opts)
719 720
720 721 @reraise_safe_exceptions
721 722 def heads(self, wire, branch=None):
722 723 repo = self._factory.repo(wire)
723 724 baseui = self._factory._create_config(wire['config'])
724 725 output = io.BytesIO()
725 726
726 727 def write(data, **unused_kwargs):
727 728 output.write(data)
728 729
729 730 baseui.write = write
730 731 if branch:
731 732 args = [branch]
732 733 else:
733 734 args = []
734 735 commands.heads(baseui, repo, template='{node} ', *args)
735 736
736 737 return output.getvalue()
737 738
738 739 @reraise_safe_exceptions
739 740 def ancestor(self, wire, revision1, revision2):
740 741 repo = self._factory.repo(wire)
741 742 changelog = repo.changelog
742 743 lookup = repo.lookup
743 744 a = changelog.ancestor(lookup(revision1), lookup(revision2))
744 745 return hex(a)
745 746
746 747 @reraise_safe_exceptions
747 748 def push(self, wire, revisions, dest_path, hooks=True,
748 749 push_branches=False):
749 750 repo = self._factory.repo(wire)
750 751 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751 752 commands.push(baseui, repo, dest=dest_path, rev=revisions,
752 753 new_branch=push_branches)
753 754
754 755 @reraise_safe_exceptions
755 756 def merge(self, wire, revision):
756 757 repo = self._factory.repo(wire)
757 758 baseui = self._factory._create_config(wire['config'])
758 759 repo.ui.setconfig('ui', 'merge', 'internal:dump')
759 760
760 761 # In case of sub repositories are used mercurial prompts the user in
761 762 # case of merge conflicts or different sub repository sources. By
762 763 # setting the interactive flag to `False` mercurial doesn't prompt the
763 764 # used but instead uses a default value.
764 765 repo.ui.setconfig('ui', 'interactive', False)
765 766
766 767 commands.merge(baseui, repo, rev=revision)
767 768
768 769 @reraise_safe_exceptions
769 770 def commit(self, wire, message, username, close_branch=False):
770 771 repo = self._factory.repo(wire)
771 772 baseui = self._factory._create_config(wire['config'])
772 773 repo.ui.setconfig('ui', 'username', username)
773 774 commands.commit(baseui, repo, message=message, close_branch=close_branch)
774 775
775 776 @reraise_safe_exceptions
776 777 def rebase(self, wire, source=None, dest=None, abort=False):
777 778 repo = self._factory.repo(wire)
778 779 baseui = self._factory._create_config(wire['config'])
779 780 repo.ui.setconfig('ui', 'merge', 'internal:dump')
780 781 rebase.rebase(
781 782 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
782 783
783 784 @reraise_safe_exceptions
784 785 def bookmark(self, wire, bookmark, revision=None):
785 786 repo = self._factory.repo(wire)
786 787 baseui = self._factory._create_config(wire['config'])
787 788 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
788 789
789 790 @reraise_safe_exceptions
790 791 def install_hooks(self, wire, force=False):
791 792 # we don't need any special hooks for Mercurial
792 793 pass
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 39 lfproto._capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto._capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(orig, repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 return calc_capabilities(orig, repo, proto)
56 56 else:
57 57 logger.debug('Extension largefiles disabled')
58 58 return orig(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
63 63 def patch_subrepo_type_mapping():
64 64 from collections import defaultdict
65 65 from hgcompat import subrepo
66 66 from exceptions import SubrepoMergeException
67 67
68 68 class NoOpSubrepo(subrepo.abstractsubrepo):
69 69
70 70 def __init__(self, ctx, path, *args, **kwargs):
71 71 """Initialize abstractsubrepo part
72 72
73 73 ``ctx`` is the context referring this subrepository in the
74 74 parent repository.
75 75
76 76 ``path`` is the path to this subrepository as seen from
77 77 innermost repository.
78 78 """
79 79 self.ui = ctx.repo().ui
80 80 self._ctx = ctx
81 81 self._path = path
82 82
83 83 def storeclean(self, path):
84 84 """
85 85 returns true if the repository has not changed since it was last
86 86 cloned from or pushed to a given repository.
87 87 """
88 88 return True
89 89
90 90 def dirty(self, ignoreupdate=False, missing=False):
91 91 """returns true if the dirstate of the subrepo is dirty or does not
92 92 match current stored state. If ignoreupdate is true, only check
93 93 whether the subrepo has uncommitted changes in its dirstate.
94 94 """
95 95 return False
96 96
97 97 def basestate(self):
98 98 """current working directory base state, disregarding .hgsubstate
99 99 state and working directory modifications"""
100 100 substate = subrepo.state(self._ctx, self.ui)
101 101 file_system_path, rev, repotype = substate.get(self._path)
102 102 return rev
103 103
104 104 def remove(self):
105 105 """remove the subrepo
106 106
107 107 (should verify the dirstate is not dirty first)
108 108 """
109 109 pass
110 110
111 111 def get(self, state, overwrite=False):
112 112 """run whatever commands are needed to put the subrepo into
113 113 this state
114 114 """
115 115 pass
116 116
117 117 def merge(self, state):
118 118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()()
120 120
121 121 def push(self, opts):
122 122 """perform whatever action is analogous to 'hg push'
123 123
124 124 This may be a no-op on some systems.
125 125 """
126 126 pass
127 127
128 128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 129 # whenever a subrepo class is looked up.
130 130 subrepo.types = {
131 131 'hg': NoOpSubrepo,
132 132 'git': NoOpSubrepo,
133 133 'svn': NoOpSubrepo
134 134 }
@@ -1,572 +1,572 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 52 log.error('Connection failed on %s', connection)
53 53 raise
54 54 response = connection.getresponse()
55 55 return json.loads(response.read())
56 56
57 57 def _serialize(self, hook_name, extras):
58 58 data = {
59 59 'method': hook_name,
60 60 'extras': extras
61 61 }
62 62 return json.dumps(data)
63 63
64 64
65 65 class HooksDummyClient(object):
66 66 def __init__(self, hooks_module):
67 67 self._hooks_module = importlib.import_module(hooks_module)
68 68
69 69 def __call__(self, hook_name, extras):
70 70 with self._hooks_module.Hooks() as hooks:
71 71 return getattr(hooks, hook_name)(extras)
72 72
73 73
74 74 class RemoteMessageWriter(object):
75 75 """Writer base class."""
76 76 def write(self, message):
77 77 raise NotImplementedError()
78 78
79 79
80 80 class HgMessageWriter(RemoteMessageWriter):
81 81 """Writer that knows how to send messages to mercurial clients."""
82 82
83 83 def __init__(self, ui):
84 84 self.ui = ui
85 85
86 86 def write(self, message):
87 87 # TODO: Check why the quiet flag is set by default.
88 88 old = self.ui.quiet
89 89 self.ui.quiet = False
90 90 self.ui.status(message.encode('utf-8'))
91 91 self.ui.quiet = old
92 92
93 93
94 94 class GitMessageWriter(RemoteMessageWriter):
95 95 """Writer that knows how to send messages to git clients."""
96 96
97 97 def __init__(self, stdout=None):
98 98 self.stdout = stdout or sys.stdout
99 99
100 100 def write(self, message):
101 101 self.stdout.write(message.encode('utf-8'))
102 102
103 103
104 104 class SvnMessageWriter(RemoteMessageWriter):
105 105 """Writer that knows how to send messages to svn clients."""
106 106
107 107 def __init__(self, stderr=None):
108 108 # SVN needs data sent to stderr for back-to-client messaging
109 109 self.stderr = stderr or sys.stderr
110 110
111 111 def write(self, message):
112 112 self.stderr.write(message.encode('utf-8'))
113 113
114 114
115 115 def _handle_exception(result):
116 116 exception_class = result.get('exception')
117 117 exception_traceback = result.get('exception_traceback')
118 118
119 119 if exception_traceback:
120 120 log.error('Got traceback from remote call:%s', exception_traceback)
121 121
122 122 if exception_class == 'HTTPLockedRC':
123 raise exceptions.RepositoryLockedException(*result['exception_args'])
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 124 elif exception_class == 'RepositoryError':
125 raise exceptions.VcsException(*result['exception_args'])
125 raise exceptions.VcsException()(*result['exception_args'])
126 126 elif exception_class:
127 127 raise Exception('Got remote exception "%s" with args "%s"' %
128 128 (exception_class, result['exception_args']))
129 129
130 130
131 131 def _get_hooks_client(extras):
132 132 if 'hooks_uri' in extras:
133 133 protocol = extras.get('hooks_protocol')
134 134 return HooksHttpClient(extras['hooks_uri'])
135 135 else:
136 136 return HooksDummyClient(extras['hooks_module'])
137 137
138 138
139 139 def _call_hook(hook_name, extras, writer):
140 140 hooks_client = _get_hooks_client(extras)
141 141 log.debug('Hooks, using client:%s', hooks_client)
142 142 result = hooks_client(hook_name, extras)
143 143 log.debug('Hooks got result: %s', result)
144 144 writer.write(result['output'])
145 145 _handle_exception(result)
146 146
147 147 return result['status']
148 148
149 149
150 150 def _extras_from_ui(ui):
151 151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
152 152 if not hook_data:
153 153 # maybe it's inside environ ?
154 154 env_hook_data = os.environ.get('RC_SCM_DATA')
155 155 if env_hook_data:
156 156 hook_data = env_hook_data
157 157
158 158 extras = {}
159 159 if hook_data:
160 160 extras = json.loads(hook_data)
161 161 return extras
162 162
163 163
164 164 def _rev_range_hash(repo, node):
165 165
166 166 commits = []
167 167 start = repo[node].rev()
168 168 for rev in xrange(start, len(repo)):
169 169 ctx = repo[rev]
170 170 commit_id = mercurial.node.hex(ctx.node())
171 171 branch = ctx.branch()
172 172 commits.append((commit_id, branch))
173 173
174 174 return commits
175 175
176 176
177 177 def repo_size(ui, repo, **kwargs):
178 178 extras = _extras_from_ui(ui)
179 179 return _call_hook('repo_size', extras, HgMessageWriter(ui))
180 180
181 181
182 182 def pre_pull(ui, repo, **kwargs):
183 183 extras = _extras_from_ui(ui)
184 184 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
185 185
186 186
187 187 def pre_pull_ssh(ui, repo, **kwargs):
188 188 extras = _extras_from_ui(ui)
189 189 if extras and extras.get('SSH'):
190 190 return pre_pull(ui, repo, **kwargs)
191 191 return 0
192 192
193 193
194 194 def post_pull(ui, repo, **kwargs):
195 195 extras = _extras_from_ui(ui)
196 196 return _call_hook('post_pull', extras, HgMessageWriter(ui))
197 197
198 198
199 199 def post_pull_ssh(ui, repo, **kwargs):
200 200 extras = _extras_from_ui(ui)
201 201 if extras and extras.get('SSH'):
202 202 return post_pull(ui, repo, **kwargs)
203 203 return 0
204 204
205 205
206 206 def pre_push(ui, repo, node=None, **kwargs):
207 207 extras = _extras_from_ui(ui)
208 208
209 209 rev_data = []
210 210 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
211 211 branches = collections.defaultdict(list)
212 212 for commit_id, branch in _rev_range_hash(repo, node):
213 213 branches[branch].append(commit_id)
214 214
215 215 for branch, commits in branches.iteritems():
216 216 old_rev = kwargs.get('node_last') or commits[0]
217 217 rev_data.append({
218 218 'old_rev': old_rev,
219 219 'new_rev': commits[-1],
220 220 'ref': '',
221 221 'type': 'branch',
222 222 'name': branch,
223 223 })
224 224
225 225 extras['commit_ids'] = rev_data
226 226 return _call_hook('pre_push', extras, HgMessageWriter(ui))
227 227
228 228
229 229 def pre_push_ssh(ui, repo, node=None, **kwargs):
230 230 if _extras_from_ui(ui).get('SSH'):
231 231 return pre_push(ui, repo, node, **kwargs)
232 232
233 233 return 0
234 234
235 235
236 236 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
237 237 extras = _extras_from_ui(ui)
238 238 if extras.get('SSH'):
239 239 permission = extras['SSH_PERMISSIONS']
240 240
241 241 if 'repository.write' == permission or 'repository.admin' == permission:
242 242 return 0
243 243
244 244 # non-zero ret code
245 245 return 1
246 246
247 247 return 0
248 248
249 249
250 250 def post_push(ui, repo, node, **kwargs):
251 251 extras = _extras_from_ui(ui)
252 252
253 253 commit_ids = []
254 254 branches = []
255 255 bookmarks = []
256 256 tags = []
257 257
258 258 for commit_id, branch in _rev_range_hash(repo, node):
259 259 commit_ids.append(commit_id)
260 260 if branch not in branches:
261 261 branches.append(branch)
262 262
263 263 if hasattr(ui, '_rc_pushkey_branches'):
264 264 bookmarks = ui._rc_pushkey_branches
265 265
266 266 extras['commit_ids'] = commit_ids
267 267 extras['new_refs'] = {
268 268 'branches': branches,
269 269 'bookmarks': bookmarks,
270 270 'tags': tags
271 271 }
272 272
273 273 return _call_hook('post_push', extras, HgMessageWriter(ui))
274 274
275 275
276 276 def post_push_ssh(ui, repo, node, **kwargs):
277 277 if _extras_from_ui(ui).get('SSH'):
278 278 return post_push(ui, repo, node, **kwargs)
279 279 return 0
280 280
281 281
282 282 def key_push(ui, repo, **kwargs):
283 283 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
284 284 # store new bookmarks in our UI object propagated later to post_push
285 285 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
286 286 return
287 287
288 288
289 289 # backward compat
290 290 log_pull_action = post_pull
291 291
292 292 # backward compat
293 293 log_push_action = post_push
294 294
295 295
296 296 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
297 297 """
298 298 Old hook name: keep here for backward compatibility.
299 299
300 300 This is only required when the installed git hooks are not upgraded.
301 301 """
302 302 pass
303 303
304 304
305 305 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
306 306 """
307 307 Old hook name: keep here for backward compatibility.
308 308
309 309 This is only required when the installed git hooks are not upgraded.
310 310 """
311 311 pass
312 312
313 313
314 314 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
315 315
316 316
317 317 def git_pre_pull(extras):
318 318 """
319 319 Pre pull hook.
320 320
321 321 :param extras: dictionary containing the keys defined in simplevcs
322 322 :type extras: dict
323 323
324 324 :return: status code of the hook. 0 for success.
325 325 :rtype: int
326 326 """
327 327 if 'pull' not in extras['hooks']:
328 328 return HookResponse(0, '')
329 329
330 330 stdout = io.BytesIO()
331 331 try:
332 332 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
333 333 except Exception as error:
334 334 status = 128
335 335 stdout.write('ERROR: %s\n' % str(error))
336 336
337 337 return HookResponse(status, stdout.getvalue())
338 338
339 339
340 340 def git_post_pull(extras):
341 341 """
342 342 Post pull hook.
343 343
344 344 :param extras: dictionary containing the keys defined in simplevcs
345 345 :type extras: dict
346 346
347 347 :return: status code of the hook. 0 for success.
348 348 :rtype: int
349 349 """
350 350 if 'pull' not in extras['hooks']:
351 351 return HookResponse(0, '')
352 352
353 353 stdout = io.BytesIO()
354 354 try:
355 355 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
356 356 except Exception as error:
357 357 status = 128
358 358 stdout.write('ERROR: %s\n' % error)
359 359
360 360 return HookResponse(status, stdout.getvalue())
361 361
362 362
363 363 def _parse_git_ref_lines(revision_lines):
364 364 rev_data = []
365 365 for revision_line in revision_lines or []:
366 366 old_rev, new_rev, ref = revision_line.strip().split(' ')
367 367 ref_data = ref.split('/', 2)
368 368 if ref_data[1] in ('tags', 'heads'):
369 369 rev_data.append({
370 370 'old_rev': old_rev,
371 371 'new_rev': new_rev,
372 372 'ref': ref,
373 373 'type': ref_data[1],
374 374 'name': ref_data[2],
375 375 })
376 376 return rev_data
377 377
378 378
379 379 def git_pre_receive(unused_repo_path, revision_lines, env):
380 380 """
381 381 Pre push hook.
382 382
383 383 :param extras: dictionary containing the keys defined in simplevcs
384 384 :type extras: dict
385 385
386 386 :return: status code of the hook. 0 for success.
387 387 :rtype: int
388 388 """
389 389 extras = json.loads(env['RC_SCM_DATA'])
390 390 rev_data = _parse_git_ref_lines(revision_lines)
391 391 if 'push' not in extras['hooks']:
392 392 return 0
393 393 extras['commit_ids'] = rev_data
394 394 return _call_hook('pre_push', extras, GitMessageWriter())
395 395
396 396
397 397 def git_post_receive(unused_repo_path, revision_lines, env):
398 398 """
399 399 Post push hook.
400 400
401 401 :param extras: dictionary containing the keys defined in simplevcs
402 402 :type extras: dict
403 403
404 404 :return: status code of the hook. 0 for success.
405 405 :rtype: int
406 406 """
407 407 extras = json.loads(env['RC_SCM_DATA'])
408 408 if 'push' not in extras['hooks']:
409 409 return 0
410 410
411 411 rev_data = _parse_git_ref_lines(revision_lines)
412 412
413 413 git_revs = []
414 414
415 415 # N.B.(skreft): it is ok to just call git, as git before calling a
416 416 # subcommand sets the PATH environment variable so that it point to the
417 417 # correct version of the git executable.
418 418 empty_commit_id = '0' * 40
419 419 branches = []
420 420 tags = []
421 421 for push_ref in rev_data:
422 422 type_ = push_ref['type']
423 423
424 424 if type_ == 'heads':
425 425 if push_ref['old_rev'] == empty_commit_id:
426 426 # starting new branch case
427 427 if push_ref['name'] not in branches:
428 428 branches.append(push_ref['name'])
429 429
430 430 # Fix up head revision if needed
431 431 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
432 432 try:
433 433 subprocessio.run_command(cmd, env=os.environ.copy())
434 434 except Exception:
435 435 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
436 436 'refs/heads/%s' % push_ref['name']]
437 437 print("Setting default branch to %s" % push_ref['name'])
438 438 subprocessio.run_command(cmd, env=os.environ.copy())
439 439
440 440 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
441 441 '--format=%(refname)', 'refs/heads/*']
442 442 stdout, stderr = subprocessio.run_command(
443 443 cmd, env=os.environ.copy())
444 444 heads = stdout
445 445 heads = heads.replace(push_ref['ref'], '')
446 446 heads = ' '.join(head for head
447 447 in heads.splitlines() if head) or '.'
448 448 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
449 449 '--pretty=format:%H', '--', push_ref['new_rev'],
450 450 '--not', heads]
451 451 stdout, stderr = subprocessio.run_command(
452 452 cmd, env=os.environ.copy())
453 453 git_revs.extend(stdout.splitlines())
454 454 elif push_ref['new_rev'] == empty_commit_id:
455 455 # delete branch case
456 456 git_revs.append('delete_branch=>%s' % push_ref['name'])
457 457 else:
458 458 if push_ref['name'] not in branches:
459 459 branches.append(push_ref['name'])
460 460
461 461 cmd = [settings.GIT_EXECUTABLE, 'log',
462 462 '{old_rev}..{new_rev}'.format(**push_ref),
463 463 '--reverse', '--pretty=format:%H']
464 464 stdout, stderr = subprocessio.run_command(
465 465 cmd, env=os.environ.copy())
466 466 git_revs.extend(stdout.splitlines())
467 467 elif type_ == 'tags':
468 468 if push_ref['name'] not in tags:
469 469 tags.append(push_ref['name'])
470 470 git_revs.append('tag=>%s' % push_ref['name'])
471 471
472 472 extras['commit_ids'] = git_revs
473 473 extras['new_refs'] = {
474 474 'branches': branches,
475 475 'bookmarks': [],
476 476 'tags': tags,
477 477 }
478 478
479 479 if 'repo_size' in extras['hooks']:
480 480 try:
481 481 _call_hook('repo_size', extras, GitMessageWriter())
482 482 except:
483 483 pass
484 484
485 485 return _call_hook('post_push', extras, GitMessageWriter())
486 486
487 487
488 488 def _get_extras_from_txn_id(path, txn_id):
489 489 extras = {}
490 490 try:
491 491 cmd = ['svnlook', 'pget',
492 492 '-t', txn_id,
493 493 '--revprop', path, 'rc-scm-extras']
494 494 stdout, stderr = subprocessio.run_command(
495 495 cmd, env=os.environ.copy())
496 496 extras = json.loads(base64.urlsafe_b64decode(stdout))
497 497 except Exception:
498 498 log.exception('Failed to extract extras info from txn_id')
499 499
500 500 return extras
501 501
502 502
503 503 def svn_pre_commit(repo_path, commit_data, env):
504 504 path, txn_id = commit_data
505 505 branches = []
506 506 tags = []
507 507
508 508 if env.get('RC_SCM_DATA'):
509 509 extras = json.loads(env['RC_SCM_DATA'])
510 510 else:
511 511 # fallback method to read from TXN-ID stored data
512 512 extras = _get_extras_from_txn_id(path, txn_id)
513 513 if not extras:
514 514 return 0
515 515
516 516 extras['commit_ids'] = []
517 517 extras['txn_id'] = txn_id
518 518 extras['new_refs'] = {
519 519 'branches': branches,
520 520 'bookmarks': [],
521 521 'tags': tags,
522 522 }
523 523
524 524 return _call_hook('pre_push', extras, SvnMessageWriter())
525 525
526 526
527 527 def _get_extras_from_commit_id(commit_id, path):
528 528 extras = {}
529 529 try:
530 530 cmd = ['svnlook', 'pget',
531 531 '-r', commit_id,
532 532 '--revprop', path, 'rc-scm-extras']
533 533 stdout, stderr = subprocessio.run_command(
534 534 cmd, env=os.environ.copy())
535 535 extras = json.loads(base64.urlsafe_b64decode(stdout))
536 536 except Exception:
537 537 log.exception('Failed to extract extras info from commit_id')
538 538
539 539 return extras
540 540
541 541
542 542 def svn_post_commit(repo_path, commit_data, env):
543 543 """
544 544 commit_data is path, rev, txn_id
545 545 """
546 546 path, commit_id, txn_id = commit_data
547 547 branches = []
548 548 tags = []
549 549
550 550 if env.get('RC_SCM_DATA'):
551 551 extras = json.loads(env['RC_SCM_DATA'])
552 552 else:
553 553 # fallback method to read from TXN-ID stored data
554 554 extras = _get_extras_from_commit_id(commit_id, path)
555 555 if not extras:
556 556 return 0
557 557
558 558 extras['commit_ids'] = [commit_id]
559 559 extras['txn_id'] = txn_id
560 560 extras['new_refs'] = {
561 561 'branches': branches,
562 562 'bookmarks': [],
563 563 'tags': tags,
564 564 }
565 565
566 566 if 'repo_size' in extras['hooks']:
567 567 try:
568 568 _call_hook('repo_size', extras, SvnMessageWriter())
569 569 except Exception:
570 570 pass
571 571
572 572 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,234 +1,234 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import itertools
21 21
22 22 import mercurial
23 23 import mercurial.error
24 24 import mercurial.wireprotoserver
25 25 import mercurial.hgweb.common
26 26 import mercurial.hgweb.hgweb_mod
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 30
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 # propagated from mercurial documentation
36 36 HG_UI_SECTIONS = [
37 37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 40 ]
41 41
42 42
43 43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 44 """Extension of hgweb that simplifies some functions."""
45 45
46 46 def _get_view(self, repo):
47 47 """Views are not supported."""
48 48 return repo
49 49
50 50 def loadsubweb(self):
51 51 """The result is only used in the templater method which is not used."""
52 52 return None
53 53
54 54 def run(self):
55 55 """Unused function so raise an exception if accidentally called."""
56 56 raise NotImplementedError
57 57
58 58 def templater(self, req):
59 59 """Function used in an unreachable code path.
60 60
61 61 This code is unreachable because we guarantee that the HTTP request,
62 62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 63 never going to get a user-visible url.
64 64 """
65 65 raise NotImplementedError
66 66
67 67 def archivelist(self, nodeid):
68 68 """Unused function so raise an exception if accidentally called."""
69 69 raise NotImplementedError
70 70
71 71 def __call__(self, environ, start_response):
72 72 """Run the WSGI application.
73 73
74 74 This may be called by multiple threads.
75 75 """
76 76 from mercurial.hgweb import request as requestmod
77 77 req = requestmod.parserequestfromenv(environ)
78 78 res = requestmod.wsgiresponse(req, start_response)
79 79 gen = self.run_wsgi(req, res)
80 80
81 81 first_chunk = None
82 82
83 83 try:
84 84 data = gen.next()
85 85
86 86 def first_chunk():
87 87 yield data
88 88 except StopIteration:
89 89 pass
90 90
91 91 if first_chunk:
92 92 return itertools.chain(first_chunk(), gen)
93 93 return gen
94 94
95 95 def _runwsgi(self, req, res, repo):
96 96
97 97 cmd = req.qsparams.get('cmd', '')
98 98 if not mercurial.wireprotoserver.iscmd(cmd):
99 99 # NOTE(marcink): for unsupported commands, we return bad request
100 100 # internally from HG
101 101 from mercurial.hgweb.common import statusmessage
102 102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 103 res.setbodybytes('')
104 104 return res.sendresponse()
105 105
106 106 return super(HgWeb, self)._runwsgi(req, res, repo)
107 107
108 108
109 109 def make_hg_ui_from_config(repo_config):
110 110 baseui = mercurial.ui.ui()
111 111
112 112 # clean the baseui object
113 113 baseui._ocfg = mercurial.config.config()
114 114 baseui._ucfg = mercurial.config.config()
115 115 baseui._tcfg = mercurial.config.config()
116 116
117 117 for section, option, value in repo_config:
118 118 baseui.setconfig(section, option, value)
119 119
120 120 # make our hgweb quiet so it doesn't print output
121 121 baseui.setconfig('ui', 'quiet', 'true')
122 122
123 123 return baseui
124 124
125 125
126 126 def update_hg_ui_from_hgrc(baseui, repo_path):
127 127 path = os.path.join(repo_path, '.hg', 'hgrc')
128 128
129 129 if not os.path.isfile(path):
130 130 log.debug('hgrc file is not present at %s, skipping...', path)
131 131 return
132 132 log.debug('reading hgrc from %s', path)
133 133 cfg = mercurial.config.config()
134 134 cfg.read(path)
135 135 for section in HG_UI_SECTIONS:
136 136 for k, v in cfg.items(section):
137 137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 138 baseui.setconfig(section, k, v)
139 139
140 140
141 141 def create_hg_wsgi_app(repo_path, repo_name, config):
142 142 """
143 143 Prepares a WSGI application to handle Mercurial requests.
144 144
145 145 :param config: is a list of 3-item tuples representing a ConfigObject
146 146 (it is the serialized version of the config object).
147 147 """
148 148 log.debug("Creating Mercurial WSGI application")
149 149
150 150 baseui = make_hg_ui_from_config(config)
151 151 update_hg_ui_from_hgrc(baseui, repo_path)
152 152
153 153 try:
154 154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
155 except mercurial.error.RequirementError as exc:
156 raise exceptions.RequirementException(exc)
155 except mercurial.error.RequirementError as e:
156 raise exceptions.RequirementException(e)(e)
157 157
158 158
159 159 class GitHandler(object):
160 160 """
161 161 Handler for Git operations like push/pull etc
162 162 """
163 163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
164 164 extras):
165 165 if not os.path.isdir(repo_location):
166 166 raise OSError(repo_location)
167 167 self.content_path = repo_location
168 168 self.repo_name = repo_name
169 169 self.repo_location = repo_location
170 170 self.extras = extras
171 171 self.git_path = git_path
172 172 self.update_server_info = update_server_info
173 173
174 174 def __call__(self, environ, start_response):
175 175 app = webob.exc.HTTPNotFound()
176 176 candidate_paths = (
177 177 self.content_path, os.path.join(self.content_path, '.git'))
178 178
179 179 for content_path in candidate_paths:
180 180 try:
181 181 app = pygrack.GitRepository(
182 182 self.repo_name, content_path, self.git_path,
183 183 self.update_server_info, self.extras)
184 184 break
185 185 except OSError:
186 186 continue
187 187
188 188 return app(environ, start_response)
189 189
190 190
191 191 def create_git_wsgi_app(repo_path, repo_name, config):
192 192 """
193 193 Creates a WSGI application to handle Git requests.
194 194
195 195 :param config: is a dictionary holding the extras.
196 196 """
197 197 git_path = settings.GIT_EXECUTABLE
198 198 update_server_info = config.pop('git_update_server_info')
199 199 app = GitHandler(
200 200 repo_path, repo_name, git_path, update_server_info, config)
201 201
202 202 return app
203 203
204 204
205 205 class GitLFSHandler(object):
206 206 """
207 207 Handler for Git LFS operations
208 208 """
209 209
210 210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
211 211 extras):
212 212 if not os.path.isdir(repo_location):
213 213 raise OSError(repo_location)
214 214 self.content_path = repo_location
215 215 self.repo_name = repo_name
216 216 self.repo_location = repo_location
217 217 self.extras = extras
218 218 self.git_path = git_path
219 219 self.update_server_info = update_server_info
220 220
221 221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
222 222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
223 223 return app
224 224
225 225
226 226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 227 git_path = settings.GIT_EXECUTABLE
228 228 update_server_info = config.pop('git_update_server_info')
229 229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 231 app = GitLFSHandler(
232 232 repo_path, repo_name, git_path, update_server_info, config)
233 233
234 234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,705 +1,705 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 from urllib2 import URLError
22 22 import logging
23 23 import posixpath as vcspath
24 24 import StringIO
25 25 import urllib
26 26 import traceback
27 27
28 28 import svn.client
29 29 import svn.core
30 30 import svn.delta
31 31 import svn.diff
32 32 import svn.fs
33 33 import svn.repos
34 34
35 35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 36 from vcsserver.base import RepoFactory, raise_from_original
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
# Compare with subversion/svnadmin/svnadmin.c
#
# Maps each supported ``pre-X-compatible`` flag to the repository format
# version handed to the Subversion fs layer on repository creation.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Set of svn compatible version flags.
# Derived from the map above so the two definitions can never drift apart.
svn_compatible_versions = set(svn_compatible_versions_map)
58 58
59 59
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # Exceptions tagged with ``_vcs_kind`` were already produced by
            # the exceptions-wrapping machinery and are safe to send over the
            # wire; anything else is wrapped as "unhandled", keeping the
            # original exception attached and the traceback intact
            # (see vcsserver.base.raise_from_original).
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e))
            raise
    return wrapper
71 71
72 72
class SubversionFactory(RepoFactory):
    """Factory creating/opening Subversion repository objects for a wire."""

    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        """
        Open the repository at ``wire['path']``, creating it first when
        ``create`` is set.

        :param compatible_version: optional ``pre-X-compatible`` flag that
            selects an older repository format on creation; must be one of
            ``svn_compatible_versions``.
        """
        path = svn.core.svn_path_canonicalize(wire['path'])
        if create:
            # default to the newest format unless a compatibility flag
            # explicitly asks for an older one
            fs_config = {'compatible-version': '1.9'}
            if compatible_version:
                if compatible_version not in svn_compatible_versions:
                    raise Exception('Unknown SVN compatible version "{}"'
                                    .format(compatible_version))
                fs_config['compatible-version'] = \
                    svn_compatible_versions_map[compatible_version]

            log.debug('Create SVN repo with config "%s"', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)
        else:
            repo = svn.repos.open(path)

        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.

        Uses internally the low level beaker API since the decorators introduce
        significant overhead.
        """
        region = self._cache_region
        context = wire.get('context', None)
        repo_path = wire.get('path', '')
        context_uid = '{}'.format(context)
        cache = wire.get('cache', True)
        # only cache when the caller supplied a context AND did not opt out
        cache_on = context and cache

        @region.conditional_cache_on_arguments(condition=cache_on)
        def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
            return self._create_repo(wire, create, compatible_version)

        return create_new_repo(self.repo_type, repo_path, context_uid,
                               compatible_version)
115 115
116 116
# Translation of svn node kinds to the neutral strings used over the wire.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
121 121
122 122
class SvnRemote(object):
    """Remote facade exposing Subversion operations to the vcsserver."""

    def __init__(self, factory, hg_factory=None):
        self._factory = factory
        # TODO: Remove once we do not use internal Mercurial objects anymore
        # for subversion
        self._hg_factory = hg_factory

    @reraise_safe_exceptions
    def discover_svn_version(self):
        """Return the version string of the svn bindings, or None if absent."""
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None
        return svn_ver

    def check_url(self, url, config_items):
        """
        Validate that ``url`` points at a reachable Subversion repository.

        :raises URLError: when the remote repository uuid cannot be fetched.
        """
        # this can throw exception if not installed, but we detect this
        from hgsubversion import svnrepo

        baseui = self._hg_factory._create_config(config_items)
        # uuid function get's only valid UUID from proper repo, else
        # throws exception
        try:
            svnrepo.svnremoterepo(baseui, url).svn.uuid
        except Exception:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
            raise URLError(
                '"%s" is not a valid Subversion source url.' % (url, ))
        return True

    def is_path_valid_repository(self, wire, path):
        """Return True when ``path`` is an openable Subversion repository."""

        # NOTE(marcink): short circuit the check for SVN repo
        # the repos.open might be expensive to check, but we have one cheap
        # pre condition that we can use, to check for 'format' file

        if not os.path.isfile(os.path.join(path, 'format')):
            return False

        try:
            svn.repos.open(path)
        except svn.core.SubversionException:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
            return False
        return True

    @reraise_safe_exceptions
    def verify(self, wire,):
        """Run ``svnadmin info`` on the repository and return its stdout."""
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        cmd = ['svnadmin', 'info', repo_path]
        stdout, stderr = subprocessio.run_command(cmd)
        return stdout

    def lookup(self, wire, revision):
        """Resolve HEAD (-1/None/'HEAD') to the youngest revision number."""
        if revision not in [-1, None, 'HEAD']:
            # only HEAD lookups are supported here
            raise NotImplementedError
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        head = svn.fs.youngest_rev(fs_ptr)
        return head

    def lookup_interval(self, wire, start_ts, end_ts):
        """
        Map a timestamp interval to a ``(start_rev, end_rev)`` pair.

        Missing bounds default to revision 1 and the youngest revision.
        """
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        start_rev = None
        end_rev = None
        if start_ts:
            start_ts_svn = apr_time_t(start_ts)
            # +1 presumably skips the revision dated at/before start_ts —
            # TODO confirm against svn.repos.dated_revision semantics
            start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
        else:
            start_rev = 1
        if end_ts:
            end_ts_svn = apr_time_t(end_ts)
            end_rev = svn.repos.dated_revision(repo, end_ts_svn)
        else:
            end_rev = svn.fs.youngest_rev(fsobj)
        return start_rev, end_rev

    def revision_properties(self, wire, revision):
        """Return all revision properties of ``revision``."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        return svn.fs.revision_proplist(fs_ptr, revision)

    def revision_changes(self, wire, revision):
        """
        Replay ``revision`` and classify its changed paths.

        :return: dict with ``added``, ``changed`` and ``removed`` path lists.
            Directory nodes are skipped.
        """

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.iteritems():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.

            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action %s not supported on path %s" % (
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes

    def node_history(self, wire, path, revision, limit):
        """
        Collect revisions in which ``path`` changed, walking history
        backwards from ``revision``; copies are not followed. At most
        ``limit`` entries are returned when ``limit`` is truthy.
        """
        cross_copies = False
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        history_revisions = []
        history = svn.fs.node_history(rev_root, path)
        history = svn.fs.history_prev(history, cross_copies)
        while history:
            __, node_revision = svn.fs.history_location(history)
            history_revisions.append(node_revision)
            if limit and len(history_revisions) >= limit:
                break
            history = svn.fs.history_prev(history, cross_copies)
        return history_revisions

    def node_properties(self, wire, path, revision):
        """Return the node properties of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)
        return svn.fs.node_proplist(rev_root, path)

    def file_annotate(self, wire, path, revision):
        """
        Annotate (blame) ``path`` at ``revision``.

        :return: list of ``(line_no, revision, line)`` tuples.
        """
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations

    def get_node_type(self, wire, path, rev=None):
        """Return 'file', 'dir' or None for ``path`` (defaults to HEAD)."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_rev(fs_ptr)
        root = svn.fs.revision_root(fs_ptr, rev)
        node = svn.fs.check_path(root, path)
        return NODE_TYPE_MAPPING.get(node, None)

    def get_nodes(self, wire, path, revision=None):
        """List ``(name, type)`` entries of directory ``path``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_rev(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        entries = svn.fs.dir_entries(root, path)
        result = []
        for entry_path, entry_info in entries.iteritems():
            result.append(
                (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
        return result

    def get_file_content(self, wire, path, rev=None):
        """Return the raw content of file ``path`` (defaults to HEAD)."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            # NOTE(review): other methods use svn.fs.youngest_rev — confirm
            # youngest_revision is a valid alias in these bindings
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()

    def get_file_size(self, wire, path, revision=None):
        """Return the size in bytes of file ``path``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            # NOTE(review): see youngest_revision note in get_file_content
            revision = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        size = svn.fs.file_length(root, path)
        return size

    def create_repository(self, wire, compatible_version=None):
        """Create a new Subversion repository at ``wire['path']``."""
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)

    def import_remote_repository(self, wire, src_url):
        """
        Import ``src_url`` into the local repository by piping
        ``svnrdump dump`` into ``svnadmin load``.
        """
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        # TODO: johbo: URL checks ?
        import subprocess
        # NOTE(review): rdump.stderr is a PIPE that is only read after wait();
        # a very chatty svnrdump could fill the pipe and deadlock — confirm
        rdump = subprocess.Popen(
            ['svnrdump', 'dump', '--non-interactive', src_url],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'
            raise Exception(
                'Failed to dump the remote repository from %s.' % src_url,
                reason)
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

    def commit(self, wire, message, author, timestamp, updated, removed):
        """
        Commit ``updated``/``removed`` node dicts in one transaction.

        Optionally backdates the new revision's ``svn:date`` to ``timestamp``.
        :return: the committed revision id.
        """
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):
        """
        Return a unified diff between two revisions; SubversionException is
        swallowed and an empty diff returned (e.g. for non-existing paths).
        """

        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        try:
            return diff_creator.generate_diff()
        except svn.core.SubversionException as e:
            log.exception(
                "Error during diff operation operation. "
                "Path might not exist %s, %s" % (path1, path2))
            return ""

    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # Subversion has no largefiles concept; always False.
        return False

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Install the RhodeCode svn hooks into the repository."""
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)
443 443
444 444
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set to True once a node's mime type reveals binary content; makes
    # the line-diffing step a no-op for that node.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # an empty source path means "same path on both sides"
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffs between incompatible node kinds (file vs. dir)."""
        if (self.tgt_kind != svn.core.svn_node_none and
            self.src_kind != svn.core.svn_node_none and
            self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the git-style unified diff text for the configured nodes."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        """Collect per-node changes via dir_delta2 and diff each node."""
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        """Diff a single file node; infers add/delete from node kinds."""
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write the git-style diff headers and hunks for one node."""

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        """Return the node's svn:mime-type, falling back to the source root."""
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Read a node's content as a list of lines; [] for binary/non-file."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
604 604
605 605
606 606
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions

    Each change is appended to ``self.changes`` as a
    ``(path, node_kind, action)`` tuple.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
625 625
626 626
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for the svn delta editor that grants every access."""
    return True
629 629
630 630
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node is a dict carrying at least 'path' and (for updates)
        # 'content' and optionally 'properties'
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node (and missing parents) if needed, then write it."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        """Create all missing ancestor directories, outermost first."""
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        """Return True if ``path`` exists in the transaction root."""
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        """Replace the node's content with ``node['content']``."""
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        """Apply every entry of the optional ``properties`` dict."""
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
686 686 self.txn_root, self.node['path'], key, value)
687 687
688 688
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t

    APR counts time in microseconds, hence the scaling by 10**6.
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
694 694
695 695
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    Returns a ``svn_opt_revision_t`` of kind ``svn_opt_revision_number``
    wrapping the given revision number, as expected by svn.client calls.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,127 +1,127 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
class TestHGLookup(object):
    """Tests that HgRemote.lookup wraps mercurial LookupError safely."""

    def setup(self):
        # repo[...] always raises LookupError, simulating a missing commit
        self.mock_repo = MagicMock()
        self.mock_repo.__getitem__.side_effect = LookupError(
            'revision_or_commit_id', 'index', 'message')
        factory = Mock()
        factory.repo = Mock(return_value=self.mock_repo)
        self.remote_hg = hg.HgRemote(factory)

    def test_fail_lookup_hg(self):
        """The raised exception is tagged 'lookup' and keeps the rev id."""
        with pytest.raises(Exception) as exc_info:
            self.remote_hg.lookup(
                wire=None, revision='revision_or_commit_id', both=True)

        assert exc_info.value._vcs_kind == 'lookup'
        assert 'revision_or_commit_id' in exc_info.value.args
45 45
46 46
class TestDiff(object):
    """Tests exception wrapping on the HgRemote.diff path."""

    def test_raising_safe_exception_when_lookup_failed(self):
        """A LookupError inside mercurial.patch.diff surfaces as a plain
        Exception tagged with _vcs_kind == 'lookup'."""
        repo = Mock()
        factory = Mock()
        factory.repo = Mock(return_value=repo)
        hg_remote = hg.HgRemote(factory)
        with patch('mercurial.patch.diff') as diff_mock:
            diff_mock.side_effect = LookupError(
                'deadbeef', 'index', 'message')
            with pytest.raises(Exception) as exc_info:
                hg_remote.diff(
                    wire=None, rev1='deadbeef', rev2='deadbee1',
                    file_filter=None, opt_git=True, opt_ignorews=True,
                    context=3)
            assert type(exc_info.value) == Exception
            assert exc_info.value._vcs_kind == 'lookup'
63 63
64 64
class TestReraiseSafeExceptions(object):
    """Tests for the hg.reraise_safe_exceptions decorator."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        """Every public HgRemote method must carry the decorator."""
        factory = Mock()
        hg_remote = hg.HgRemote(factory)
        methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
        decorator = hg.reraise_safe_exceptions(None)
        for method_name, method in methods:
            if not method_name.startswith('_'):
                # compare code objects: same wrapper => decorator applied
                assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (hgcompat.Abort(), 'abort'),
        (hgcompat.InterventionRequired(), 'abort'),
        (hgcompat.RepoLookupError(), 'lookup'),
        (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
        (hgcompat.RepoError(), 'error'),
        (hgcompat.RequirementError(), 'requirement'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        """Known mercurial exceptions map to their wire-safe kinds."""
        @hg.reraise_safe_exceptions
        def fake_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            fake_method()
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type

    def test_keeps_original_traceback(self):
        """Wrapping must preserve the original traceback tail."""
        @hg.reraise_safe_exceptions
        def fake_method():
            try:
                raise hgcompat.Abort()
            except:
                self.original_traceback = traceback.format_tb(
                    sys.exc_info()[2])
                raise

        try:
            fake_method()
        except Exception:
            new_traceback = traceback.format_tb(sys.exc_info()[2])

        new_traceback_tail = new_traceback[-len(self.original_traceback):]
        assert new_traceback_tail == self.original_traceback

    def test_maps_unknow_exceptions_to_unhandled(self):
        """Foreign exceptions are wrapped as 'unhandled'."""
        @hg.reraise_safe_exceptions
        def stub_method():
            raise ValueError('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'unhandled'

    def test_does_not_map_known_exceptions(self):
        """Already-tagged exceptions pass through unchanged."""
        @hg.reraise_safe_exceptions
        def stub_method():
            # NOTE(review): LookupException appears to be a factory whose
            # call returns the actual exception constructor — confirm
            # against vcsserver.exceptions
            raise exceptions.LookupException()('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'lookup'
General Comments 0
You need to be logged in to leave comments. Login now