git: switched most git operations to libgit2
marcink
r725:d64a71b9 default
@@ -1,94 +1,76 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 23 from vcsserver.lib.rc_cache import region_meta
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 27 class RepoFactory(object):
28 28 """
29 29 Utility to create repository instances
30 30
31 31 It provides internal caching of the `repo` object based on
32 32 the :term:`call context`.
33 33 """
34 34 repo_type = None
35 35
36 36 def __init__(self):
37 37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38 38
39 39 def _create_config(self, path, config):
40 40 config = {}
41 41 return config
42 42
43 43 def _create_repo(self, wire, create):
44 44 raise NotImplementedError()
45 45
46 46 def repo(self, wire, create=False):
47 """
48 Get a repository instance for the given path.
49
50 Uses internally the low level beaker API since the decorators introduce
51 significant overhead.
52 """
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
62 return self._create_repo(wire, create)
63
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
65 return repo
47 raise NotImplementedError()
66 48
67 49
68 50 def obfuscate_qs(query_string):
69 51 if query_string is None:
70 52 return None
71 53
72 54 parsed = []
73 55 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
74 56 if k in ['auth_token', 'api_key']:
75 57 v = "*****"
76 58 parsed.append((k, v))
77 59
78 60 return '&'.join('{}{}'.format(
79 61 k, '={}'.format(v) if v else '') for k, v in parsed)
80 62
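For reference, a small usage sketch of this helper (the query strings are made up):

assert obfuscate_qs('api_key=secret&branch=stable') == 'api_key=*****&branch=stable'
assert obfuscate_qs('auth_token=abc&debug') == 'auth_token=*****&debug'
assert obfuscate_qs(None) is None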
81 63
82 64 def raise_from_original(new_type):
83 65 """
84 66 Raise a new exception type with original args and traceback.
85 67 """
86 68 exc_type, exc_value, exc_traceback = sys.exc_info()
87 69 new_exc = new_type(*exc_value.args)
88 70 # store the original traceback into the new exc
89 71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
90 72
91 73 try:
92 74 raise new_exc, None, exc_traceback
93 75 finally:
94 76 del exc_traceback
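A hedged usage sketch: wrap a backend error into a neutral vcsserver exception while keeping the original args and traceback; `risky_backend_call` is a placeholder, and the pattern mirrors the hg remote decorator further below.

from vcsserver import exceptions
from vcsserver.base import raise_from_original

try:
    risky_backend_call()  # placeholder for any dulwich/mercurial call
except Exception as e:
    # re-raise as a neutral vcsserver exception, preserving args and traceback
    raise_from_original(exceptions.VcsException(e))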
@@ -1,752 +1,845 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import collections
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 import more_itertools
29 import pygit2
30 from pygit2 import Repository as LibGit2Repo
29 31 from dulwich import index, objects
30 32 from dulwich.client import HttpGitClient, LocalGitClient
31 33 from dulwich.errors import (
32 34 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 35 MissingCommitError, ObjectMissing, HangupException,
34 36 UnexpectedCommandError)
35 from dulwich.repo import Repo as DulwichRepo, Tag
37 from dulwich.repo import Repo as DulwichRepo
36 38 from dulwich.server import update_server_info
37 39
38 40 from vcsserver import exceptions, settings, subprocessio
39 41 from vcsserver.utils import safe_str
40 42 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 43 from vcsserver.hgcompat import (
42 44 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 45 from vcsserver.git_lfs.lib import LFSOidStore
44 46
45 47 DIR_STAT = stat.S_IFDIR
46 48 FILE_MODE = stat.S_IFMT
47 49 GIT_LINK = objects.S_IFGITLINK
48 50
49 51 log = logging.getLogger(__name__)
50 52
51 53
52 54 def reraise_safe_exceptions(func):
53 55 """Converts Dulwich exceptions to something neutral."""
56
54 57 @wraps(func)
55 58 def wrapper(*args, **kwargs):
56 59 try:
57 60 return func(*args, **kwargs)
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 ObjectMissing) as e:
60 exc = exceptions.LookupException(e)
61 raise exc(e)
61 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
62 exc = exceptions.LookupException(org_exc=e)
63 raise exc(safe_str(e))
62 64 except (HangupException, UnexpectedCommandError) as e:
63 exc = exceptions.VcsException(e)
64 raise exc(e)
65 exc = exceptions.VcsException(org_exc=e)
66 raise exc(safe_str(e))
65 67 except Exception as e:
66 68 # NOTE(marcink): because of how dulwich handles some exceptions
67 69 # (KeyError on empty repos), we cannot track this and catch all
68 70 # exceptions; they may be exceptions from other handlers
69 71 #if not hasattr(e, '_vcs_kind'):
70 72 #log.exception("Unhandled exception in git remote call")
71 73 #raise_from_original(exceptions.UnhandledException)
72 74 raise
73 75 return wrapper
74 76
75 77
76 78 class Repo(DulwichRepo):
77 79 """
78 80 A wrapper for dulwich Repo class.
79 81
80 82 Since dulwich sometimes keeps .idx file descriptors open, this can lead to
81 83 "Too many open files" errors. We need to close all opened file descriptors
82 84 once the repo object is destroyed.
83
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 to 0.12.0 +
86 85 """
87 86 def __del__(self):
88 87 if hasattr(self, 'object_store'):
89 88 self.close()
90 89
91 90
91 class Repository(LibGit2Repo):
92
93 def __enter__(self):
94 return self
95
96 def __exit__(self, exc_type, exc_val, exc_tb):
97 self.free()
98
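The wrapper is meant to be used as a context manager so the underlying libgit2 handle is released via free(); a minimal sketch (the repository path is illustrative):

repo_init = Repository('/srv/repos/example.git')
with repo_init as repo:
    print(repo.is_bare)  # handle is freed automatically on exit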
99
92 100 class GitFactory(RepoFactory):
93 101 repo_type = 'git'
94 102
95 def _create_repo(self, wire, create):
96 repo_path = str_to_dulwich(wire['path'])
97 return Repo(repo_path)
103 def _create_repo(self, wire, create, use_libgit2=False):
104 if use_libgit2:
105 return Repository(wire['path'])
106 else:
107 repo_path = str_to_dulwich(wire['path'])
108 return Repo(repo_path)
109
110 def repo(self, wire, create=False, use_libgit2=False):
111 """
112 Get a repository instance for the given path.
113 """
114 region = self._cache_region
115 context = wire.get('context', None)
116 repo_path = wire.get('path', '')
117 context_uid = '{}'.format(context)
118 cache = wire.get('cache', True)
119 cache_on = context and cache
120
121 @region.conditional_cache_on_arguments(condition=cache_on)
122 def create_new_repo(_repo_type, _repo_path, _context_uid, _use_libgit2):
123 return self._create_repo(wire, create, use_libgit2)
124
125 repo = create_new_repo(self.repo_type, repo_path, context_uid, use_libgit2)
126 return repo
127
128 def repo_libgit2(self, wire):
129 return self.repo(wire, use_libgit2=True)
98 130
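A sketch of how the factory is driven by a `wire` dict, assuming the dogpile cache regions are already configured; the path and context values are made up:

factory = GitFactory()
wire = {'path': '/srv/repos/example.git', 'context': 'req-1234', 'cache': True}

dulwich_repo = factory.repo(wire)          # dulwich-backed Repo (default)
libgit2_repo = factory.repo_libgit2(wire)  # pygit2-backed Repository wrapper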
99 131
100 132 class GitRemote(object):
101 133
102 134 def __init__(self, factory):
103 135 self._factory = factory
104 136 self.peeled_ref_marker = '^{}'
105 137 self._bulk_methods = {
106 "author": self.commit_attribute,
107 "date": self.get_object_attrs,
108 "message": self.commit_attribute,
109 "parents": self.commit_attribute,
138 "date": self.date,
139 "author": self.author,
140 "message": self.message,
141 "parents": self.parents,
110 142 "_commit": self.revision,
111 143 }
112 144
113 145 def _wire_to_config(self, wire):
114 146 if 'config' in wire:
115 147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 148 return {}
117 149
118 def _assign_ref(self, wire, ref, commit_id):
119 repo = self._factory.repo(wire)
120 repo[ref] = commit_id
121
122 150 def _remote_conf(self, config):
123 151 params = [
124 152 '-c', 'core.askpass=""',
125 153 ]
126 154 ssl_cert_dir = config.get('vcs_ssl_dir')
127 155 if ssl_cert_dir:
128 156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 157 return params
130 158
131 159 @reraise_safe_exceptions
132 160 def is_empty(self, wire):
133 repo = self._factory.repo(wire)
134 try:
135 return not repo.head()
136 except Exception:
137 log.exception("failed to read object_store")
138 return True
161 repo = self._factory.repo_libgit2(wire)
162
163 # NOTE(marcink): old solution as an alternative
164 # try:
165 # return not repo.head.name
166 # except Exception:
167 # return True
168
169 return repo.is_empty
139 170
140 171 @reraise_safe_exceptions
141 172 def add_object(self, wire, content):
142 173 repo = self._factory.repo(wire)
143 174 blob = objects.Blob()
144 175 blob.set_raw_string(content)
145 176 repo.object_store.add_object(blob)
146 177 return blob.id
147 178
148 179 @reraise_safe_exceptions
149 180 def assert_correct_path(self, wire):
150 path = wire.get('path')
151 181 try:
152 self._factory.repo(wire)
153 except NotGitRepository as e:
182 self._factory.repo_libgit2(wire)
183 except pygit2.GitError:
184 path = wire.get('path')
154 185 tb = traceback.format_exc()
155 186 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
156 187 return False
157 188
158 189 return True
159 190
160 191 @reraise_safe_exceptions
161 192 def bare(self, wire):
162 repo = self._factory.repo(wire)
163 return repo.bare
193 repo = self._factory.repo_libgit2(wire)
194 return repo.is_bare
164 195
165 196 @reraise_safe_exceptions
166 197 def blob_as_pretty_string(self, wire, sha):
167 repo = self._factory.repo(wire)
168 return repo[sha].as_pretty_string()
198 repo_init = self._factory.repo_libgit2(wire)
199 with repo_init as repo:
200 blob_obj = repo[sha]
201 blob = blob_obj.data
202 return blob
169 203
170 204 @reraise_safe_exceptions
171 205 def blob_raw_length(self, wire, sha):
172 repo = self._factory.repo(wire)
173 blob = repo[sha]
174 return blob.raw_length()
206 repo_init = self._factory.repo_libgit2(wire)
207 with repo_init as repo:
208 blob = repo[sha]
209 return blob.size
175 210
176 211 def _parse_lfs_pointer(self, raw_content):
177 212
178 213 spec_string = 'version https://git-lfs.github.com/spec'
179 214 if raw_content and raw_content.startswith(spec_string):
180 215 pattern = re.compile(r"""
181 216 (?:\n)?
182 217 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
183 218 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
184 219 ^size[ ](?P<oid_size>[0-9]+)\n
185 220 (?:\n)?
186 221 """, re.VERBOSE | re.MULTILINE)
187 222 match = pattern.match(raw_content)
188 223 if match:
189 224 return match.groupdict()
190 225
191 226 return {}
192 227
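An example of the pointer content the regex above is meant to match (the oid and size are made up):

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\n"
    "size 12345\n"
)
# _parse_lfs_pointer(pointer) should yield
# {'spec_ver': 'v1', 'oid_hash': '4d7a...2393', 'oid_size': '12345'}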
193 228 @reraise_safe_exceptions
194 229 def is_large_file(self, wire, sha):
195 230 repo = self._factory.repo(wire)
196 231 blob = repo[sha]
197 232 return self._parse_lfs_pointer(blob.as_raw_string())
198 233
199 234 @reraise_safe_exceptions
200 235 def in_largefiles_store(self, wire, oid):
201 236 repo = self._factory.repo(wire)
202 237 conf = self._wire_to_config(wire)
203 238
204 239 store_location = conf.get('vcs_git_lfs_store_location')
205 240 if store_location:
206 241 repo_name = repo.path
207 242 store = LFSOidStore(
208 243 oid=oid, repo=repo_name, store_location=store_location)
209 244 return store.has_oid()
210 245
211 246 return False
212 247
213 248 @reraise_safe_exceptions
214 249 def store_path(self, wire, oid):
215 250 repo = self._factory.repo(wire)
216 251 conf = self._wire_to_config(wire)
217 252
218 253 store_location = conf.get('vcs_git_lfs_store_location')
219 254 if store_location:
220 255 repo_name = repo.path
221 256 store = LFSOidStore(
222 257 oid=oid, repo=repo_name, store_location=store_location)
223 258 return store.oid_path
224 259 raise ValueError('Unable to fetch oid with path {}'.format(oid))
225 260
226 261 @reraise_safe_exceptions
227 262 def bulk_request(self, wire, rev, pre_load):
228 263 result = {}
229 264 for attr in pre_load:
230 265 try:
231 266 method = self._bulk_methods[attr]
232 267 args = [wire, rev]
233 if attr == "date":
234 args.extend(["commit_time", "commit_timezone"])
235 elif attr in ["author", "message", "parents"]:
236 args.append(attr)
237 268 result[attr] = method(*args)
238 269 except KeyError as e:
239 raise exceptions.VcsException(e)(
240 "Unknown bulk attribute: %s" % attr)
270 raise exceptions.VcsException(e)("Unknown bulk attribute: %s" % attr)
241 271 return result
242 272
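With the per-attribute methods now registered in _bulk_methods, a bulk call looks roughly like this; the revision is made up and `git_remote` is assumed to be an already-constructed GitRemote instance:

data = git_remote.bulk_request(
    wire, rev='a1b2c3d4e5f6a7b8c9d0a1b2c3d4e5f6a7b8c9d0',
    pre_load=['author', 'date', 'message', 'parents'])
# -> {'author': ..., 'date': [commit_time, offset], 'message': ..., 'parents': [...]}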
243 273 def _build_opener(self, url):
244 274 handlers = []
245 275 url_obj = url_parser(url)
246 276 _, authinfo = url_obj.authinfo()
247 277
248 278 if authinfo:
249 279 # create a password manager
250 280 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
251 281 passmgr.add_password(*authinfo)
252 282
253 283 handlers.extend((httpbasicauthhandler(passmgr),
254 284 httpdigestauthhandler(passmgr)))
255 285
256 286 return urllib2.build_opener(*handlers)
257 287
288 def _type_id_to_name(self, type_id):
289 return {
290 1: b'commit',
291 2: b'tree',
292 3: b'blob',
293 4: b'tag'
294 }[type_id]
295
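The ids follow libgit2's object type numbering; a quick sanity sketch, again assuming `git_remote` is an already-constructed GitRemote instance:

assert git_remote._type_id_to_name(1) == b'commit'
assert git_remote._type_id_to_name(2) == b'tree'
assert git_remote._type_id_to_name(3) == b'blob'
assert git_remote._type_id_to_name(4) == b'tag'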
258 296 @reraise_safe_exceptions
259 297 def check_url(self, url, config):
260 298 url_obj = url_parser(url)
261 299 test_uri, _ = url_obj.authinfo()
262 300 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
263 301 url_obj.query = obfuscate_qs(url_obj.query)
264 302 cleaned_uri = str(url_obj)
265 303 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
266 304
267 305 if not test_uri.endswith('info/refs'):
268 306 test_uri = test_uri.rstrip('/') + '/info/refs'
269 307
270 308 o = self._build_opener(url)
271 309 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
272 310
273 311 q = {"service": 'git-upload-pack'}
274 312 qs = '?%s' % urllib.urlencode(q)
275 313 cu = "%s%s" % (test_uri, qs)
276 314 req = urllib2.Request(cu, None, {})
277 315
278 316 try:
279 317 log.debug("Trying to open URL %s", cleaned_uri)
280 318 resp = o.open(req)
281 319 if resp.code != 200:
282 320 raise exceptions.URLError()('Return Code is not 200')
283 321 except Exception as e:
284 322 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
285 323 # means it cannot be cloned
286 324 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
287 325
288 326 # now detect if it's proper git repo
289 327 gitdata = resp.read()
290 328 if 'service=git-upload-pack' in gitdata:
291 329 pass
292 330 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
293 331 # old style git can return some other format !
294 332 pass
295 333 else:
296 334 raise exceptions.URLError()(
297 335 "url [%s] does not look like an git" % (cleaned_uri,))
298 336
299 337 return True
300 338
301 339 @reraise_safe_exceptions
302 340 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
303 341 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
304 342 remote_refs = self.pull(wire, url, apply_refs=False)
305 343 repo = self._factory.repo(wire)
306 344 if isinstance(valid_refs, list):
307 345 valid_refs = tuple(valid_refs)
308 346
309 347 for k in remote_refs:
310 348 # only parse heads/tags and skip so called deferred tags
311 349 if k.startswith(valid_refs) and not k.endswith(deferred):
312 350 repo[k] = remote_refs[k]
313 351
314 352 if update_after_clone:
315 353 # we want to checkout HEAD
316 354 repo["HEAD"] = remote_refs["HEAD"]
317 355 index.build_index_from_tree(repo.path, repo.index_path(),
318 356 repo.object_store, repo["HEAD"].tree)
319 357
320 358 # TODO: this is quite complex, check if that can be simplified
321 359 @reraise_safe_exceptions
322 360 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
323 361 repo = self._factory.repo(wire)
324 362 object_store = repo.object_store
325 363
326 364 # Create tree and populates it with blobs
327 365 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
328 366
329 367 for node in updated:
330 368 # Compute subdirs if needed
331 369 dirpath, nodename = vcspath.split(node['path'])
332 370 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
333 371 parent = commit_tree
334 372 ancestors = [('', parent)]
335 373
336 374 # Tries to dig for the deepest existing tree
337 375 while dirnames:
338 376 curdir = dirnames.pop(0)
339 377 try:
340 378 dir_id = parent[curdir][1]
341 379 except KeyError:
342 380 # put curdir back into dirnames and stop
343 381 dirnames.insert(0, curdir)
344 382 break
345 383 else:
346 384 # If found, updates parent
347 385 parent = repo[dir_id]
348 386 ancestors.append((curdir, parent))
349 387 # Now parent is deepest existing tree and we need to create
350 388 # subtrees for dirnames (in reverse order)
351 389 # [this only applies for nodes from added]
352 390 new_trees = []
353 391
354 392 blob = objects.Blob.from_string(node['content'])
355 393
356 394 if dirnames:
357 395 # If there are trees which should be created we need to build
358 396 # them now (in reverse order)
359 397 reversed_dirnames = list(reversed(dirnames))
360 398 curtree = objects.Tree()
361 399 curtree[node['node_path']] = node['mode'], blob.id
362 400 new_trees.append(curtree)
363 401 for dirname in reversed_dirnames[:-1]:
364 402 newtree = objects.Tree()
365 403 newtree[dirname] = (DIR_STAT, curtree.id)
366 404 new_trees.append(newtree)
367 405 curtree = newtree
368 406 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
369 407 else:
370 parent.add(
371 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
408 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
372 409
373 410 new_trees.append(parent)
374 411 # Update ancestors
375 412 reversed_ancestors = reversed(
376 413 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
377 414 for parent, tree, path in reversed_ancestors:
378 415 parent[path] = (DIR_STAT, tree.id)
379 416 object_store.add_object(tree)
380 417
381 418 object_store.add_object(blob)
382 419 for tree in new_trees:
383 420 object_store.add_object(tree)
384 421
385 422 for node_path in removed:
386 423 paths = node_path.split('/')
387 424 tree = commit_tree
388 425 trees = [tree]
389 426 # Traverse deep into the forest...
390 427 for path in paths:
391 428 try:
392 429 obj = repo[tree[path][1]]
393 430 if isinstance(obj, objects.Tree):
394 431 trees.append(obj)
395 432 tree = obj
396 433 except KeyError:
397 434 break
398 435 # Cut down the blob and all rotten trees on the way back...
399 436 for path, tree in reversed(zip(paths, trees)):
400 437 del tree[path]
401 438 if tree:
402 439 # This tree still has elements - don't remove it or any
403 440 # of its parents
404 441 break
405 442
406 443 object_store.add_object(commit_tree)
407 444
408 445 # Create commit
409 446 commit = objects.Commit()
410 447 commit.tree = commit_tree.id
411 448 for k, v in commit_data.iteritems():
412 449 setattr(commit, k, v)
413 450 object_store.add_object(commit)
414 451
452 self.create_branch(wire, branch, commit.id)
453
454 # dulwich set-ref
415 455 ref = 'refs/heads/%s' % branch
416 456 repo.refs[ref] = commit.id
417 457
418 458 return commit.id
419 459
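For orientation, a plausible shape of the inputs the commit method above consumes; the key names follow dulwich's Commit attributes and all values are illustrative:

commit_data = {
    'author': 'Joe Example <joe@example.com>',
    'committer': 'Joe Example <joe@example.com>',
    'author_time': 1560000000, 'author_timezone': 0,
    'commit_time': 1560000000, 'commit_timezone': 0,
    'encoding': 'UTF-8',
    'message': 'added README',
    'parents': [],
}
updated = [{'path': 'docs/README.rst', 'node_path': 'README.rst',
            'content': 'Hello', 'mode': 0o100644}]
# git_remote.commit(wire, commit_data, 'master', None, updated, removed=[])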
420 460 @reraise_safe_exceptions
421 461 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
422 462 if url != 'default' and '://' not in url:
423 463 client = LocalGitClient(url)
424 464 else:
425 465 url_obj = url_parser(url)
426 466 o = self._build_opener(url)
427 467 url, _ = url_obj.authinfo()
428 468 client = HttpGitClient(base_url=url, opener=o)
429 469 repo = self._factory.repo(wire)
430 470
431 471 determine_wants = repo.object_store.determine_wants_all
432 472 if refs:
433 473 def determine_wants_requested(references):
434 474 return [references[r] for r in references if r in refs]
435 475 determine_wants = determine_wants_requested
436 476
437 477 try:
438 478 remote_refs = client.fetch(
439 479 path=url, target=repo, determine_wants=determine_wants)
440 480 except NotGitRepository as e:
441 481 log.warning(
442 482 'Trying to fetch from "%s" failed, not a Git repository.', url)
443 483 # Exception can contain unicode which we convert
444 484 raise exceptions.AbortException(e)(repr(e))
445 485
446 486 # mikhail: client.fetch() returns all the remote refs, but fetches only
447 487 # refs filtered by `determine_wants` function. We need to filter result
448 488 # as well
449 489 if refs:
450 490 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
451 491
452 492 if apply_refs:
453 493 # TODO: johbo: Needs proper test coverage with a git repository
454 494 # that contains a tag object, so that we would end up with
455 495 # a peeled ref at this point.
456 496 for k in remote_refs:
457 497 if k.endswith(self.peeled_ref_marker):
458 498 log.debug("Skipping peeled reference %s", k)
459 499 continue
460 500 repo[k] = remote_refs[k]
461 501
462 502 if refs and not update_after:
463 503 # mikhail: explicitly set the head to the last ref.
464 504 repo['HEAD'] = remote_refs[refs[-1]]
465 505
466 506 if update_after:
467 507 # we want to checkout HEAD
468 508 repo["HEAD"] = remote_refs["HEAD"]
469 509 index.build_index_from_tree(repo.path, repo.index_path(),
470 510 repo.object_store, repo["HEAD"].tree)
471 511 return remote_refs
472 512
473 513 @reraise_safe_exceptions
474 514 def sync_fetch(self, wire, url, refs=None):
475 515 repo = self._factory.repo(wire)
476 516 if refs and not isinstance(refs, (list, tuple)):
477 517 refs = [refs]
478 518 config = self._wire_to_config(wire)
479 519 # get all remote refs we'll use to fetch later
480 520 output, __ = self.run_git_command(
481 521 wire, ['ls-remote', url], fail_on_stderr=False,
482 522 _copts=self._remote_conf(config),
483 523 extra_env={'GIT_TERMINAL_PROMPT': '0'})
484 524
485 525 remote_refs = collections.OrderedDict()
486 526 fetch_refs = []
487 527
488 528 for ref_line in output.splitlines():
489 529 sha, ref = ref_line.split('\t')
490 530 sha = sha.strip()
491 531 if ref in remote_refs:
492 532 # duplicate, skip
493 533 continue
494 534 if ref.endswith(self.peeled_ref_marker):
495 535 log.debug("Skipping peeled reference %s", ref)
496 536 continue
497 537 # don't sync HEAD
498 538 if ref in ['HEAD']:
499 539 continue
500 540
501 541 remote_refs[ref] = sha
502 542
503 543 if refs and sha in refs:
504 544 # we filter fetch using our specified refs
505 545 fetch_refs.append('{}:{}'.format(ref, ref))
506 546 elif not refs:
507 547 fetch_refs.append('{}:{}'.format(ref, ref))
508 548 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
509 549 if fetch_refs:
510 550 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
511 551 fetch_refs_chunks = list(chunk)
512 552 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
513 553 _out, _err = self.run_git_command(
514 554 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
515 555 fail_on_stderr=False,
516 556 _copts=self._remote_conf(config),
517 557 extra_env={'GIT_TERMINAL_PROMPT': '0'})
518 558
519 559 return remote_refs
520 560
521 561 @reraise_safe_exceptions
522 562 def sync_push(self, wire, url, refs=None):
523 563 if not self.check_url(url, wire):
524 564 return
525 565 config = self._wire_to_config(wire)
526 566 repo = self._factory.repo(wire)
527 567 self.run_git_command(
528 568 wire, ['push', url, '--mirror'], fail_on_stderr=False,
529 569 _copts=self._remote_conf(config),
530 570 extra_env={'GIT_TERMINAL_PROMPT': '0'})
531 571
532 572 @reraise_safe_exceptions
533 573 def get_remote_refs(self, wire, url):
534 574 repo = Repo(url)
535 575 return repo.get_refs()
536 576
537 577 @reraise_safe_exceptions
538 578 def get_description(self, wire):
539 579 repo = self._factory.repo(wire)
540 580 return repo.get_description()
541 581
542 582 @reraise_safe_exceptions
543 583 def get_missing_revs(self, wire, rev1, rev2, path2):
544 584 repo = self._factory.repo(wire)
545 585 LocalGitClient(thin_packs=False).fetch(path2, repo)
546 586
547 587 wire_remote = wire.copy()
548 588 wire_remote['path'] = path2
549 589 repo_remote = self._factory.repo(wire_remote)
550 590 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
551 591
552 592 revs = [
553 593 x.commit.id
554 594 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
555 595 return revs
556 596
557 597 @reraise_safe_exceptions
558 598 def get_object(self, wire, sha):
559 repo = self._factory.repo(wire)
560 obj = repo.get_object(sha)
561 commit_id = obj.id
599 repo = self._factory.repo_libgit2(wire)
562 600
563 if isinstance(obj, Tag):
564 commit_id = obj.object[1]
601 try:
602 commit = repo.revparse_single(sha)
603 except (KeyError, ValueError) as e:
604 msg = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
605 raise exceptions.LookupException(e)(msg)
606
607 if isinstance(commit, pygit2.Tag):
608 commit = repo.get(commit.target)
609
610 commit_id = commit.hex
611 type_id = commit.type
565 612
566 613 return {
567 'id': obj.id,
568 'type': obj.type_name,
614 'id': commit_id,
615 'type': self._type_id_to_name(type_id),
569 616 'commit_id': commit_id,
570 617 'idx': 0
571 618 }
572 619
573 620 @reraise_safe_exceptions
574 def get_object_attrs(self, wire, sha, *attrs):
575 repo = self._factory.repo(wire)
576 obj = repo.get_object(sha)
577 return list(getattr(obj, a) for a in attrs)
621 def get_refs(self, wire):
622 repo = self._factory.repo_libgit2(wire)
578 623
579 @reraise_safe_exceptions
580 def get_refs(self, wire):
581 repo = self._factory.repo(wire)
582 624 result = {}
583 for ref, sha in repo.refs.as_dict().items():
584 peeled_sha = repo.get_peeled(ref)
585 result[ref] = peeled_sha
625 for ref in repo.references:
626 peeled_sha = repo.lookup_reference(ref).peel()
627 result[ref] = peeled_sha.hex
628
586 629 return result
587 630
588 631 @reraise_safe_exceptions
589 def get_refs_path(self, wire):
590 repo = self._factory.repo(wire)
591 return repo.refs.path
592
593 @reraise_safe_exceptions
594 632 def head(self, wire, show_exc=True):
595 repo = self._factory.repo(wire)
633 repo = self._factory.repo_libgit2(wire)
596 634 try:
597 return repo.head()
635 return repo.head.peel().hex
598 636 except Exception:
599 637 if show_exc:
600 638 raise
601 639
602 640 @reraise_safe_exceptions
603 641 def init(self, wire):
604 642 repo_path = str_to_dulwich(wire['path'])
605 643 self.repo = Repo.init(repo_path)
606 644
607 645 @reraise_safe_exceptions
608 646 def init_bare(self, wire):
609 647 repo_path = str_to_dulwich(wire['path'])
610 648 self.repo = Repo.init_bare(repo_path)
611 649
612 650 @reraise_safe_exceptions
613 651 def revision(self, wire, rev):
614 repo = self._factory.repo(wire)
615 obj = repo[rev]
652 repo = self._factory.repo_libgit2(wire)
653 commit = repo[rev]
616 654 obj_data = {
617 'id': obj.id,
655 'id': commit.id.hex,
618 656 }
619 try:
620 obj_data['tree'] = obj.tree
621 except AttributeError:
622 pass
657 # tree objects itself don't have tree_id attribute
658 if hasattr(commit, 'tree_id'):
659 obj_data['tree'] = commit.tree_id.hex
660
623 661 return obj_data
624 662
625 663 @reraise_safe_exceptions
626 def commit_attribute(self, wire, rev, attr):
627 repo = self._factory.repo(wire)
628 obj = repo[rev]
629 return getattr(obj, attr)
664 def date(self, wire, rev):
665 repo = self._factory.repo_libgit2(wire)
666 commit = repo[rev]
667 # TODO(marcink): check dulwich difference of offset vs timezone
668 return [commit.commit_time, commit.commit_time_offset]
669
670 @reraise_safe_exceptions
671 def author(self, wire, rev):
672 repo = self._factory.repo_libgit2(wire)
673 commit = repo[rev]
674 if commit.author.email:
675 return u"{} <{}>".format(commit.author.name, commit.author.email)
676
677 return u"{}".format(commit.author.raw_name)
678
679 @reraise_safe_exceptions
680 def message(self, wire, rev):
681 repo = self._factory.repo_libgit2(wire)
682 commit = repo[rev]
683 return commit.message
684
685 @reraise_safe_exceptions
686 def parents(self, wire, rev):
687 repo = self._factory.repo_libgit2(wire)
688 commit = repo[rev]
689 return [x.hex for x in commit.parent_ids]
630 690
631 691 @reraise_safe_exceptions
632 692 def set_refs(self, wire, key, value):
633 repo = self._factory.repo(wire)
634 repo.refs[key] = value
693 repo = self._factory.repo_libgit2(wire)
694 repo.references.create(key, value, force=True)
695
696 @reraise_safe_exceptions
697 def create_branch(self, wire, branch_name, commit_id, force=False):
698 repo = self._factory.repo_libgit2(wire)
699 commit = repo[commit_id]
700
701 if force:
702 repo.branches.local.create(branch_name, commit, force=force)
703 elif not repo.branches.get(branch_name):
704 # create only if that branch doesn't already exist
705 repo.branches.local.create(branch_name, commit, force=force)
635 706
636 707 @reraise_safe_exceptions
637 708 def remove_ref(self, wire, key):
638 repo = self._factory.repo(wire)
639 del repo.refs[key]
709 repo = self._factory.repo_libgit2(wire)
710 repo.references.delete(key)
711
712 @reraise_safe_exceptions
713 def tag_remove(self, wire, tag_name):
714 repo = self._factory.repo_libgit2(wire)
715 key = 'refs/tags/{}'.format(tag_name)
716 repo.references.delete(key)
640 717
641 718 @reraise_safe_exceptions
642 719 def tree_changes(self, wire, source_id, target_id):
720 # TODO(marcink): remove this, it seems to be used only by tests
643 721 repo = self._factory.repo(wire)
644 722 source = repo[source_id].tree if source_id else None
645 723 target = repo[target_id].tree
646 724 result = repo.object_store.tree_changes(source, target)
647 725 return list(result)
648 726
649 727 @reraise_safe_exceptions
650 728 def tree_items(self, wire, tree_id):
651 repo = self._factory.repo(wire)
652 tree = repo[tree_id]
729 repo_init = self._factory.repo_libgit2(wire)
653 730
654 result = []
655 for item in tree.iteritems():
656 item_sha = item.sha
657 item_mode = item.mode
731 with repo_init as repo:
732 tree = repo[tree_id]
658 733
659 if FILE_MODE(item_mode) == GIT_LINK:
660 item_type = "link"
661 else:
662 item_type = repo[item_sha].type_name
734 result = []
735 for item in tree:
736 item_sha = item.hex
737 item_mode = item.filemode
738 item_type = item.type
663 739
664 result.append((item.path, item_mode, item_sha, item_type))
665 return result
740 if item_type == 'commit':
741 # NOTE(marcink): submodules are translated to 'link' for backward compat
742 item_type = 'link'
743
744 result.append((item.name, item_mode, item_sha, item_type))
745 return result
666 746
667 747 @reraise_safe_exceptions
668 748 def update_server_info(self, wire):
669 749 repo = self._factory.repo(wire)
670 750 update_server_info(repo)
671 751
672 752 @reraise_safe_exceptions
673 753 def discover_git_version(self):
674 754 stdout, _ = self.run_git_command(
675 755 {}, ['--version'], _bare=True, _safe=True)
676 756 prefix = 'git version'
677 757 if stdout.startswith(prefix):
678 758 stdout = stdout[len(prefix):]
679 759 return stdout.strip()
680 760
681 761 @reraise_safe_exceptions
762 def get_all_commit_ids(self, wire):
763 if self.is_empty(wire):
764 return []
765
766 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
767 try:
768 output, __ = self.run_git_command(wire, cmd)
769 return output.splitlines()
770 except Exception:
771 # Can be raised for empty repositories
772 return []
773
774 @reraise_safe_exceptions
682 775 def run_git_command(self, wire, cmd, **opts):
683 776 path = wire.get('path', None)
684 777
685 778 if path and os.path.isdir(path):
686 779 opts['cwd'] = path
687 780
688 781 if '_bare' in opts:
689 782 _copts = []
690 783 del opts['_bare']
691 784 else:
692 785 _copts = ['-c', 'core.quotepath=false', ]
693 786 safe_call = False
694 787 if '_safe' in opts:
695 788 # no exc on failure
696 789 del opts['_safe']
697 790 safe_call = True
698 791
699 792 if '_copts' in opts:
700 793 _copts.extend(opts['_copts'] or [])
701 794 del opts['_copts']
702 795
703 796 gitenv = os.environ.copy()
704 797 gitenv.update(opts.pop('extra_env', {}))
705 798 # need to clean up GIT_DIR!
706 799 if 'GIT_DIR' in gitenv:
707 800 del gitenv['GIT_DIR']
708 801 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
709 802 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
710 803
711 804 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
712 805 _opts = {'env': gitenv, 'shell': False}
713 806
714 807 try:
715 808 _opts.update(opts)
716 809 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
717 810
718 811 return ''.join(p), ''.join(p.error)
719 812 except (EnvironmentError, OSError) as err:
720 813 cmd = ' '.join(cmd) # human friendly CMD
721 814 tb_err = ("Couldn't run git command (%s).\n"
722 815 "Original error was:%s\n"
723 816 "Call options:%s\n"
724 817 % (cmd, err, _opts))
725 818 log.exception(tb_err)
726 819 if safe_call:
727 820 return '', err
728 821 else:
729 822 raise exceptions.VcsException()(tb_err)
730 823
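An illustrative call, assuming `git_remote` is a GitRemote instance and the repository path exists; it mirrors how sync_fetch above shells out to git:

wire = {'path': '/srv/repos/example.git'}
output, _ = git_remote.run_git_command(
    wire, ['ls-remote', 'https://code.example.com/repo.git'],
    fail_on_stderr=False, extra_env={'GIT_TERMINAL_PROMPT': '0'})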
731 824 @reraise_safe_exceptions
732 825 def install_hooks(self, wire, force=False):
733 826 from vcsserver.hook_utils import install_git_hooks
734 827 repo = self._factory.repo(wire)
735 828 return install_git_hooks(repo.path, repo.bare, force_create=force)
736 829
737 830 @reraise_safe_exceptions
738 831 def get_hooks_info(self, wire):
739 832 from vcsserver.hook_utils import (
740 833 get_git_pre_hook_version, get_git_post_hook_version)
741 834 repo = self._factory.repo(wire)
742 835 return {
743 836 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
744 837 'post_version': get_git_post_hook_version(repo.path, repo.bare),
745 838 }
746 839
747 840
748 841 def str_to_dulwich(value):
749 842 """
750 843 Dulwich 0.10.1a requires `unicode` objects to be passed in.
751 844 """
752 845 return value.decode(settings.WIRE_ENCODING)
@@ -1,856 +1,874 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30
31 31 import vcsserver
32 32 from vcsserver import exceptions
33 33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 34 from vcsserver.hgcompat import (
35 35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 39 RepoLookupError, InterventionRequired, RequirementError)
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 def make_ui_from_config(repo_config):
45 45
46 46 class LoggingUI(ui.ui):
47 47 def status(self, *msg, **opts):
48 48 log.info(' '.join(msg).rstrip('\n'))
49 49 super(LoggingUI, self).status(*msg, **opts)
50 50
51 51 def warn(self, *msg, **opts):
52 52 log.warn(' '.join(msg).rstrip('\n'))
53 53 super(LoggingUI, self).warn(*msg, **opts)
54 54
55 55 def error(self, *msg, **opts):
56 56 log.error(' '.join(msg).rstrip('\n'))
57 57 super(LoggingUI, self).error(*msg, **opts)
58 58
59 59 def note(self, *msg, **opts):
60 60 log.info(' '.join(msg).rstrip('\n'))
61 61 super(LoggingUI, self).note(*msg, **opts)
62 62
63 63 def debug(self, *msg, **opts):
64 64 log.debug(' '.join(msg).rstrip('\n'))
65 65 super(LoggingUI, self).debug(*msg, **opts)
66 66
67 67 baseui = LoggingUI()
68 68
69 69 # clean the baseui object
70 70 baseui._ocfg = hgconfig.config()
71 71 baseui._ucfg = hgconfig.config()
72 72 baseui._tcfg = hgconfig.config()
73 73
74 74 for section, option, value in repo_config:
75 75 baseui.setconfig(section, option, value)
76 76
77 77 # make our hgweb quiet so it doesn't print output
78 78 baseui.setconfig('ui', 'quiet', 'true')
79 79
80 80 baseui.setconfig('ui', 'paginate', 'never')
81 81 # for better error reporting from Mercurial
82 82 baseui.setconfig('ui', 'message-output', 'stderr')
83 83
84 84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 85 # signal in a non-main thread, thus generating a ValueError.
86 86 baseui.setconfig('worker', 'numcpus', 1)
87 87
88 88 # If there is no config for the largefiles extension, we explicitly disable
89 89 # it here. This overrides settings from repositories hgrc file. Recent
90 90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 91 # repo.
92 92 if not baseui.hasconfig('extensions', 'largefiles'):
93 93 log.debug('Explicitly disable largefiles extension for repo.')
94 94 baseui.setconfig('extensions', 'largefiles', '!')
95 95
96 96 return baseui
97 97
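A minimal sketch of the expected input: an iterable of (section, option, value) tuples, as carried in wire['config']; the values here are illustrative:

repo_config = [
    ('ui', 'username', 'RhodeCode <noreply@example.com>'),
    ('phases', 'publish', 'true'),
]
baseui = make_ui_from_config(repo_config)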
98 98
99 99 def reraise_safe_exceptions(func):
100 100 """Decorator for converting mercurial exceptions to something neutral."""
101
101 102 def wrapper(*args, **kwargs):
102 103 try:
103 104 return func(*args, **kwargs)
104 105 except (Abort, InterventionRequired) as e:
105 106 raise_from_original(exceptions.AbortException(e))
106 107 except RepoLookupError as e:
107 108 raise_from_original(exceptions.LookupException(e))
108 109 except RequirementError as e:
109 110 raise_from_original(exceptions.RequirementException(e))
110 111 except RepoError as e:
111 112 raise_from_original(exceptions.VcsException(e))
112 113 except LookupError as e:
113 114 raise_from_original(exceptions.LookupException(e))
114 115 except Exception as e:
115 116 if not hasattr(e, '_vcs_kind'):
116 117 log.exception("Unhandled exception in hg remote call")
117 118 raise_from_original(exceptions.UnhandledException(e))
118 119
119 120 raise
120 121 return wrapper
121 122
122 123
123 124 class MercurialFactory(RepoFactory):
124 125 repo_type = 'hg'
125 126
126 127 def _create_config(self, config, hooks=True):
127 128 if not hooks:
128 129 hooks_to_clean = frozenset((
129 130 'changegroup.repo_size', 'preoutgoing.pre_pull',
130 131 'outgoing.pull_logger', 'prechangegroup.pre_push'))
131 132 new_config = []
132 133 for section, option, value in config:
133 134 if section == 'hooks' and option in hooks_to_clean:
134 135 continue
135 136 new_config.append((section, option, value))
136 137 config = new_config
137 138
138 139 baseui = make_ui_from_config(config)
139 140 return baseui
140 141
141 142 def _create_repo(self, wire, create):
142 143 baseui = self._create_config(wire["config"])
143 144 return instance(baseui, wire["path"], create)
144 145
146 def repo(self, wire, create=False):
147 """
148 Get a repository instance for the given path.
149 """
150 region = self._cache_region
151 context = wire.get('context', None)
152 repo_path = wire.get('path', '')
153 context_uid = '{}'.format(context)
154 cache = wire.get('cache', True)
155 cache_on = context and cache
156
157 @region.conditional_cache_on_arguments(condition=cache_on)
158 def create_new_repo(_repo_type, _repo_path, _context_uid):
159 return self._create_repo(wire, create)
160
161 return create_new_repo(self.repo_type, repo_path, context_uid)
162
145 163
146 164 class HgRemote(object):
147 165
148 166 def __init__(self, factory):
149 167 self._factory = factory
150 168
151 169 self._bulk_methods = {
152 170 "affected_files": self.ctx_files,
153 171 "author": self.ctx_user,
154 172 "branch": self.ctx_branch,
155 173 "children": self.ctx_children,
156 174 "date": self.ctx_date,
157 175 "message": self.ctx_description,
158 176 "parents": self.ctx_parents,
159 177 "status": self.ctx_status,
160 178 "obsolete": self.ctx_obsolete,
161 179 "phase": self.ctx_phase,
162 180 "hidden": self.ctx_hidden,
163 181 "_file_paths": self.ctx_list,
164 182 }
165 183
166 184 def _get_ctx(self, repo, ref):
167 185 return get_ctx(repo, ref)
168 186
169 187 @reraise_safe_exceptions
170 188 def discover_hg_version(self):
171 189 from mercurial import util
172 190 return util.version()
173 191
174 192 @reraise_safe_exceptions
175 193 def is_empty(self, wire):
176 194 repo = self._factory.repo(wire)
177 195
178 196 try:
179 197 return len(repo) == 0
180 198 except Exception:
181 199 log.exception("failed to read object_store")
182 200 return False
183 201
184 202 @reraise_safe_exceptions
185 203 def archive_repo(self, archive_path, mtime, file_info, kind):
186 204 if kind == "tgz":
187 205 archiver = archival.tarit(archive_path, mtime, "gz")
188 206 elif kind == "tbz2":
189 207 archiver = archival.tarit(archive_path, mtime, "bz2")
190 208 elif kind == 'zip':
191 209 archiver = archival.zipit(archive_path, mtime)
192 210 else:
193 211 raise exceptions.ArchiveException()(
194 212 'Remote does not support: "%s".' % kind)
195 213
196 214 for f_path, f_mode, f_is_link, f_content in file_info:
197 215 archiver.addfile(f_path, f_mode, f_is_link, f_content)
198 216 archiver.done()
199 217
200 218 @reraise_safe_exceptions
201 219 def bookmarks(self, wire):
202 220 repo = self._factory.repo(wire)
203 221 return dict(repo._bookmarks)
204 222
205 223 @reraise_safe_exceptions
206 224 def branches(self, wire, normal, closed):
207 225 repo = self._factory.repo(wire)
208 226 iter_branches = repo.branchmap().iterbranches()
209 227 bt = {}
210 228 for branch_name, _heads, tip, is_closed in iter_branches:
211 229 if normal and not is_closed:
212 230 bt[branch_name] = tip
213 231 if closed and is_closed:
214 232 bt[branch_name] = tip
215 233
216 234 return bt
217 235
218 236 @reraise_safe_exceptions
219 237 def bulk_request(self, wire, rev, pre_load):
220 238 result = {}
221 239 for attr in pre_load:
222 240 try:
223 241 method = self._bulk_methods[attr]
224 242 result[attr] = method(wire, rev)
225 243 except KeyError as e:
226 244 raise exceptions.VcsException(e)(
227 245 'Unknown bulk attribute: "%s"' % attr)
228 246 return result
229 247
230 248 @reraise_safe_exceptions
231 249 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
232 250 baseui = self._factory._create_config(wire["config"], hooks=hooks)
233 251 clone(baseui, source, dest, noupdate=not update_after_clone)
234 252
235 253 @reraise_safe_exceptions
236 254 def commitctx(
237 255 self, wire, message, parents, commit_time, commit_timezone,
238 256 user, files, extra, removed, updated):
239 257
240 258 repo = self._factory.repo(wire)
241 259 baseui = self._factory._create_config(wire['config'])
242 260 publishing = baseui.configbool('phases', 'publish')
243 261 if publishing:
244 262 new_commit = 'public'
245 263 else:
246 264 new_commit = 'draft'
247 265
248 266 def _filectxfn(_repo, ctx, path):
249 267 """
250 268 Marks given path as added/changed/removed in a given _repo. This is
251 269 used by Mercurial's internal commit function.
252 270 """
253 271
254 272 # check if this path is removed
255 273 if path in removed:
256 274 # returning None is a way to mark node for removal
257 275 return None
258 276
259 277 # check if this path is added
260 278 for node in updated:
261 279 if node['path'] == path:
262 280 return memfilectx(
263 281 _repo,
264 282 changectx=ctx,
265 283 path=node['path'],
266 284 data=node['content'],
267 285 islink=False,
268 286 isexec=bool(node['mode'] & stat.S_IXUSR),
269 287 copied=False)
270 288
271 289 raise exceptions.AbortException()(
272 290 "Given path haven't been marked as added, "
273 291 "changed or removed (%s)" % path)
274 292
275 293 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
276 294
277 295 commit_ctx = memctx(
278 296 repo=repo,
279 297 parents=parents,
280 298 text=message,
281 299 files=files,
282 300 filectxfn=_filectxfn,
283 301 user=user,
284 302 date=(commit_time, commit_timezone),
285 303 extra=extra)
286 304
287 305 n = repo.commitctx(commit_ctx)
288 306 new_id = hex(n)
289 307
290 308 return new_id
291 309
292 310 @reraise_safe_exceptions
293 311 def ctx_branch(self, wire, revision):
294 312 repo = self._factory.repo(wire)
295 313 ctx = self._get_ctx(repo, revision)
296 314 return ctx.branch()
297 315
298 316 @reraise_safe_exceptions
299 317 def ctx_children(self, wire, revision):
300 318 repo = self._factory.repo(wire)
301 319 ctx = self._get_ctx(repo, revision)
302 320 return [child.rev() for child in ctx.children()]
303 321
304 322 @reraise_safe_exceptions
305 323 def ctx_date(self, wire, revision):
306 324 repo = self._factory.repo(wire)
307 325 ctx = self._get_ctx(repo, revision)
308 326 return ctx.date()
309 327
310 328 @reraise_safe_exceptions
311 329 def ctx_description(self, wire, revision):
312 330 repo = self._factory.repo(wire)
313 331 ctx = self._get_ctx(repo, revision)
314 332 return ctx.description()
315 333
316 334 @reraise_safe_exceptions
317 335 def ctx_files(self, wire, revision):
318 336 repo = self._factory.repo(wire)
319 337 ctx = self._get_ctx(repo, revision)
320 338 return ctx.files()
321 339
322 340 @reraise_safe_exceptions
323 341 def ctx_list(self, path, revision):
324 342 repo = self._factory.repo(path)
325 343 ctx = self._get_ctx(repo, revision)
326 344 return list(ctx)
327 345
328 346 @reraise_safe_exceptions
329 347 def ctx_parents(self, wire, revision):
330 348 repo = self._factory.repo(wire)
331 349 ctx = self._get_ctx(repo, revision)
332 350 return [parent.rev() for parent in ctx.parents()]
333 351
334 352 @reraise_safe_exceptions
335 353 def ctx_phase(self, wire, revision):
336 354 repo = self._factory.repo(wire)
337 355 ctx = self._get_ctx(repo, revision)
338 356 # public=0, draft=1, secret=3
339 357 return ctx.phase()
340 358
341 359 @reraise_safe_exceptions
342 360 def ctx_obsolete(self, wire, revision):
343 361 repo = self._factory.repo(wire)
344 362 ctx = self._get_ctx(repo, revision)
345 363 return ctx.obsolete()
346 364
347 365 @reraise_safe_exceptions
348 366 def ctx_hidden(self, wire, revision):
349 367 repo = self._factory.repo(wire)
350 368 ctx = self._get_ctx(repo, revision)
351 369 return ctx.hidden()
352 370
353 371 @reraise_safe_exceptions
354 372 def ctx_substate(self, wire, revision):
355 373 repo = self._factory.repo(wire)
356 374 ctx = self._get_ctx(repo, revision)
357 375 return ctx.substate
358 376
359 377 @reraise_safe_exceptions
360 378 def ctx_status(self, wire, revision):
361 379 repo = self._factory.repo(wire)
362 380 ctx = self._get_ctx(repo, revision)
363 381 status = repo[ctx.p1().node()].status(other=ctx.node())
364 382 # object of status (odd, custom named tuple in mercurial) is not
365 383 # correctly serializable, we make it a list, as the underlying
366 384 # API expects this to be a list
367 385 return list(status)
368 386
369 387 @reraise_safe_exceptions
370 388 def ctx_user(self, wire, revision):
371 389 repo = self._factory.repo(wire)
372 390 ctx = self._get_ctx(repo, revision)
373 391 return ctx.user()
374 392
375 393 @reraise_safe_exceptions
376 394 def check_url(self, url, config):
377 395 _proto = None
378 396 if '+' in url[:url.find('://')]:
379 397 _proto = url[0:url.find('+')]
380 398 url = url[url.find('+') + 1:]
381 399 handlers = []
382 400 url_obj = url_parser(url)
383 401 test_uri, authinfo = url_obj.authinfo()
384 402 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
385 403 url_obj.query = obfuscate_qs(url_obj.query)
386 404
387 405 cleaned_uri = str(url_obj)
388 406 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
389 407
390 408 if authinfo:
391 409 # create a password manager
392 410 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
393 411 passmgr.add_password(*authinfo)
394 412
395 413 handlers.extend((httpbasicauthhandler(passmgr),
396 414 httpdigestauthhandler(passmgr)))
397 415
398 416 o = urllib2.build_opener(*handlers)
399 417 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
400 418 ('Accept', 'application/mercurial-0.1')]
401 419
402 420 q = {"cmd": 'between'}
403 421 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
404 422 qs = '?%s' % urllib.urlencode(q)
405 423 cu = "%s%s" % (test_uri, qs)
406 424 req = urllib2.Request(cu, None, {})
407 425
408 426 try:
409 427 log.debug("Trying to open URL %s", cleaned_uri)
410 428 resp = o.open(req)
411 429 if resp.code != 200:
412 430 raise exceptions.URLError()('Return Code is not 200')
413 431 except Exception as e:
414 432 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
415 433 # means it cannot be cloned
416 434 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
417 435
418 436 # now check if it's a proper hg repo, but don't do it for svn
419 437 try:
420 438 if _proto == 'svn':
421 439 pass
422 440 else:
423 441 # check for pure hg repos
424 442 log.debug(
425 443 "Verifying if URL is a Mercurial repository: %s",
426 444 cleaned_uri)
427 445 ui = make_ui_from_config(config)
428 446 peer_checker = makepeer(ui, url)
429 447 peer_checker.lookup('tip')
430 448 except Exception as e:
431 449 log.warning("URL is not a valid Mercurial repository: %s",
432 450 cleaned_uri)
433 451 raise exceptions.URLError(e)(
434 452 "url [%s] does not look like an hg repo org_exc: %s"
435 453 % (cleaned_uri, e))
436 454
437 455 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
438 456 return True
439 457
440 458 @reraise_safe_exceptions
441 459 def diff(
442 460 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
443 461 context):
444 462 repo = self._factory.repo(wire)
445 463
446 464 if file_filter:
447 465 match_filter = match(file_filter[0], '', [file_filter[1]])
448 466 else:
449 467 match_filter = file_filter
450 468 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
451 469
452 470 try:
453 471 return "".join(patch.diff(
454 472 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
455 473 except RepoLookupError as e:
456 474 raise exceptions.LookupException(e)()
457 475
458 476 @reraise_safe_exceptions
459 477 def node_history(self, wire, revision, path, limit):
460 478 repo = self._factory.repo(wire)
461 479
462 480 ctx = self._get_ctx(repo, revision)
463 481 fctx = ctx.filectx(path)
464 482
465 483 def history_iter():
466 484 limit_rev = fctx.rev()
467 485 for obj in reversed(list(fctx.filelog())):
468 486 obj = fctx.filectx(obj)
469 487 ctx = obj.changectx()
470 488 if ctx.hidden() or ctx.obsolete():
471 489 continue
472 490
473 491 if limit_rev >= obj.rev():
474 492 yield obj
475 493
476 494 history = []
477 495 for cnt, obj in enumerate(history_iter()):
478 496 if limit and cnt >= limit:
479 497 break
480 498 history.append(hex(obj.node()))
481 499
482 500 return [x for x in history]
483 501
484 502 @reraise_safe_exceptions
485 503 def node_history_untill(self, wire, revision, path, limit):
486 504 repo = self._factory.repo(wire)
487 505 ctx = self._get_ctx(repo, revision)
488 506 fctx = ctx.filectx(path)
489 507
490 508 file_log = list(fctx.filelog())
491 509 if limit:
492 510 # Limit to the last n items
493 511 file_log = file_log[-limit:]
494 512
495 513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
496 514
497 515 @reraise_safe_exceptions
498 516 def fctx_annotate(self, wire, revision, path):
499 517 repo = self._factory.repo(wire)
500 518 ctx = self._get_ctx(repo, revision)
501 519 fctx = ctx.filectx(path)
502 520
503 521 result = []
504 522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
505 523 ln_no = i
506 524 sha = hex(annotate_obj.fctx.node())
507 525 content = annotate_obj.text
508 526 result.append((ln_no, sha, content))
509 527 return result
510 528
511 529 @reraise_safe_exceptions
512 530 def fctx_data(self, wire, revision, path):
513 531 repo = self._factory.repo(wire)
514 532 ctx = self._get_ctx(repo, revision)
515 533 fctx = ctx.filectx(path)
516 534 return fctx.data()
517 535
518 536 @reraise_safe_exceptions
519 537 def fctx_flags(self, wire, revision, path):
520 538 repo = self._factory.repo(wire)
521 539 ctx = self._get_ctx(repo, revision)
522 540 fctx = ctx.filectx(path)
523 541 return fctx.flags()
524 542
525 543 @reraise_safe_exceptions
526 544 def fctx_size(self, wire, revision, path):
527 545 repo = self._factory.repo(wire)
528 546 ctx = self._get_ctx(repo, revision)
529 547 fctx = ctx.filectx(path)
530 548 return fctx.size()
531 549
532 550 @reraise_safe_exceptions
533 551 def get_all_commit_ids(self, wire, name):
534 552 repo = self._factory.repo(wire)
535 553 repo = repo.filtered(name)
536 554 revs = map(lambda x: hex(x[7]), repo.changelog.index)
537 555 return revs
538 556
539 557 @reraise_safe_exceptions
540 558 def get_config_value(self, wire, section, name, untrusted=False):
541 559 repo = self._factory.repo(wire)
542 560 return repo.ui.config(section, name, untrusted=untrusted)
543 561
544 562 @reraise_safe_exceptions
545 563 def get_config_bool(self, wire, section, name, untrusted=False):
546 564 repo = self._factory.repo(wire)
547 565 return repo.ui.configbool(section, name, untrusted=untrusted)
548 566
549 567 @reraise_safe_exceptions
550 568 def get_config_list(self, wire, section, name, untrusted=False):
551 569 repo = self._factory.repo(wire)
552 570 return repo.ui.configlist(section, name, untrusted=untrusted)
553 571
554 572 @reraise_safe_exceptions
555 573 def is_large_file(self, wire, path):
556 574 return largefiles.lfutil.isstandin(path)
557 575
558 576 @reraise_safe_exceptions
559 577 def in_largefiles_store(self, wire, sha):
560 578 repo = self._factory.repo(wire)
561 579 return largefiles.lfutil.instore(repo, sha)
562 580
563 581 @reraise_safe_exceptions
564 582 def in_user_cache(self, wire, sha):
565 583 repo = self._factory.repo(wire)
566 584 return largefiles.lfutil.inusercache(repo.ui, sha)
567 585
568 586 @reraise_safe_exceptions
569 587 def store_path(self, wire, sha):
570 588 repo = self._factory.repo(wire)
571 589 return largefiles.lfutil.storepath(repo, sha)
572 590
573 591 @reraise_safe_exceptions
574 592 def link(self, wire, sha, path):
575 593 repo = self._factory.repo(wire)
576 594 largefiles.lfutil.link(
577 595 largefiles.lfutil.usercachepath(repo.ui, sha), path)
578 596
579 597 @reraise_safe_exceptions
580 598 def localrepository(self, wire, create=False):
581 599 self._factory.repo(wire, create=create)
582 600
583 601 @reraise_safe_exceptions
584 602 def lookup(self, wire, revision, both):
585 603
586 604 repo = self._factory.repo(wire)
587 605
588 606 if isinstance(revision, int):
589 607 # NOTE(marcink):
590 608 # since Mercurial doesn't support negative indexes properly
591 609 # we need to shift accordingly by one to get proper index, e.g
592 610 # repo[-1] => repo[-2]
593 611 # repo[0] => repo[-1]
594 612 if revision <= 0:
595 613 revision = revision + -1
596 614 try:
597 615 ctx = self._get_ctx(repo, revision)
598 616 except (TypeError, RepoLookupError) as e:
599 617 e._org_exc_tb = traceback.format_exc()
600 618 raise exceptions.LookupException(e)(revision)
601 619 except LookupError as e:
602 620 e._org_exc_tb = traceback.format_exc()
603 621 raise exceptions.LookupException(e)(e.name)
604 622
605 623 if not both:
606 624 return ctx.hex()
607 625
608 626 ctx = repo[ctx.hex()]
609 627 return ctx.hex(), ctx.rev()
610 628
611 629 @reraise_safe_exceptions
612 630 def pull(self, wire, url, commit_ids=None):
613 631 repo = self._factory.repo(wire)
614 632 # Disable any prompts for this repo
615 633 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
616 634
617 635 remote = peer(repo, {}, url)
618 636 # Disable any prompts for this remote
619 637 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
620 638
621 639 if commit_ids:
622 640 commit_ids = [bin(commit_id) for commit_id in commit_ids]
623 641
624 642 return exchange.pull(
625 643 repo, remote, heads=commit_ids, force=None).cgresult
626 644
627 645 @reraise_safe_exceptions
628 646 def sync_push(self, wire, url):
629 647 if not self.check_url(url, wire['config']):
630 648 return
631 649
632 650 repo = self._factory.repo(wire)
633 651
634 652 # Disable any prompts for this repo
635 653 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
636 654
637 655 bookmarks = dict(repo._bookmarks).keys()
638 656 remote = peer(repo, {}, url)
639 657 # Disable any prompts for this remote
640 658 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
641 659
642 660 return exchange.push(
643 661 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
644 662
645 663 @reraise_safe_exceptions
646 664 def revision(self, wire, rev):
647 665 repo = self._factory.repo(wire)
648 666 ctx = self._get_ctx(repo, rev)
649 667 return ctx.rev()
650 668
651 669 @reraise_safe_exceptions
652 670 def rev_range(self, wire, filter):
653 671 repo = self._factory.repo(wire)
654 672 revisions = [rev for rev in revrange(repo, filter)]
655 673 return revisions
656 674
657 675 @reraise_safe_exceptions
658 676 def rev_range_hash(self, wire, node):
659 677 repo = self._factory.repo(wire)
660 678
661 679 def get_revs(repo, rev_opt):
662 680 if rev_opt:
663 681 revs = revrange(repo, rev_opt)
664 682 if len(revs) == 0:
665 683 return (nullrev, nullrev)
666 684 return max(revs), min(revs)
667 685 else:
668 686 return len(repo) - 1, 0
669 687
670 688 stop, start = get_revs(repo, [node + ':'])
671 689 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
672 690 return revs
673 691
674 692 @reraise_safe_exceptions
675 693 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
676 694 other_path = kwargs.pop('other_path', None)
677 695
678 696 # case when we want to compare two independent repositories
679 697 if other_path and other_path != wire["path"]:
680 698 baseui = self._factory._create_config(wire["config"])
681 699 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
682 700 else:
683 701 repo = self._factory.repo(wire)
684 702 return list(repo.revs(rev_spec, *args))
685 703
686 704 @reraise_safe_exceptions
687 705 def strip(self, wire, revision, update, backup):
688 706 repo = self._factory.repo(wire)
689 707 ctx = self._get_ctx(repo, revision)
690 708 hgext_strip(
691 709 repo.baseui, repo, ctx.node(), update=update, backup=backup)
692 710
693 711 @reraise_safe_exceptions
694 712 def verify(self, wire):
695 713 repo = self._factory.repo(wire)
696 714 baseui = self._factory._create_config(wire['config'])
697 715 baseui.setconfig('ui', 'quiet', 'false')
698 716 output = io.BytesIO()
699 717
700 718 def write(data, **unused_kwargs):
701 719 output.write(data)
702 720 baseui.write = write
703 721
704 722 repo.ui = baseui
705 723 verify.verify(repo)
706 724 return output.getvalue()
707 725
708 726 @reraise_safe_exceptions
709 727 def tag(self, wire, name, revision, message, local, user,
710 728 tag_time, tag_timezone):
711 729 repo = self._factory.repo(wire)
712 730 ctx = self._get_ctx(repo, revision)
713 731 node = ctx.node()
714 732
715 733 date = (tag_time, tag_timezone)
716 734 try:
717 735 hg_tag.tag(repo, name, node, message, local, user, date)
718 736 except Abort as e:
719 737 log.exception("Tag operation aborted")
720 738 # Exception can contain unicode which we convert
721 739 raise exceptions.AbortException(e)(repr(e))
722 740
723 741 @reraise_safe_exceptions
724 742 def tags(self, wire):
725 743 repo = self._factory.repo(wire)
726 744 return repo.tags()
727 745
728 746 @reraise_safe_exceptions
729 747 def update(self, wire, node=None, clean=False):
730 748 repo = self._factory.repo(wire)
731 749 baseui = self._factory._create_config(wire['config'])
732 750 commands.update(baseui, repo, node=node, clean=clean)
733 751
734 752 @reraise_safe_exceptions
735 753 def identify(self, wire):
736 754 repo = self._factory.repo(wire)
737 755 baseui = self._factory._create_config(wire['config'])
738 756 output = io.BytesIO()
739 757 baseui.write = output.write
740 758 # This is required to get a full node id
741 759 baseui.debugflag = True
742 760 commands.identify(baseui, repo, id=True)
743 761
744 762 return output.getvalue()
745 763
746 764 @reraise_safe_exceptions
747 765 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
748 766 hooks=True):
749 767 repo = self._factory.repo(wire)
750 768 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751 769
752 770 # Mercurial internally has a lot of logic that checks ONLY whether
753 771 # an option is defined; we only pass the options that are actually set
754 772 opts = {}
755 773 if bookmark:
756 774 opts['bookmark'] = bookmark
757 775 if branch:
758 776 opts['branch'] = branch
759 777 if revision:
760 778 opts['rev'] = revision
761 779
762 780 commands.pull(baseui, repo, source, **opts)
763 781
764 782 @reraise_safe_exceptions
765 783 def heads(self, wire, branch=None):
766 784 repo = self._factory.repo(wire)
767 785 baseui = self._factory._create_config(wire['config'])
768 786 output = io.BytesIO()
769 787
770 788 def write(data, **unused_kwargs):
771 789 output.write(data)
772 790
773 791 baseui.write = write
774 792 if branch:
775 793 args = [branch]
776 794 else:
777 795 args = []
778 796 commands.heads(baseui, repo, template='{node} ', *args)
779 797
780 798 return output.getvalue()
781 799
782 800 @reraise_safe_exceptions
783 801 def ancestor(self, wire, revision1, revision2):
784 802 repo = self._factory.repo(wire)
785 803 changelog = repo.changelog
786 804 lookup = repo.lookup
787 805 a = changelog.ancestor(lookup(revision1), lookup(revision2))
788 806 return hex(a)
789 807
790 808 @reraise_safe_exceptions
791 809 def push(self, wire, revisions, dest_path, hooks=True,
792 810 push_branches=False):
793 811 repo = self._factory.repo(wire)
794 812 baseui = self._factory._create_config(wire['config'], hooks=hooks)
795 813 commands.push(baseui, repo, dest=dest_path, rev=revisions,
796 814 new_branch=push_branches)
797 815
798 816 @reraise_safe_exceptions
799 817 def merge(self, wire, revision):
800 818 repo = self._factory.repo(wire)
801 819 baseui = self._factory._create_config(wire['config'])
802 820 repo.ui.setconfig('ui', 'merge', 'internal:dump')
803 821
804 822 # In case sub repositories are used, Mercurial prompts the user in
805 823 # case of merge conflicts or different sub repository sources. By
806 824 # setting the interactive flag to `False` Mercurial doesn't prompt the
807 825 # user but instead uses a default value.
808 826 repo.ui.setconfig('ui', 'interactive', False)
809 827 commands.merge(baseui, repo, rev=revision)
810 828
811 829 @reraise_safe_exceptions
812 830 def merge_state(self, wire):
813 831 repo = self._factory.repo(wire)
814 832 repo.ui.setconfig('ui', 'merge', 'internal:dump')
815 833
816 834 # In case sub repositories are used, Mercurial prompts the user in
817 835 # case of merge conflicts or different sub repository sources. By
818 836 # setting the interactive flag to `False` Mercurial doesn't prompt the
819 837 # user but instead uses a default value.
820 838 repo.ui.setconfig('ui', 'interactive', False)
821 839 ms = hg_merge.mergestate(repo)
822 840 return [x for x in ms.unresolved()]
823 841
824 842 @reraise_safe_exceptions
825 843 def commit(self, wire, message, username, close_branch=False):
826 844 repo = self._factory.repo(wire)
827 845 baseui = self._factory._create_config(wire['config'])
828 846 repo.ui.setconfig('ui', 'username', username)
829 847 commands.commit(baseui, repo, message=message, close_branch=close_branch)
830 848
831 849
832 850 @reraise_safe_exceptions
833 851 def rebase(self, wire, source=None, dest=None, abort=False):
834 852 repo = self._factory.repo(wire)
835 853 baseui = self._factory._create_config(wire['config'])
836 854 repo.ui.setconfig('ui', 'merge', 'internal:dump')
837 855 rebase.rebase(
838 856 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
839 857
840 858 @reraise_safe_exceptions
841 859 def bookmark(self, wire, bookmark, revision=None):
842 860 repo = self._factory.repo(wire)
843 861 baseui = self._factory._create_config(wire['config'])
844 862 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
845 863
846 864 @reraise_safe_exceptions
847 865 def install_hooks(self, wire, force=False):
848 866 # we don't need any special hooks for Mercurial
849 867 pass
850 868
851 869 @reraise_safe_exceptions
852 870 def get_hooks_info(self, wire):
853 871 return {
854 872 'pre_version': vcsserver.__version__,
855 873 'post_version': vcsserver.__version__,
856 874 }
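All of the HgRemote methods above take a `wire` dictionary that identifies the target repository and call context. A minimal sketch of how a caller might invoke one of them in-process is shown below; the repository path, context value and config entries are hypothetical, and a real deployment goes through the msgpack HTTP layer instead of calling the remote directly.

from vcsserver.hg import MercurialFactory, HgRemote

remote = HgRemote(MercurialFactory())
wire = {
    'path': '/srv/repos/example-hg',            # hypothetical local repository path
    'context': 'f3a9c1d2-example-context',      # call context used as a cache key
    'config': [],                               # list of (section, key, value) tuples
    'cache': True,
}
# Resolve a revision to its full hash and local revision number.
commit_hash, rev_number = remote.lookup(wire, revision='tip', both=True)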
@@ -1,610 +1,611 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 from itertools import chain
28 28
29 29 import simplejson as json
30 30 import msgpack
31 31 from pyramid.config import Configurator
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.compat import configparser
35 35
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39 # due to Mercurial/glibc 2.27 problems we need to detect whether locale settings
40 40 # are causing problems, "fix" them if they do, and fall back to LC_ALL = C
41 41
42 42 try:
43 43 locale.setlocale(locale.LC_ALL, '')
44 44 except locale.Error as e:
45 45 log.error(
46 46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 47 os.environ['LC_ALL'] = 'C'
48 48
49 49 import vcsserver
50 50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 53 from vcsserver.echo_stub.echo_app import EchoApp
54 54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 55 from vcsserver.lib.exc_tracking import store_exception
56 56 from vcsserver.server import VcsServer
57 57
58 58 try:
59 59 from vcsserver.git import GitFactory, GitRemote
60 60 except ImportError:
61 61 GitFactory = None
62 62 GitRemote = None
63 63
64 64 try:
65 65 from vcsserver.hg import MercurialFactory, HgRemote
66 66 except ImportError:
67 67 MercurialFactory = None
68 68 HgRemote = None
69 69
70 70 try:
71 71 from vcsserver.svn import SubversionFactory, SvnRemote
72 72 except ImportError:
73 73 SubversionFactory = None
74 74 SvnRemote = None
75 75
76 76
77 77 def _is_request_chunked(environ):
78 78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 79 return stream
80 80
81 81
82 82 def _int_setting(settings, name, default):
83 83 settings[name] = int(settings.get(name, default))
84 84 return settings[name]
85 85
86 86
87 87 def _bool_setting(settings, name, default):
88 88 input_val = settings.get(name, default)
89 89 if isinstance(input_val, unicode):
90 90 input_val = input_val.encode('utf8')
91 91 settings[name] = asbool(input_val)
92 92 return settings[name]
93 93
94 94
95 95 def _list_setting(settings, name, default):
96 96 raw_value = settings.get(name, default)
97 97
98 98 # Otherwise we assume it uses pyramid's space/newline separation.
99 99 settings[name] = aslist(raw_value)
100 100 return settings[name]
101 101
102 102
103 103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 104 value = settings.get(name, default)
105 105
106 106 if default_when_empty and not value:
107 107 # use default value when value is empty
108 108 value = default
109 109
110 110 if lower:
111 111 value = value.lower()
112 112 settings[name] = value
113 113 return settings[name]
114 114
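The four coercion helpers above normalize raw .ini values in place and return the coerced value. A small sketch of how they might be used, assuming the settings dict arrives from the ini parser with everything as strings (the keys shown are only examples, and the calls would run inside the same module where the helpers are defined):

settings = {
    'dev.use_echo_app': 'false',
    'rc_cache.repo_object.expiration_time': '300',
    'locale': 'en_US.UTF-8',
}

_bool_setting(settings, 'dev.use_echo_app', 'false')                 # -> False
_int_setting(settings, 'rc_cache.repo_object.expiration_time', 300)  # -> 300 as int
_string_setting(settings, 'locale', 'en_US.UTF-8', lower=False)      # kept as-is
_list_setting(settings, 'example.whitelist', '')                     # hypothetical key, missing -> []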
115 115
116 116 class VCS(object):
117 117 def __init__(self, locale=None, cache_config=None):
118 118 self.locale = locale
119 119 self.cache_config = cache_config
120 120 self._configure_locale()
121 121
122 122 if GitFactory and GitRemote:
123 123 git_factory = GitFactory()
124 124 self._git_remote = GitRemote(git_factory)
125 125 else:
126 126 log.info("Git client import failed")
127 127
128 128 if MercurialFactory and HgRemote:
129 129 hg_factory = MercurialFactory()
130 130 self._hg_remote = HgRemote(hg_factory)
131 131 else:
132 132 log.info("Mercurial client import failed")
133 133
134 134 if SubversionFactory and SvnRemote:
135 135 svn_factory = SubversionFactory()
136 136
137 137 # hg factory is used for svn url validation
138 138 hg_factory = MercurialFactory()
139 139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 140 else:
141 141 log.info("Subversion client import failed")
142 142
143 143 self._vcsserver = VcsServer()
144 144
145 145 def _configure_locale(self):
146 146 if self.locale:
147 147 log.info('Setting locale `LC_ALL` to %s', self.locale)
148 148 else:
149 149 log.info(
150 150 'Configuring locale subsystem based on environment variables')
151 151 try:
152 152 # If self.locale is the empty string, then the locale
153 153 # module will use the environment variables. See the
154 154 # documentation of the package `locale`.
155 155 locale.setlocale(locale.LC_ALL, self.locale)
156 156
157 157 language_code, encoding = locale.getlocale()
158 158 log.info(
159 159 'Locale set to language code "%s" with encoding "%s".',
160 160 language_code, encoding)
161 161 except locale.Error:
162 162 log.exception(
163 163 'Cannot set locale, not configuring the locale system')
164 164
165 165
166 166 class WsgiProxy(object):
167 167 def __init__(self, wsgi):
168 168 self.wsgi = wsgi
169 169
170 170 def __call__(self, environ, start_response):
171 171 input_data = environ['wsgi.input'].read()
172 172 input_data = msgpack.unpackb(input_data)
173 173
174 174 error = None
175 175 try:
176 176 data, status, headers = self.wsgi.handle(
177 177 input_data['environment'], input_data['input_data'],
178 178 *input_data['args'], **input_data['kwargs'])
179 179 except Exception as e:
180 180 data, status, headers = [], None, None
181 181 error = {
182 182 'message': str(e),
183 183 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 184 }
185 185
186 186 start_response(200, {})
187 187 return self._iterator(error, status, headers, data)
188 188
189 189 def _iterator(self, error, status, headers, data):
190 190 initial_data = [
191 191 error,
192 192 status,
193 193 headers,
194 194 ]
195 195
196 196 for d in chain(initial_data, data):
197 197 yield msgpack.packb(d)
198 198
199 199
200 200 def not_found(request):
201 201 return {'status': '404 NOT FOUND'}
202 202
203 203
204 204 class VCSViewPredicate(object):
205 205 def __init__(self, val, config):
206 206 self.remotes = val
207 207
208 208 def text(self):
209 209 return 'vcs view method = %s' % (self.remotes.keys(),)
210 210
211 211 phash = text
212 212
213 213 def __call__(self, context, request):
214 214 """
215 215 View predicate that returns true if given backend is supported by
216 216 defined remotes.
217 217 """
218 218 backend = request.matchdict.get('backend')
219 219 return backend in self.remotes
220 220
221 221
222 222 class HTTPApplication(object):
223 223 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
224 224
225 225 remote_wsgi = remote_wsgi
226 226 _use_echo_app = False
227 227
228 228 def __init__(self, settings=None, global_config=None):
229 229 self._sanitize_settings_and_apply_defaults(settings)
230 230
231 231 self.config = Configurator(settings=settings)
232 232 self.global_config = global_config
233 233 self.config.include('vcsserver.lib.rc_cache')
234 234
235 235 locale = settings.get('locale', '') or 'en_US.UTF-8'
236 236 vcs = VCS(locale=locale, cache_config=settings)
237 237 self._remotes = {
238 238 'hg': vcs._hg_remote,
239 239 'git': vcs._git_remote,
240 240 'svn': vcs._svn_remote,
241 241 'server': vcs._vcsserver,
242 242 }
243 243 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
244 244 self._use_echo_app = True
245 245 log.warning("Using EchoApp for VCS operations.")
246 246 self.remote_wsgi = remote_wsgi_stub
247 247
248 248 self._configure_settings(global_config, settings)
249 249 self._configure()
250 250
251 251 def _configure_settings(self, global_config, app_settings):
252 252 """
253 253 Configure the settings module.
254 254 """
255 255 settings_merged = global_config.copy()
256 256 settings_merged.update(app_settings)
257 257
258 258 git_path = app_settings.get('git_path', None)
259 259 if git_path:
260 260 settings.GIT_EXECUTABLE = git_path
261 261 binary_dir = app_settings.get('core.binary_dir', None)
262 262 if binary_dir:
263 263 settings.BINARY_DIR = binary_dir
264 264
265 265 # Store the settings to make them available to other modules.
266 266 vcsserver.PYRAMID_SETTINGS = settings_merged
267 267 vcsserver.CONFIG = settings_merged
268 268
269 269 def _sanitize_settings_and_apply_defaults(self, settings):
270 270 temp_store = tempfile.gettempdir()
271 271 default_cache_dir = os.path.join(temp_store, 'rc_cache')
272 272
273 273 # save the default cache dir and use it for all backends later.
274 274 default_cache_dir = _string_setting(
275 275 settings,
276 276 'cache_dir',
277 277 default_cache_dir, lower=False, default_when_empty=True)
278 278
279 279 # ensure we have our dir created
280 280 if not os.path.isdir(default_cache_dir):
281 281 os.makedirs(default_cache_dir, mode=0o755)
282 282
283 283 # exception store cache
284 284 _string_setting(
285 285 settings,
286 286 'exception_tracker.store_path',
287 287 temp_store, lower=False, default_when_empty=True)
288 288
289 289 # repo_object cache
290 290 _string_setting(
291 291 settings,
292 292 'rc_cache.repo_object.backend',
293 293 'dogpile.cache.rc.memory_lru')
294 294 _int_setting(
295 295 settings,
296 296 'rc_cache.repo_object.expiration_time',
297 297 300)
298 298 _int_setting(
299 299 settings,
300 300 'rc_cache.repo_object.max_size',
301 301 1024)
302 302
303 303 def _configure(self):
304 304 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
305 305
306 306 self.config.add_route('service', '/_service')
307 307 self.config.add_route('status', '/status')
308 308 self.config.add_route('hg_proxy', '/proxy/hg')
309 309 self.config.add_route('git_proxy', '/proxy/git')
310 310 self.config.add_route('vcs', '/{backend}')
311 311 self.config.add_route('stream_git', '/stream/git/*repo_name')
312 312 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
313 313
314 314 self.config.add_view(self.status_view, route_name='status', renderer='json')
315 315 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
316 316
317 317 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
318 318 self.config.add_view(self.git_proxy(), route_name='git_proxy')
319 319 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
320 320 vcs_view=self._remotes)
321 321
322 322 self.config.add_view(self.hg_stream(), route_name='stream_hg')
323 323 self.config.add_view(self.git_stream(), route_name='stream_git')
324 324
325 325 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
326 326
327 327 self.config.add_notfound_view(not_found, renderer='json')
328 328
329 329 self.config.add_view(self.handle_vcs_exception, context=Exception)
330 330
331 331 self.config.add_tween(
332 332 'vcsserver.tweens.RequestWrapperTween',
333 333 )
334 334
335 335 def wsgi_app(self):
336 336 return self.config.make_wsgi_app()
337 337
338 338 def vcs_view(self, request):
339 339 remote = self._remotes[request.matchdict['backend']]
340 340 payload = msgpack.unpackb(request.body, use_list=True)
341 341 method = payload.get('method')
342 342 params = payload.get('params')
343 343 wire = params.get('wire')
344 344 args = params.get('args')
345 345 kwargs = params.get('kwargs')
346 346 context_uid = None
347 347
348 348 if wire:
349 349 try:
350 350 wire['context'] = context_uid = uuid.UUID(wire['context'])
351 351 except KeyError:
352 352 pass
353 353 args.insert(0, wire)
354 354
355 355 log.debug('method called:%s with kwargs:%s context_uid: %s',
356 356 method, kwargs, context_uid)
357
357 358 try:
358 359 resp = getattr(remote, method)(*args, **kwargs)
359 360 except Exception as e:
360 361 exc_info = list(sys.exc_info())
361 362 exc_type, exc_value, exc_traceback = exc_info
362 363
363 364 org_exc = getattr(e, '_org_exc', None)
364 365 org_exc_name = None
365 366 org_exc_tb = ''
366 367 if org_exc:
367 368 org_exc_name = org_exc.__class__.__name__
368 369 org_exc_tb = getattr(e, '_org_exc_tb', '')
369 370 # replace our "faked" exception with our org
370 371 exc_info[0] = org_exc.__class__
371 372 exc_info[1] = org_exc
372 373
373 374 store_exception(id(exc_info), exc_info)
374 375
375 376 tb_info = ''.join(
376 377 traceback.format_exception(exc_type, exc_value, exc_traceback))
377 378
378 379 type_ = e.__class__.__name__
379 380 if type_ not in self.ALLOWED_EXCEPTIONS:
380 381 type_ = None
381 382
382 383 resp = {
383 384 'id': payload.get('id'),
384 385 'error': {
385 386 'message': e.message,
386 387 'traceback': tb_info,
387 388 'org_exc': org_exc_name,
388 389 'org_exc_tb': org_exc_tb,
389 390 'type': type_
390 391 }
391 392 }
392 393 try:
393 394 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
394 395 except AttributeError:
395 396 pass
396 397 else:
397 398 resp = {
398 399 'id': payload.get('id'),
399 400 'result': resp
400 401 }
401 402
402 403 return resp
403 404
404 405 def status_view(self, request):
405 406 import vcsserver
406 407 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
407 408 'pid': os.getpid()}
408 409
409 410 def service_view(self, request):
410 411 import vcsserver
411 412
412 413 payload = msgpack.unpackb(request.body, use_list=True)
413 414
414 415 try:
415 416 path = self.global_config['__file__']
416 417 config = configparser.ConfigParser()
417 418 config.read(path)
418 419 parsed_ini = config
419 420 if parsed_ini.has_section('server:main'):
420 421 parsed_ini = dict(parsed_ini.items('server:main'))
421 422 except Exception:
422 423 log.exception('Failed to read .ini file for display')
423 424 parsed_ini = {}
424 425
425 426 resp = {
426 427 'id': payload.get('id'),
427 428 'result': dict(
428 429 version=vcsserver.__version__,
429 430 config=parsed_ini,
430 431 payload=payload,
431 432 )
432 433 }
433 434 return resp
434 435
435 436 def _msgpack_renderer_factory(self, info):
436 437 def _render(value, system):
437 438 value = msgpack.packb(value)
438 439 request = system.get('request')
439 440 if request is not None:
440 441 response = request.response
441 442 ct = response.content_type
442 443 if ct == response.default_content_type:
443 444 response.content_type = 'application/x-msgpack'
444 445 return value
445 446 return _render
446 447
447 448 def set_env_from_config(self, environ, config):
448 449 dict_conf = {}
449 450 try:
450 451 for elem in config:
451 452 if elem[0] == 'rhodecode':
452 453 dict_conf = json.loads(elem[2])
453 454 break
454 455 except Exception:
455 456 log.exception('Failed to fetch SCM CONFIG')
456 457 return
457 458
458 459 username = dict_conf.get('username')
459 460 if username:
460 461 environ['REMOTE_USER'] = username
461 462 # Mercurial specific; some extension APIs rely on this
462 463 environ['HGUSER'] = username
463 464
464 465 ip = dict_conf.get('ip')
465 466 if ip:
466 467 environ['REMOTE_HOST'] = ip
467 468
468 469 if _is_request_chunked(environ):
469 470 # set the compatibility flag for webob
470 471 environ['wsgi.input_terminated'] = True
471 472
472 473 def hg_proxy(self):
473 474 @wsgiapp
474 475 def _hg_proxy(environ, start_response):
475 476 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
476 477 return app(environ, start_response)
477 478 return _hg_proxy
478 479
479 480 def git_proxy(self):
480 481 @wsgiapp
481 482 def _git_proxy(environ, start_response):
482 483 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
483 484 return app(environ, start_response)
484 485 return _git_proxy
485 486
486 487 def hg_stream(self):
487 488 if self._use_echo_app:
488 489 @wsgiapp
489 490 def _hg_stream(environ, start_response):
490 491 app = EchoApp('fake_path', 'fake_name', None)
491 492 return app(environ, start_response)
492 493 return _hg_stream
493 494 else:
494 495 @wsgiapp
495 496 def _hg_stream(environ, start_response):
496 497 log.debug('http-app: handling hg stream')
497 498 repo_path = environ['HTTP_X_RC_REPO_PATH']
498 499 repo_name = environ['HTTP_X_RC_REPO_NAME']
499 500 packed_config = base64.b64decode(
500 501 environ['HTTP_X_RC_REPO_CONFIG'])
501 502 config = msgpack.unpackb(packed_config)
502 503 app = scm_app.create_hg_wsgi_app(
503 504 repo_path, repo_name, config)
504 505
505 506 # Consistent path information for hgweb
506 507 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
507 508 environ['REPO_NAME'] = repo_name
508 509 self.set_env_from_config(environ, config)
509 510
510 511 log.debug('http-app: starting app handler '
511 512 'with %s and process request', app)
512 513 return app(environ, ResponseFilter(start_response))
513 514 return _hg_stream
514 515
515 516 def git_stream(self):
516 517 if self._use_echo_app:
517 518 @wsgiapp
518 519 def _git_stream(environ, start_response):
519 520 app = EchoApp('fake_path', 'fake_name', None)
520 521 return app(environ, start_response)
521 522 return _git_stream
522 523 else:
523 524 @wsgiapp
524 525 def _git_stream(environ, start_response):
525 526 log.debug('http-app: handling git stream')
526 527 repo_path = environ['HTTP_X_RC_REPO_PATH']
527 528 repo_name = environ['HTTP_X_RC_REPO_NAME']
528 529 packed_config = base64.b64decode(
529 530 environ['HTTP_X_RC_REPO_CONFIG'])
530 531 config = msgpack.unpackb(packed_config)
531 532
532 533 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
533 534 self.set_env_from_config(environ, config)
534 535
535 536 content_type = environ.get('CONTENT_TYPE', '')
536 537
537 538 path = environ['PATH_INFO']
538 539 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
539 540 log.debug(
540 541 'LFS: Detecting if request `%s` is LFS server path based '
541 542 'on content type:`%s`, is_lfs:%s',
542 543 path, content_type, is_lfs_request)
543 544
544 545 if not is_lfs_request:
545 546 # fallback detection by path
546 547 if GIT_LFS_PROTO_PAT.match(path):
547 548 is_lfs_request = True
548 549 log.debug(
549 550 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
550 551 path, is_lfs_request)
551 552
552 553 if is_lfs_request:
553 554 app = scm_app.create_git_lfs_wsgi_app(
554 555 repo_path, repo_name, config)
555 556 else:
556 557 app = scm_app.create_git_wsgi_app(
557 558 repo_path, repo_name, config)
558 559
559 560 log.debug('http-app: starting app handler '
560 561 'with %s and process request', app)
561 562
562 563 return app(environ, start_response)
563 564
564 565 return _git_stream
565 566
566 567 def handle_vcs_exception(self, exception, request):
567 568 _vcs_kind = getattr(exception, '_vcs_kind', '')
568 569 if _vcs_kind == 'repo_locked':
569 570 # Get custom repo-locked status code if present.
570 571 status_code = request.headers.get('X-RC-Locked-Status-Code')
571 572 return HTTPRepoLocked(
572 573 title=exception.message, status_code=status_code)
573 574
574 575 elif _vcs_kind == 'repo_branch_protected':
575 576 # Get custom repo-branch-protected status code if present.
576 577 return HTTPRepoBranchProtected(title=exception.message)
577 578
578 579 exc_info = request.exc_info
579 580 store_exception(id(exc_info), exc_info)
580 581
581 582 traceback_info = 'unavailable'
582 583 if request.exc_info:
583 584 exc_type, exc_value, exc_tb = request.exc_info
584 585 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
585 586
586 587 log.error(
587 588 'error occurred handling this request for path: %s, \n tb: %s',
588 589 request.path, traceback_info)
589 590 raise exception
590 591
591 592
592 593 class ResponseFilter(object):
593 594
594 595 def __init__(self, start_response):
595 596 self._start_response = start_response
596 597
597 598 def __call__(self, status, response_headers, exc_info=None):
598 599 headers = tuple(
599 600 (h, v) for h, v in response_headers
600 601 if not wsgiref.util.is_hop_by_hop(h))
601 602 return self._start_response(status, headers, exc_info)
602 603
603 604
604 605 def main(global_config, **settings):
605 606 if MercurialFactory:
606 607 hgpatches.patch_largefiles_capabilities()
607 608 hgpatches.patch_subrepo_type_mapping()
608 609
609 610 app = HTTPApplication(settings=settings, global_config=global_config)
610 611 return app.wsgi_app()
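A sketch of what a client round trip against the `vcs_view` msgpack endpoint could look like; the bind address and request id are made up, and `requests` is just one possible HTTP client:

import msgpack
import requests  # any HTTP client works; used here for brevity

payload = {
    'id': 'req-1',
    'method': 'discover_git_version',  # any method exposed by the git remote
    'params': {'wire': {}, 'args': [], 'kwargs': {}},
}
response = requests.post(
    'http://localhost:9900/git',  # hypothetical address the vcsserver listens on
    data=msgpack.packb(payload))
reply = msgpack.unpackb(response.content)
# success: {'id': 'req-1', 'result': <git version>}
# failure: {'id': 'req-1', 'error': {'message': ..., 'traceback': ..., 'type': ...}}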
@@ -1,775 +1,772 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 from urllib2 import URLError
23 23 import urlparse
24 24 import logging
25 25 import posixpath as vcspath
26 26 import StringIO
27 27 import urllib
28 28 import traceback
29 29
30 30 import svn.client
31 31 import svn.core
32 32 import svn.delta
33 33 import svn.diff
34 34 import svn.fs
35 35 import svn.repos
36 36
37 37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
43 43 # Set of svn compatible version flags.
44 44 # Compare with subversion/svnadmin/svnadmin.c
45 45 svn_compatible_versions = {
46 46 'pre-1.4-compatible',
47 47 'pre-1.5-compatible',
48 48 'pre-1.6-compatible',
49 49 'pre-1.8-compatible',
50 50 'pre-1.9-compatible'
51 51 }
52 52
53 53 svn_compatible_versions_map = {
54 54 'pre-1.4-compatible': '1.3',
55 55 'pre-1.5-compatible': '1.4',
56 56 'pre-1.6-compatible': '1.5',
57 57 'pre-1.8-compatible': '1.7',
58 58 'pre-1.9-compatible': '1.8',
59 59 }
60 60
61 61
62 62 def reraise_safe_exceptions(func):
63 63 """Decorator for converting svn exceptions to something neutral."""
64 64 def wrapper(*args, **kwargs):
65 65 try:
66 66 return func(*args, **kwargs)
67 67 except Exception as e:
68 68 if not hasattr(e, '_vcs_kind'):
69 69 log.exception("Unhandled exception in svn remote call")
70 70 raise_from_original(exceptions.UnhandledException(e))
71 71 raise
72 72 return wrapper
73 73
74 74
75 75 class SubversionFactory(RepoFactory):
76 76 repo_type = 'svn'
77 77
78 78 def _create_repo(self, wire, create, compatible_version):
79 79 path = svn.core.svn_path_canonicalize(wire['path'])
80 80 if create:
81 81 fs_config = {'compatible-version': '1.9'}
82 82 if compatible_version:
83 83 if compatible_version not in svn_compatible_versions:
84 84 raise Exception('Unknown SVN compatible version "{}"'
85 85 .format(compatible_version))
86 86 fs_config['compatible-version'] = \
87 87 svn_compatible_versions_map[compatible_version]
88 88
89 89 log.debug('Create SVN repo with config "%s"', fs_config)
90 90 repo = svn.repos.create(path, "", "", None, fs_config)
91 91 else:
92 92 repo = svn.repos.open(path)
93 93
94 94 log.debug('Got SVN object: %s', repo)
95 95 return repo
96 96
97 97 def repo(self, wire, create=False, compatible_version=None):
98 98 """
99 99 Get a repository instance for the given path.
100
101 Uses internally the low level beaker API since the decorators introduce
102 significant overhead.
103 100 """
104 101 region = self._cache_region
105 102 context = wire.get('context', None)
106 103 repo_path = wire.get('path', '')
107 104 context_uid = '{}'.format(context)
108 105 cache = wire.get('cache', True)
109 106 cache_on = context and cache
110 107
111 108 @region.conditional_cache_on_arguments(condition=cache_on)
112 109 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 110 return self._create_repo(wire, create, compatible_version)
114 111
115 112 return create_new_repo(self.repo_type, repo_path, context_uid,
116 113 compatible_version)
117 114
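`conditional_cache_on_arguments` used above is a RhodeCode-specific helper on the dogpile cache region; it behaves like dogpile's standard `cache_on_arguments` decorator but can be switched off per call via the `condition` flag. A rough sketch of the underlying dogpile pattern, using a plain in-memory region:

from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory', expiration_time=300)

@region.cache_on_arguments()
def open_repo(repo_type, repo_path, context_uid):
    # stand-in for the expensive repository construction; cached per argument set
    return 'repo-object-for-%s' % repo_path

open_repo('svn', '/srv/repos/example-svn', 'ctx-1')  # computed
open_repo('svn', '/srv/repos/example-svn', 'ctx-1')  # served from the cache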
118 115
119 116 NODE_TYPE_MAPPING = {
120 117 svn.core.svn_node_file: 'file',
121 118 svn.core.svn_node_dir: 'dir',
122 119 }
123 120
124 121
125 122 class SvnRemote(object):
126 123
127 124 def __init__(self, factory, hg_factory=None):
128 125 self._factory = factory
129 126 # TODO: Remove once we do not use internal Mercurial objects anymore
130 127 # for subversion
131 128 self._hg_factory = hg_factory
132 129
133 130 @reraise_safe_exceptions
134 131 def discover_svn_version(self):
135 132 try:
136 133 import svn.core
137 134 svn_ver = svn.core.SVN_VERSION
138 135 except ImportError:
139 136 svn_ver = None
140 137 return svn_ver
141 138
142 139 @reraise_safe_exceptions
143 140 def is_empty(self, wire):
144 141 repo = self._factory.repo(wire)
145 142
146 143 try:
147 144 return self.lookup(wire, -1) == 0
148 145 except Exception:
149 146 log.exception("failed to read object_store")
150 147 return False
151 148
152 149 def check_url(self, url, config_items):
153 150 # this can throw exception if not installed, but we detect this
154 151 from hgsubversion import svnrepo
155 152
156 153 baseui = self._hg_factory._create_config(config_items)
157 154 # the uuid function gets a valid UUID only from a proper repo,
158 155 # otherwise it throws an exception
159 156 try:
160 157 svnrepo.svnremoterepo(baseui, url).svn.uuid
161 158 except Exception:
162 159 tb = traceback.format_exc()
163 160 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
164 161 raise URLError(
165 162 '"%s" is not a valid Subversion source url.' % (url, ))
166 163 return True
167 164
168 165 def is_path_valid_repository(self, wire, path):
169 166
170 167 # NOTE(marcink): short-circuit the check for an SVN repo;
171 168 # repos.open might be expensive to call, but we have one cheap
172 169 # precondition we can use first: checking for the 'format' file
173 170
174 171 if not os.path.isfile(os.path.join(path, 'format')):
175 172 return False
176 173
177 174 try:
178 175 svn.repos.open(path)
179 176 except svn.core.SubversionException:
180 177 tb = traceback.format_exc()
181 178 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
182 179 return False
183 180 return True
184 181
185 182 @reraise_safe_exceptions
186 183 def verify(self, wire):
187 184 repo_path = wire['path']
188 185 if not self.is_path_valid_repository(wire, repo_path):
189 186 raise Exception(
190 187 "Path %s is not a valid Subversion repository." % repo_path)
191 188
192 189 cmd = ['svnadmin', 'info', repo_path]
193 190 stdout, stderr = subprocessio.run_command(cmd)
194 191 return stdout
195 192
196 193 def lookup(self, wire, revision):
197 194 if revision not in [-1, None, 'HEAD']:
198 195 raise NotImplementedError
199 196 repo = self._factory.repo(wire)
200 197 fs_ptr = svn.repos.fs(repo)
201 198 head = svn.fs.youngest_rev(fs_ptr)
202 199 return head
203 200
204 201 def lookup_interval(self, wire, start_ts, end_ts):
205 202 repo = self._factory.repo(wire)
206 203 fsobj = svn.repos.fs(repo)
207 204 start_rev = None
208 205 end_rev = None
209 206 if start_ts:
210 207 start_ts_svn = apr_time_t(start_ts)
211 208 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
212 209 else:
213 210 start_rev = 1
214 211 if end_ts:
215 212 end_ts_svn = apr_time_t(end_ts)
216 213 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
217 214 else:
218 215 end_rev = svn.fs.youngest_rev(fsobj)
219 216 return start_rev, end_rev
220 217
221 218 def revision_properties(self, wire, revision):
222 219 repo = self._factory.repo(wire)
223 220 fs_ptr = svn.repos.fs(repo)
224 221 return svn.fs.revision_proplist(fs_ptr, revision)
225 222
226 223 def revision_changes(self, wire, revision):
227 224
228 225 repo = self._factory.repo(wire)
229 226 fsobj = svn.repos.fs(repo)
230 227 rev_root = svn.fs.revision_root(fsobj, revision)
231 228
232 229 editor = svn.repos.ChangeCollector(fsobj, rev_root)
233 230 editor_ptr, editor_baton = svn.delta.make_editor(editor)
234 231 base_dir = ""
235 232 send_deltas = False
236 233 svn.repos.replay2(
237 234 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
238 235 editor_ptr, editor_baton, None)
239 236
240 237 added = []
241 238 changed = []
242 239 removed = []
243 240
244 241 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
245 242 for path, change in editor.changes.iteritems():
246 243 # TODO: Decide what to do with directory nodes. Subversion can add
247 244 # empty directories.
248 245
249 246 if change.item_kind == svn.core.svn_node_dir:
250 247 continue
251 248 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
252 249 added.append(path)
253 250 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
254 251 svn.repos.CHANGE_ACTION_REPLACE]:
255 252 changed.append(path)
256 253 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
257 254 removed.append(path)
258 255 else:
259 256 raise NotImplementedError(
260 257 "Action %s not supported on path %s" % (
261 258 change.action, path))
262 259
263 260 changes = {
264 261 'added': added,
265 262 'changed': changed,
266 263 'removed': removed,
267 264 }
268 265 return changes
269 266
270 267 def node_history(self, wire, path, revision, limit):
271 268 cross_copies = False
272 269 repo = self._factory.repo(wire)
273 270 fsobj = svn.repos.fs(repo)
274 271 rev_root = svn.fs.revision_root(fsobj, revision)
275 272
276 273 history_revisions = []
277 274 history = svn.fs.node_history(rev_root, path)
278 275 history = svn.fs.history_prev(history, cross_copies)
279 276 while history:
280 277 __, node_revision = svn.fs.history_location(history)
281 278 history_revisions.append(node_revision)
282 279 if limit and len(history_revisions) >= limit:
283 280 break
284 281 history = svn.fs.history_prev(history, cross_copies)
285 282 return history_revisions
286 283
287 284 def node_properties(self, wire, path, revision):
288 285 repo = self._factory.repo(wire)
289 286 fsobj = svn.repos.fs(repo)
290 287 rev_root = svn.fs.revision_root(fsobj, revision)
291 288 return svn.fs.node_proplist(rev_root, path)
292 289
293 290 def file_annotate(self, wire, path, revision):
294 291 abs_path = 'file://' + urllib.pathname2url(
295 292 vcspath.join(wire['path'], path))
296 293 file_uri = svn.core.svn_path_canonicalize(abs_path)
297 294
298 295 start_rev = svn_opt_revision_value_t(0)
299 296 peg_rev = svn_opt_revision_value_t(revision)
300 297 end_rev = peg_rev
301 298
302 299 annotations = []
303 300
304 301 def receiver(line_no, revision, author, date, line, pool):
305 302 annotations.append((line_no, revision, line))
306 303
307 304 # TODO: Cannot use blame5, missing typemap function in the swig code
308 305 try:
309 306 svn.client.blame2(
310 307 file_uri, peg_rev, start_rev, end_rev,
311 308 receiver, svn.client.create_context())
312 309 except svn.core.SubversionException as exc:
313 310 log.exception("Error during blame operation.")
314 311 raise Exception(
315 312 "Blame not supported or file does not exist at path %s. "
316 313 "Error %s." % (path, exc))
317 314
318 315 return annotations
319 316
320 317 def get_node_type(self, wire, path, rev=None):
321 318 repo = self._factory.repo(wire)
322 319 fs_ptr = svn.repos.fs(repo)
323 320 if rev is None:
324 321 rev = svn.fs.youngest_rev(fs_ptr)
325 322 root = svn.fs.revision_root(fs_ptr, rev)
326 323 node = svn.fs.check_path(root, path)
327 324 return NODE_TYPE_MAPPING.get(node, None)
328 325
329 326 def get_nodes(self, wire, path, revision=None):
330 327 repo = self._factory.repo(wire)
331 328 fsobj = svn.repos.fs(repo)
332 329 if revision is None:
333 330 revision = svn.fs.youngest_rev(fsobj)
334 331 root = svn.fs.revision_root(fsobj, revision)
335 332 entries = svn.fs.dir_entries(root, path)
336 333 result = []
337 334 for entry_path, entry_info in entries.iteritems():
338 335 result.append(
339 336 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 337 return result
341 338
342 339 def get_file_content(self, wire, path, rev=None):
343 340 repo = self._factory.repo(wire)
344 341 fsobj = svn.repos.fs(repo)
345 342 if rev is None:
346 343 rev = svn.fs.youngest_revision(fsobj)
347 344 root = svn.fs.revision_root(fsobj, rev)
348 345 content = svn.core.Stream(svn.fs.file_contents(root, path))
349 346 return content.read()
350 347
351 348 def get_file_size(self, wire, path, revision=None):
352 349 repo = self._factory.repo(wire)
353 350 fsobj = svn.repos.fs(repo)
354 351 if revision is None:
355 352 revision = svn.fs.youngest_revision(fsobj)
356 353 root = svn.fs.revision_root(fsobj, revision)
357 354 size = svn.fs.file_length(root, path)
358 355 return size
359 356
360 357 def create_repository(self, wire, compatible_version=None):
361 358 log.info('Creating Subversion repository in path "%s"', wire['path'])
362 359 self._factory.repo(wire, create=True,
363 360 compatible_version=compatible_version)
364 361
365 362 def get_url_and_credentials(self, src_url):
366 363 obj = urlparse.urlparse(src_url)
367 364 username = obj.username or None
368 365 password = obj.password or None
369 366 return username, password, src_url
370 367
371 368 def import_remote_repository(self, wire, src_url):
372 369 repo_path = wire['path']
373 370 if not self.is_path_valid_repository(wire, repo_path):
374 371 raise Exception(
375 372 "Path %s is not a valid Subversion repository." % repo_path)
376 373
377 374 username, password, src_url = self.get_url_and_credentials(src_url)
378 375 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
379 376 '--trust-server-cert-failures=unknown-ca']
380 377 if username and password:
381 378 rdump_cmd += ['--username', username, '--password', password]
382 379 rdump_cmd += [src_url]
383 380
384 381 rdump = subprocess.Popen(
385 382 rdump_cmd,
386 383 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
387 384 load = subprocess.Popen(
388 385 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
389 386
390 387 # TODO: johbo: This can be a very long operation, might be better
391 388 # to track some kind of status and provide an api to check if the
392 389 # import is done.
393 390 rdump.wait()
394 391 load.wait()
395 392
396 393 log.debug('Return process ended with code: %s', rdump.returncode)
397 394 if rdump.returncode != 0:
398 395 errors = rdump.stderr.read()
399 396 log.error('svnrdump dump failed: statuscode %s: message: %s',
400 397 rdump.returncode, errors)
401 398 reason = 'UNKNOWN'
402 399 if 'svnrdump: E230001:' in errors:
403 400 reason = 'INVALID_CERTIFICATE'
404 401
405 402 if reason == 'UNKNOWN':
406 403 reason = 'UNKNOWN:{}'.format(errors)
407 404 raise Exception(
408 405 'Failed to dump the remote repository from %s. Reason:%s' % (
409 406 src_url, reason))
410 407 if load.returncode != 0:
411 408 raise Exception(
412 409 'Failed to load the dump of remote repository from %s.' %
413 410 (src_url, ))
414 411
415 412 def commit(self, wire, message, author, timestamp, updated, removed):
416 413 assert isinstance(message, str)
417 414 assert isinstance(author, str)
418 415
419 416 repo = self._factory.repo(wire)
420 417 fsobj = svn.repos.fs(repo)
421 418
422 419 rev = svn.fs.youngest_rev(fsobj)
423 420 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
424 421 txn_root = svn.fs.txn_root(txn)
425 422
426 423 for node in updated:
427 424 TxnNodeProcessor(node, txn_root).update()
428 425 for node in removed:
429 426 TxnNodeProcessor(node, txn_root).remove()
430 427
431 428 commit_id = svn.repos.fs_commit_txn(repo, txn)
432 429
433 430 if timestamp:
434 431 apr_time = apr_time_t(timestamp)
435 432 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
436 433 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
437 434
438 435 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
439 436 return commit_id
440 437
441 438 def diff(self, wire, rev1, rev2, path1=None, path2=None,
442 439 ignore_whitespace=False, context=3):
443 440
444 441 wire.update(cache=False)
445 442 repo = self._factory.repo(wire)
446 443 diff_creator = SvnDiffer(
447 444 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
448 445 try:
449 446 return diff_creator.generate_diff()
450 447 except svn.core.SubversionException as e:
451 448 log.exception(
452 449 "Error during diff operation operation. "
453 450 "Path might not exist %s, %s" % (path1, path2))
454 451 return ""
455 452
456 453 @reraise_safe_exceptions
457 454 def is_large_file(self, wire, path):
458 455 return False
459 456
460 457 @reraise_safe_exceptions
461 458 def run_svn_command(self, wire, cmd, **opts):
462 459 path = wire.get('path', None)
463 460
464 461 if path and os.path.isdir(path):
465 462 opts['cwd'] = path
466 463
467 464 safe_call = False
468 465 if '_safe' in opts:
469 466 safe_call = True
470 467
471 468 svnenv = os.environ.copy()
472 469 svnenv.update(opts.pop('extra_env', {}))
473 470
474 471 _opts = {'env': svnenv, 'shell': False}
475 472
476 473 try:
477 474 _opts.update(opts)
478 475 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
479 476
480 477 return ''.join(p), ''.join(p.error)
481 478 except (EnvironmentError, OSError) as err:
482 479 cmd = ' '.join(cmd) # human friendly CMD
483 480 tb_err = ("Couldn't run svn command (%s).\n"
484 481 "Original error was:%s\n"
485 482 "Call options:%s\n"
486 483 % (cmd, err, _opts))
487 484 log.exception(tb_err)
488 485 if safe_call:
489 486 return '', err
490 487 else:
491 488 raise exceptions.VcsException()(tb_err)
492 489
493 490 @reraise_safe_exceptions
494 491 def install_hooks(self, wire, force=False):
495 492 from vcsserver.hook_utils import install_svn_hooks
496 493 repo_path = wire['path']
497 494 binary_dir = settings.BINARY_DIR
498 495 executable = None
499 496 if binary_dir:
500 497 executable = os.path.join(binary_dir, 'python')
501 498 return install_svn_hooks(
502 499 repo_path, executable=executable, force_create=force)
503 500
504 501 @reraise_safe_exceptions
505 502 def get_hooks_info(self, wire):
506 503 from vcsserver.hook_utils import (
507 504 get_svn_pre_hook_version, get_svn_post_hook_version)
508 505 repo_path = wire['path']
509 506 return {
510 507 'pre_version': get_svn_pre_hook_version(repo_path),
511 508 'post_version': get_svn_post_hook_version(repo_path),
512 509 }
513 510
514 511
515 512 class SvnDiffer(object):
516 513 """
517 514 Utility to create diffs based on difflib and the Subversion api
518 515 """
519 516
520 517 binary_content = False
521 518
522 519 def __init__(
523 520 self, repo, src_rev, src_path, tgt_rev, tgt_path,
524 521 ignore_whitespace, context):
525 522 self.repo = repo
526 523 self.ignore_whitespace = ignore_whitespace
527 524 self.context = context
528 525
529 526 fsobj = svn.repos.fs(repo)
530 527
531 528 self.tgt_rev = tgt_rev
532 529 self.tgt_path = tgt_path or ''
533 530 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
534 531 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
535 532
536 533 self.src_rev = src_rev
537 534 self.src_path = src_path or self.tgt_path
538 535 self.src_root = svn.fs.revision_root(fsobj, src_rev)
539 536 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
540 537
541 538 self._validate()
542 539
543 540 def _validate(self):
544 541 if (self.tgt_kind != svn.core.svn_node_none and
545 542 self.src_kind != svn.core.svn_node_none and
546 543 self.src_kind != self.tgt_kind):
547 544 # TODO: johbo: proper error handling
548 545 raise Exception(
549 546 "Source and target are not compatible for diff generation. "
550 547 "Source type: %s, target type: %s" %
551 548 (self.src_kind, self.tgt_kind))
552 549
553 550 def generate_diff(self):
554 551 buf = StringIO.StringIO()
555 552 if self.tgt_kind == svn.core.svn_node_dir:
556 553 self._generate_dir_diff(buf)
557 554 else:
558 555 self._generate_file_diff(buf)
559 556 return buf.getvalue()
560 557
561 558 def _generate_dir_diff(self, buf):
562 559 editor = DiffChangeEditor()
563 560 editor_ptr, editor_baton = svn.delta.make_editor(editor)
564 561 svn.repos.dir_delta2(
565 562 self.src_root,
566 563 self.src_path,
567 564 '', # src_entry
568 565 self.tgt_root,
569 566 self.tgt_path,
570 567 editor_ptr, editor_baton,
571 568 authorization_callback_allow_all,
572 569 False, # text_deltas
573 570 svn.core.svn_depth_infinity, # depth
574 571 False, # entry_props
575 572 False, # ignore_ancestry
576 573 )
577 574
578 575 for path, __, change in sorted(editor.changes):
579 576 self._generate_node_diff(
580 577 buf, change, path, self.tgt_path, path, self.src_path)
581 578
582 579 def _generate_file_diff(self, buf):
583 580 change = None
584 581 if self.src_kind == svn.core.svn_node_none:
585 582 change = "add"
586 583 elif self.tgt_kind == svn.core.svn_node_none:
587 584 change = "delete"
588 585 tgt_base, tgt_path = vcspath.split(self.tgt_path)
589 586 src_base, src_path = vcspath.split(self.src_path)
590 587 self._generate_node_diff(
591 588 buf, change, tgt_path, tgt_base, src_path, src_base)
592 589
593 590 def _generate_node_diff(
594 591 self, buf, change, tgt_path, tgt_base, src_path, src_base):
595 592
596 593 if self.src_rev == self.tgt_rev and tgt_base == src_base:
597 594 # for consistent behaviour with git/hg, return an empty diff if
598 595 # we compare the same revisions
599 596 return
600 597
601 598 tgt_full_path = vcspath.join(tgt_base, tgt_path)
602 599 src_full_path = vcspath.join(src_base, src_path)
603 600
604 601 self.binary_content = False
605 602 mime_type = self._get_mime_type(tgt_full_path)
606 603
607 604 if mime_type and not mime_type.startswith('text'):
608 605 self.binary_content = True
609 606 buf.write("=" * 67 + '\n')
610 607 buf.write("Cannot display: file marked as a binary type.\n")
611 608 buf.write("svn:mime-type = %s\n" % mime_type)
612 609 buf.write("Index: %s\n" % (tgt_path, ))
613 610 buf.write("=" * 67 + '\n')
614 611 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
615 612 'tgt_path': tgt_path})
616 613
617 614 if change == 'add':
618 615 # TODO: johbo: SVN is missing a zero here compared to git
619 616 buf.write("new file mode 10644\n")
620 617
621 618 # TODO(marcink): introduce binary detection of svn patches
622 619 # if self.binary_content:
623 620 # buf.write('GIT binary patch\n')
624 621
625 622 buf.write("--- /dev/null\t(revision 0)\n")
626 623 src_lines = []
627 624 else:
628 625 if change == 'delete':
629 626 buf.write("deleted file mode 10644\n")
630 627
631 628 # TODO(marcink): introduce binary detection of svn patches
632 629 # if self.binary_content:
633 630 # buf.write('GIT binary patch\n')
634 631
635 632 buf.write("--- a/%s\t(revision %s)\n" % (
636 633 src_path, self.src_rev))
637 634 src_lines = self._svn_readlines(self.src_root, src_full_path)
638 635
639 636 if change == 'delete':
640 637 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
641 638 tgt_lines = []
642 639 else:
643 640 buf.write("+++ b/%s\t(revision %s)\n" % (
644 641 tgt_path, self.tgt_rev))
645 642 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
646 643
647 644 if not self.binary_content:
648 645 udiff = svn_diff.unified_diff(
649 646 src_lines, tgt_lines, context=self.context,
650 647 ignore_blank_lines=self.ignore_whitespace,
651 648 ignore_case=False,
652 649 ignore_space_changes=self.ignore_whitespace)
653 650 buf.writelines(udiff)
654 651
655 652 def _get_mime_type(self, path):
656 653 try:
657 654 mime_type = svn.fs.node_prop(
658 655 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
659 656 except svn.core.SubversionException:
660 657 mime_type = svn.fs.node_prop(
661 658 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
662 659 return mime_type
663 660
664 661 def _svn_readlines(self, fs_root, node_path):
665 662 if self.binary_content:
666 663 return []
667 664 node_kind = svn.fs.check_path(fs_root, node_path)
668 665 if node_kind not in (
669 666 svn.core.svn_node_file, svn.core.svn_node_symlink):
670 667 return []
671 668 content = svn.core.Stream(
672 669 svn.fs.file_contents(fs_root, node_path)).read()
673 670 return content.splitlines(True)
674 671
675 672
676 673
677 674 class DiffChangeEditor(svn.delta.Editor):
678 675 """
679 676 Records changes between two given revisions
680 677 """
681 678
682 679 def __init__(self):
683 680 self.changes = []
684 681
685 682 def delete_entry(self, path, revision, parent_baton, pool=None):
686 683 self.changes.append((path, None, 'delete'))
687 684
688 685 def add_file(
689 686 self, path, parent_baton, copyfrom_path, copyfrom_revision,
690 687 file_pool=None):
691 688 self.changes.append((path, 'file', 'add'))
692 689
693 690 def open_file(self, path, parent_baton, base_revision, file_pool=None):
694 691 self.changes.append((path, 'file', 'change'))
695 692
696 693
697 694 def authorization_callback_allow_all(root, path, pool):
698 695 return True
699 696
700 697
701 698 class TxnNodeProcessor(object):
702 699 """
703 700 Utility to process the change of one node within a transaction root.
704 701
705 702 It encapsulates the knowledge of how to add, update or remove
706 703 a node for a given transaction root. The purpose is to support the method
707 704 `SvnRemote.commit`.
708 705 """
709 706
710 707 def __init__(self, node, txn_root):
711 708 assert isinstance(node['path'], str)
712 709
713 710 self.node = node
714 711 self.txn_root = txn_root
715 712
716 713 def update(self):
717 714 self._ensure_parent_dirs()
718 715 self._add_file_if_node_does_not_exist()
719 716 self._update_file_content()
720 717 self._update_file_properties()
721 718
722 719 def remove(self):
723 720 svn.fs.delete(self.txn_root, self.node['path'])
724 721 # TODO: Clean up directory if empty
725 722
726 723 def _ensure_parent_dirs(self):
727 724 curdir = vcspath.dirname(self.node['path'])
728 725 dirs_to_create = []
729 726 while not self._svn_path_exists(curdir):
730 727 dirs_to_create.append(curdir)
731 728 curdir = vcspath.dirname(curdir)
732 729
733 730 for curdir in reversed(dirs_to_create):
734 731 log.debug('Creating missing directory "%s"', curdir)
735 732 svn.fs.make_dir(self.txn_root, curdir)
736 733
737 734 def _svn_path_exists(self, path):
738 735 path_status = svn.fs.check_path(self.txn_root, path)
739 736 return path_status != svn.core.svn_node_none
740 737
741 738 def _add_file_if_node_does_not_exist(self):
742 739 kind = svn.fs.check_path(self.txn_root, self.node['path'])
743 740 if kind == svn.core.svn_node_none:
744 741 svn.fs.make_file(self.txn_root, self.node['path'])
745 742
746 743 def _update_file_content(self):
747 744 assert isinstance(self.node['content'], str)
748 745 handler, baton = svn.fs.apply_textdelta(
749 746 self.txn_root, self.node['path'], None, None)
750 747 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
751 748
752 749 def _update_file_properties(self):
753 750 properties = self.node.get('properties', {})
754 751 for key, value in properties.iteritems():
755 752 svn.fs.change_node_prop(
756 753 self.txn_root, self.node['path'], key, value)
757 754
758 755
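A hedged usage sketch for TxnNodeProcessor, based only on the fields the class reads above (`path`, `content`, `properties`). The transaction root is assumed to have been opened elsewhere (for example via svn.fs.txn_root on a transaction begun by the caller); this is not the actual SvnRemote.commit flow.

    # Minimal sketch, assuming `txn_root` is an already opened transaction root
    # and the import path below is correct for this module.
    from vcsserver.svn import TxnNodeProcessor  # assumed module path

    node = {
        'path': 'trunk/docs/index.rst',               # must be a str (asserted above)
        'content': 'Hello subversion\n',              # must be a str as well
        'properties': {'svn:mime-type': 'text/plain'},
    }

    processor = TxnNodeProcessor(node, txn_root)
    processor.update()   # creates missing parent dirs, adds the file if absent,
                         # writes the content and sets the node properties

    removed = TxnNodeProcessor({'path': 'trunk/old.txt'}, txn_root)
    removed.remove()     # deletes the node from the transaction root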
759 756 def apr_time_t(timestamp):
760 757 """
761 758 Convert a Python timestamp into APR timestamp type apr_time_t
762 759 """
763 760 return timestamp * 1E6
764 761
765 762
766 763 def svn_opt_revision_value_t(num):
767 764 """
768 765 Put `num` into a `svn_opt_revision_value_t` structure.
769 766 """
770 767 value = svn.core.svn_opt_revision_value_t()
771 768 value.number = num
772 769 revision = svn.core.svn_opt_revision_t()
773 770 revision.kind = svn.core.svn_opt_revision_number
774 771 revision.value = value
775 772 return revision
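A small, hedged illustration of the two helpers above: APR timestamps are expressed in microseconds, so the conversion is a multiplication by 1E6, and the revision helper wraps a plain number into an svn_opt_revision_t. Values and import path are illustrative assumptions.

    # Assumed usage of apr_time_t and svn_opt_revision_value_t.
    import time
    from vcsserver.svn import apr_time_t, svn_opt_revision_value_t  # assumed path

    apr_now = apr_time_t(time.time())     # seconds -> microseconds (float)

    rev = svn_opt_revision_value_t(42)
    # rev.kind         == svn.core.svn_opt_revision_number
    # rev.value.number == 42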
@@ -1,165 +1,160 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.pull(
82 82 wire=None, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
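The two fetch tests above rely on dulwich's `determine_wants` callback: the client passes the remote refs to the callback, which returns the SHAs to fetch. A hedged sketch of that mechanism outside the test harness, with illustrative paths and refs, might look like this.

    # Sketch of the determine_wants pattern exercised by the tests above;
    # repository paths are placeholders.
    from dulwich.client import LocalGitClient
    from dulwich.repo import Repo

    target = Repo('/tmp/target-repo')        # assumed existing repository
    client = LocalGitClient()

    wanted_refs = ['refs/tags/v0.1.8', 'refs/tags/v0.1.3']

    def determine_wants(refs):
        # keep only the SHAs of the refs we care about, as in
        # test_fetches_specified_commits
        return [sha for ref, sha in refs.items() if ref in wanted_refs]

    client.fetch('/tmp/source-repo', target, determine_wants=determine_wants)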
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
107
108 102
109 103 class TestReraiseSafeExceptions(object):
104
110 105 def test_method_decorated_with_reraise_safe_exceptions(self):
111 106 factory = Mock()
112 107 git_remote = git.GitRemote(factory)
113 108
114 109 def fake_function():
115 110 return None
116 111
117 112 decorator = git.reraise_safe_exceptions(fake_function)
118 113
119 114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 115 for method_name, method in methods:
121 116 if not method_name.startswith('_'):
122 117 assert method.im_func.__code__ == decorator.__code__
123 118
124 119 @pytest.mark.parametrize('side_effect, expected_type', [
125 120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 124 (dulwich.errors.HangupException(), 'error'),
130 125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 126 ])
132 127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 128 @git.reraise_safe_exceptions
134 129 def fake_method():
135 130 raise side_effect
136 131
137 132 with pytest.raises(Exception) as exc_info:
138 133 fake_method()
139 134 assert type(exc_info.value) == Exception
140 135 assert exc_info.value._vcs_kind == expected_type
141 136
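For context, a minimal sketch of the behaviour these tests assert, not the real vcsserver decorator: known dulwich errors are re-raised as plain Exceptions tagged with a `_vcs_kind` attribute so the calling layer can classify them.

    # Hedged sketch of a reraise_safe_exceptions-style decorator.
    import functools
    import dulwich.errors

    def reraise_safe_exceptions_sketch(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except (dulwich.errors.ChecksumMismatch,
                    dulwich.errors.NotCommitError,
                    dulwich.errors.MissingCommitError,
                    dulwich.errors.ObjectMissing) as e:
                exc = Exception(e)
                exc._vcs_kind = 'lookup'
                raise exc
            except (dulwich.errors.HangupException,
                    dulwich.errors.UnexpectedCommandError) as e:
                exc = Exception(e)
                exc._vcs_kind = 'error'
                raise exc
        return wrapper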
142 137
143 138 class TestDulwichRepoWrapper(object):
144 139 def test_calls_close_on_delete(self):
145 140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 141 with isdir_patcher:
147 142 repo = git.Repo('/tmp/abcde')
148 143 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 144 del repo
150 145 close_mock.assert_called_once_with()
151 146
152 147
153 148 class TestGitFactory(object):
154 149 def test_create_repo_returns_dulwich_wrapper(self):
155 150
156 151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 152 mock.side_effect = {'repo_objects': ''}
158 153 factory = git.GitFactory()
159 154 wire = {
160 155 'path': '/tmp/abcde'
161 156 }
162 157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 158 with isdir_patcher:
164 159 result = factory._create_repo(wire, True)
165 160 assert isinstance(result, git.Repo)