remote-clone: also obfuscate given query string params that RhodeCode uses. Fixes #4668
marcink
r106:d14c31eb default
@@ -1,36 +1,48 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 import pytest
19 20
20 21 from vcsserver import main
22 from vcsserver.base import obfuscate_qs
21 23
22 24
23 25 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
24 26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
25 27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
26 28 main.main([])
27 29 patch_largefiles_capabilities.assert_called_once_with()
28 30
29 31
30 32 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
31 33 @mock.patch('vcsserver.main.MercurialFactory', None)
32 34 @mock.patch(
33 35 'vcsserver.hgpatches.patch_largefiles_capabilities',
34 36 mock.Mock(side_effect=Exception("Must not be called")))
35 37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
36 38 main.main([])
39
40
41 @pytest.mark.parametrize('given, expected', [
42 ('foo=bar', 'foo=bar'),
43 ('auth_token=secret', 'auth_token=*****'),
44 ('auth_token=secret&api_key=secret2', 'auth_token=*****&api_key=*****'),
45 ('auth_token=secret&api_key=secret2&param=value', 'auth_token=*****&api_key=*****&param=value'),
46 ])
47 def test_obfuscate_qs(given, expected):
48 assert expected == obfuscate_qs(given)
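The parametrized cases above pin down the contract of the new helper: the parameters RhodeCode treats as secrets (auth_token, api_key) are masked with '*****', while every other parameter passes through unchanged. A minimal sketch of the same behaviour called directly, assuming the vcsserver package from this commit is importable:

    from vcsserver.base import obfuscate_qs

    obfuscate_qs('foo=bar')                        # 'foo=bar'
    obfuscate_qs('auth_token=secret&param=value')  # 'auth_token=*****&param=value'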
@@ -1,71 +1,81 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19
19 import urlparse
20 20
21 21 log = logging.getLogger(__name__)
22 22
23 23
24 24 class RepoFactory(object):
25 25 """
26 26 Utility to create instances of repository
27 27
28 28 It provides internal caching of the `repo` object based on
29 29 the :term:`call context`.
30 30 """
31 31
32 32 def __init__(self, repo_cache):
33 33 self._cache = repo_cache
34 34
35 35 def _create_config(self, path, config):
36 36 config = {}
37 37 return config
38 38
39 39 def _create_repo(self, wire, create):
40 40 raise NotImplementedError()
41 41
42 42 def repo(self, wire, create=False):
43 43 """
44 44 Get a repository instance for the given path.
45 45
46 46 Uses internally the low level beaker API since the decorators introduce
47 47 significant overhead.
48 48 """
49 49 def create_new_repo():
50 50 return self._create_repo(wire, create)
51 51
52 52 return self._repo(wire, create_new_repo)
53 53
54 54 def _repo(self, wire, createfunc):
55 55 context = wire.get('context', None)
56 56 cache = wire.get('cache', True)
57 57 log.debug(
58 58 'GET %s@%s with cache:%s. Context: %s',
59 59 self.__class__.__name__, wire['path'], cache, context)
60 60
61 61 if context and cache:
62 62 cache_key = (context, wire['path'])
63 63 log.debug(
64 64 'FETCH %s@%s repo object from cache. Context: %s',
65 65 self.__class__.__name__, wire['path'], context)
66 66 return self._cache.get(key=cache_key, createfunc=createfunc)
67 67 else:
68 68 log.debug(
69 69 'INIT %s@%s repo object based on wire %s. Context: %s',
70 70 self.__class__.__name__, wire['path'], wire, context)
71 71 return createfunc()
72
73
74 def obfuscate_qs(query_string):
75 parsed = []
76 for k, v in urlparse.parse_qsl(query_string):
77 if k in ['auth_token', 'api_key']:
78 v = "*****"
79 parsed.append((k, v))
80
81 return '&'.join('{}={}'.format(k,v) for k,v in parsed)
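A standard-library detail worth noting when reading obfuscate_qs (an observation, not part of this commit): urlparse.parse_qsl drops parameters that carry no value unless keep_blank_values is passed, so such parameters are not re-emitted, and the unquoted values are re-joined as-is without re-encoding. A small illustration:

    import urlparse

    urlparse.parse_qsl('auth_token=secret&flag')
    # [('auth_token', 'secret')]  -- 'flag' is dropped by default
    urlparse.parse_qsl('auth_token=secret&flag=', keep_blank_values=True)
    # [('auth_token', 'secret'), ('flag', '')]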
@@ -1,579 +1,580 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import urllib
24 24 import urllib2
25 25 from functools import wraps
26 26
27 27 from dulwich import index, objects
28 28 from dulwich.client import HttpGitClient, LocalGitClient
29 29 from dulwich.errors import (
30 30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 31 MissingCommitError, ObjectMissing, HangupException,
32 32 UnexpectedCommandError)
33 33 from dulwich.repo import Repo as DulwichRepo, Tag
34 34 from dulwich.server import update_server_info
35 35
36 36 from vcsserver import exceptions, settings, subprocessio
37 37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory
38 from vcsserver.base import RepoFactory, obfuscate_qs
39 39 from vcsserver.hgcompat import (
40 40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 41
42 42
43 43 DIR_STAT = stat.S_IFDIR
44 44 FILE_MODE = stat.S_IFMT
45 45 GIT_LINK = objects.S_IFGITLINK
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def reraise_safe_exceptions(func):
51 51 """Converts Dulwich exceptions to something neutral."""
52 52 @wraps(func)
53 53 def wrapper(*args, **kwargs):
54 54 try:
55 55 return func(*args, **kwargs)
56 56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 57 ObjectMissing) as e:
58 58 raise exceptions.LookupException(e.message)
59 59 except (HangupException, UnexpectedCommandError) as e:
60 60 raise exceptions.VcsException(e.message)
61 61 return wrapper
62 62
63 63
64 64 class Repo(DulwichRepo):
65 65 """
66 66 A wrapper for dulwich Repo class.
67 67
68 68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 69 "Too many open files" error. We need to close all opened file descriptors
70 70 once the repo object is destroyed.
71 71
72 72 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 73 to 0.12.0 +
74 74 """
75 75 def __del__(self):
76 76 if hasattr(self, 'object_store'):
77 77 self.close()
78 78
79 79
80 80 class GitFactory(RepoFactory):
81 81
82 82 def _create_repo(self, wire, create):
83 83 repo_path = str_to_dulwich(wire['path'])
84 84 return Repo(repo_path)
85 85
86 86
87 87 class GitRemote(object):
88 88
89 89 def __init__(self, factory):
90 90 self._factory = factory
91 91
92 92 self._bulk_methods = {
93 93 "author": self.commit_attribute,
94 94 "date": self.get_object_attrs,
95 95 "message": self.commit_attribute,
96 96 "parents": self.commit_attribute,
97 97 "_commit": self.revision,
98 98 }
99 99
100 100 def _assign_ref(self, wire, ref, commit_id):
101 101 repo = self._factory.repo(wire)
102 102 repo[ref] = commit_id
103 103
104 104 @reraise_safe_exceptions
105 105 def add_object(self, wire, content):
106 106 repo = self._factory.repo(wire)
107 107 blob = objects.Blob()
108 108 blob.set_raw_string(content)
109 109 repo.object_store.add_object(blob)
110 110 return blob.id
111 111
112 112 @reraise_safe_exceptions
113 113 def assert_correct_path(self, wire):
114 114 try:
115 115 self._factory.repo(wire)
116 116 except NotGitRepository as e:
117 117 # Exception can contain unicode which we convert
118 118 raise exceptions.AbortException(repr(e))
119 119
120 120 @reraise_safe_exceptions
121 121 def bare(self, wire):
122 122 repo = self._factory.repo(wire)
123 123 return repo.bare
124 124
125 125 @reraise_safe_exceptions
126 126 def blob_as_pretty_string(self, wire, sha):
127 127 repo = self._factory.repo(wire)
128 128 return repo[sha].as_pretty_string()
129 129
130 130 @reraise_safe_exceptions
131 131 def blob_raw_length(self, wire, sha):
132 132 repo = self._factory.repo(wire)
133 133 blob = repo[sha]
134 134 return blob.raw_length()
135 135
136 136 @reraise_safe_exceptions
137 137 def bulk_request(self, wire, rev, pre_load):
138 138 result = {}
139 139 for attr in pre_load:
140 140 try:
141 141 method = self._bulk_methods[attr]
142 142 args = [wire, rev]
143 143 if attr == "date":
144 144 args.extend(["commit_time", "commit_timezone"])
145 145 elif attr in ["author", "message", "parents"]:
146 146 args.append(attr)
147 147 result[attr] = method(*args)
148 148 except KeyError:
149 149 raise exceptions.VcsException(
150 150 "Unknown bulk attribute: %s" % attr)
151 151 return result
152 152
153 153 def _build_opener(self, url):
154 154 handlers = []
155 155 url_obj = url_parser(url)
156 156 _, authinfo = url_obj.authinfo()
157 157
158 158 if authinfo:
159 159 # create a password manager
160 160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 161 passmgr.add_password(*authinfo)
162 162
163 163 handlers.extend((httpbasicauthhandler(passmgr),
164 164 httpdigestauthhandler(passmgr)))
165 165
166 166 return urllib2.build_opener(*handlers)
167 167
168 168 @reraise_safe_exceptions
169 169 def check_url(self, url, config):
170 170 url_obj = url_parser(url)
171 171 test_uri, _ = url_obj.authinfo()
172 172 url_obj.passwd = '*****'
173 url_obj.query = obfuscate_qs(url_obj.query)
173 174 cleaned_uri = str(url_obj)
174 175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
175 176
176 177 if not test_uri.endswith('info/refs'):
177 178 test_uri = test_uri.rstrip('/') + '/info/refs'
178 179
179 180 o = self._build_opener(url)
180 181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
181 182
182 183 q = {"service": 'git-upload-pack'}
183 184 qs = '?%s' % urllib.urlencode(q)
184 185 cu = "%s%s" % (test_uri, qs)
185 186 req = urllib2.Request(cu, None, {})
186 187
187 188 try:
188 189 log.debug("Trying to open URL %s", cleaned_uri)
189 190 resp = o.open(req)
190 191 if resp.code != 200:
191 192 raise exceptions.URLError('Return Code is not 200')
192 193 except Exception as e:
193 194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
194 195 # means it cannot be cloned
195 196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
196 197
197 198 # now detect if it's proper git repo
198 199 gitdata = resp.read()
199 200 if 'service=git-upload-pack' in gitdata:
200 201 pass
201 202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
202 203 # old style git can return some other format !
203 204 pass
204 205 else:
205 206 raise exceptions.URLError(
206 207 "url [%s] does not look like an git" % (cleaned_uri,))
207 208
208 209 return True
209 210
210 211 @reraise_safe_exceptions
211 212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
212 213 remote_refs = self.fetch(wire, url, apply_refs=False)
213 214 repo = self._factory.repo(wire)
214 215 if isinstance(valid_refs, list):
215 216 valid_refs = tuple(valid_refs)
216 217
217 218 for k in remote_refs:
218 219 # only parse heads/tags and skip so called deferred tags
219 220 if k.startswith(valid_refs) and not k.endswith(deferred):
220 221 repo[k] = remote_refs[k]
221 222
222 223 if update_after_clone:
223 224 # we want to checkout HEAD
224 225 repo["HEAD"] = remote_refs["HEAD"]
225 226 index.build_index_from_tree(repo.path, repo.index_path(),
226 227 repo.object_store, repo["HEAD"].tree)
227 228
228 229 # TODO: this is quite complex, check if that can be simplified
229 230 @reraise_safe_exceptions
230 231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
231 232 repo = self._factory.repo(wire)
232 233 object_store = repo.object_store
233 234
234 235 # Create tree and populates it with blobs
235 236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
236 237
237 238 for node in updated:
238 239 # Compute subdirs if needed
239 240 dirpath, nodename = vcspath.split(node['path'])
240 241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
241 242 parent = commit_tree
242 243 ancestors = [('', parent)]
243 244
244 245 # Tries to dig for the deepest existing tree
245 246 while dirnames:
246 247 curdir = dirnames.pop(0)
247 248 try:
248 249 dir_id = parent[curdir][1]
249 250 except KeyError:
250 251 # put curdir back into dirnames and stops
251 252 dirnames.insert(0, curdir)
252 253 break
253 254 else:
254 255 # If found, updates parent
255 256 parent = repo[dir_id]
256 257 ancestors.append((curdir, parent))
257 258 # Now parent is deepest existing tree and we need to create
258 259 # subtrees for dirnames (in reverse order)
259 260 # [this only applies for nodes from added]
260 261 new_trees = []
261 262
262 263 blob = objects.Blob.from_string(node['content'])
263 264
264 265 if dirnames:
265 266 # If there are trees which should be created we need to build
266 267 # them now (in reverse order)
267 268 reversed_dirnames = list(reversed(dirnames))
268 269 curtree = objects.Tree()
269 270 curtree[node['node_path']] = node['mode'], blob.id
270 271 new_trees.append(curtree)
271 272 for dirname in reversed_dirnames[:-1]:
272 273 newtree = objects.Tree()
273 274 newtree[dirname] = (DIR_STAT, curtree.id)
274 275 new_trees.append(newtree)
275 276 curtree = newtree
276 277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
277 278 else:
278 279 parent.add(
279 280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
280 281
281 282 new_trees.append(parent)
282 283 # Update ancestors
283 284 reversed_ancestors = reversed(
284 285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
285 286 for parent, tree, path in reversed_ancestors:
286 287 parent[path] = (DIR_STAT, tree.id)
287 288 object_store.add_object(tree)
288 289
289 290 object_store.add_object(blob)
290 291 for tree in new_trees:
291 292 object_store.add_object(tree)
292 293
293 294 for node_path in removed:
294 295 paths = node_path.split('/')
295 296 tree = commit_tree
296 297 trees = [tree]
297 298 # Traverse deep into the forest...
298 299 for path in paths:
299 300 try:
300 301 obj = repo[tree[path][1]]
301 302 if isinstance(obj, objects.Tree):
302 303 trees.append(obj)
303 304 tree = obj
304 305 except KeyError:
305 306 break
306 307 # Cut down the blob and all rotten trees on the way back...
307 308 for path, tree in reversed(zip(paths, trees)):
308 309 del tree[path]
309 310 if tree:
310 311 # This tree still has elements - don't remove it or any
311 312 # of it's parents
312 313 break
313 314
314 315 object_store.add_object(commit_tree)
315 316
316 317 # Create commit
317 318 commit = objects.Commit()
318 319 commit.tree = commit_tree.id
319 320 for k, v in commit_data.iteritems():
320 321 setattr(commit, k, v)
321 322 object_store.add_object(commit)
322 323
323 324 ref = 'refs/heads/%s' % branch
324 325 repo.refs[ref] = commit.id
325 326
326 327 return commit.id
327 328
328 329 @reraise_safe_exceptions
329 330 def fetch(self, wire, url, apply_refs=True, refs=None):
330 331 if url != 'default' and '://' not in url:
331 332 client = LocalGitClient(url)
332 333 else:
333 334 url_obj = url_parser(url)
334 335 o = self._build_opener(url)
335 336 url, _ = url_obj.authinfo()
336 337 client = HttpGitClient(base_url=url, opener=o)
337 338 repo = self._factory.repo(wire)
338 339
339 340 determine_wants = repo.object_store.determine_wants_all
340 341 if refs:
341 342 def determine_wants_requested(references):
342 343 return [references[r] for r in references if r in refs]
343 344 determine_wants = determine_wants_requested
344 345
345 346 try:
346 347 remote_refs = client.fetch(
347 348 path=url, target=repo, determine_wants=determine_wants)
348 349 except NotGitRepository:
349 350 log.warning(
350 351 'Trying to fetch from "%s" failed, not a Git repository.', url)
351 352 raise exceptions.AbortException()
352 353
353 354 # mikhail: client.fetch() returns all the remote refs, but fetches only
354 355 # refs filtered by `determine_wants` function. We need to filter result
355 356 # as well
356 357 if refs:
357 358 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
358 359
359 360 if apply_refs:
360 361 # TODO: johbo: Needs proper test coverage with a git repository
361 362 # that contains a tag object, so that we would end up with
362 363 # a peeled ref at this point.
363 364 PEELED_REF_MARKER = '^{}'
364 365 for k in remote_refs:
365 366 if k.endswith(PEELED_REF_MARKER):
366 367 log.info("Skipping peeled reference %s", k)
367 368 continue
368 369 repo[k] = remote_refs[k]
369 370
370 371 if refs:
371 372 # mikhail: explicitly set the head to the last ref.
372 373 repo['HEAD'] = remote_refs[refs[-1]]
373 374
374 375 # TODO: mikhail: should we return remote_refs here to be
375 376 # consistent?
376 377 else:
377 378 return remote_refs
378 379
379 380 @reraise_safe_exceptions
380 381 def get_remote_refs(self, wire, url):
381 382 repo = Repo(url)
382 383 return repo.get_refs()
383 384
384 385 @reraise_safe_exceptions
385 386 def get_description(self, wire):
386 387 repo = self._factory.repo(wire)
387 388 return repo.get_description()
388 389
389 390 @reraise_safe_exceptions
390 391 def get_file_history(self, wire, file_path, commit_id, limit):
391 392 repo = self._factory.repo(wire)
392 393 include = [commit_id]
393 394 paths = [file_path]
394 395
395 396 walker = repo.get_walker(include, paths=paths, max_entries=limit)
396 397 return [x.commit.id for x in walker]
397 398
398 399 @reraise_safe_exceptions
399 400 def get_missing_revs(self, wire, rev1, rev2, path2):
400 401 repo = self._factory.repo(wire)
401 402 LocalGitClient(thin_packs=False).fetch(path2, repo)
402 403
403 404 wire_remote = wire.copy()
404 405 wire_remote['path'] = path2
405 406 repo_remote = self._factory.repo(wire_remote)
406 407 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
407 408
408 409 revs = [
409 410 x.commit.id
410 411 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
411 412 return revs
412 413
413 414 @reraise_safe_exceptions
414 415 def get_object(self, wire, sha):
415 416 repo = self._factory.repo(wire)
416 417 obj = repo.get_object(sha)
417 418 commit_id = obj.id
418 419
419 420 if isinstance(obj, Tag):
420 421 commit_id = obj.object[1]
421 422
422 423 return {
423 424 'id': obj.id,
424 425 'type': obj.type_name,
425 426 'commit_id': commit_id
426 427 }
427 428
428 429 @reraise_safe_exceptions
429 430 def get_object_attrs(self, wire, sha, *attrs):
430 431 repo = self._factory.repo(wire)
431 432 obj = repo.get_object(sha)
432 433 return list(getattr(obj, a) for a in attrs)
433 434
434 435 @reraise_safe_exceptions
435 436 def get_refs(self, wire):
436 437 repo = self._factory.repo(wire)
437 438 result = {}
438 439 for ref, sha in repo.refs.as_dict().items():
439 440 peeled_sha = repo.get_peeled(ref)
440 441 result[ref] = peeled_sha
441 442 return result
442 443
443 444 @reraise_safe_exceptions
444 445 def get_refs_path(self, wire):
445 446 repo = self._factory.repo(wire)
446 447 return repo.refs.path
447 448
448 449 @reraise_safe_exceptions
449 450 def head(self, wire):
450 451 repo = self._factory.repo(wire)
451 452 return repo.head()
452 453
453 454 @reraise_safe_exceptions
454 455 def init(self, wire):
455 456 repo_path = str_to_dulwich(wire['path'])
456 457 self.repo = Repo.init(repo_path)
457 458
458 459 @reraise_safe_exceptions
459 460 def init_bare(self, wire):
460 461 repo_path = str_to_dulwich(wire['path'])
461 462 self.repo = Repo.init_bare(repo_path)
462 463
463 464 @reraise_safe_exceptions
464 465 def revision(self, wire, rev):
465 466 repo = self._factory.repo(wire)
466 467 obj = repo[rev]
467 468 obj_data = {
468 469 'id': obj.id,
469 470 }
470 471 try:
471 472 obj_data['tree'] = obj.tree
472 473 except AttributeError:
473 474 pass
474 475 return obj_data
475 476
476 477 @reraise_safe_exceptions
477 478 def commit_attribute(self, wire, rev, attr):
478 479 repo = self._factory.repo(wire)
479 480 obj = repo[rev]
480 481 return getattr(obj, attr)
481 482
482 483 @reraise_safe_exceptions
483 484 def set_refs(self, wire, key, value):
484 485 repo = self._factory.repo(wire)
485 486 repo.refs[key] = value
486 487
487 488 @reraise_safe_exceptions
488 489 def remove_ref(self, wire, key):
489 490 repo = self._factory.repo(wire)
490 491 del repo.refs[key]
491 492
492 493 @reraise_safe_exceptions
493 494 def tree_changes(self, wire, source_id, target_id):
494 495 repo = self._factory.repo(wire)
495 496 source = repo[source_id].tree if source_id else None
496 497 target = repo[target_id].tree
497 498 result = repo.object_store.tree_changes(source, target)
498 499 return list(result)
499 500
500 501 @reraise_safe_exceptions
501 502 def tree_items(self, wire, tree_id):
502 503 repo = self._factory.repo(wire)
503 504 tree = repo[tree_id]
504 505
505 506 result = []
506 507 for item in tree.iteritems():
507 508 item_sha = item.sha
508 509 item_mode = item.mode
509 510
510 511 if FILE_MODE(item_mode) == GIT_LINK:
511 512 item_type = "link"
512 513 else:
513 514 item_type = repo[item_sha].type_name
514 515
515 516 result.append((item.path, item_mode, item_sha, item_type))
516 517 return result
517 518
518 519 @reraise_safe_exceptions
519 520 def update_server_info(self, wire):
520 521 repo = self._factory.repo(wire)
521 522 update_server_info(repo)
522 523
523 524 @reraise_safe_exceptions
524 525 def discover_git_version(self):
525 526 stdout, _ = self.run_git_command(
526 527 {}, ['--version'], _bare=True, _safe=True)
527 528 prefix = 'git version'
528 529 if stdout.startswith(prefix):
529 530 stdout = stdout[len(prefix):]
530 531 return stdout.strip()
531 532
532 533 @reraise_safe_exceptions
533 534 def run_git_command(self, wire, cmd, **opts):
534 535 path = wire.get('path', None)
535 536
536 537 if path and os.path.isdir(path):
537 538 opts['cwd'] = path
538 539
539 540 if '_bare' in opts:
540 541 _copts = []
541 542 del opts['_bare']
542 543 else:
543 544 _copts = ['-c', 'core.quotepath=false', ]
544 545 safe_call = False
545 546 if '_safe' in opts:
546 547 # no exc on failure
547 548 del opts['_safe']
548 549 safe_call = True
549 550
550 551 gitenv = os.environ.copy()
551 552 gitenv.update(opts.pop('extra_env', {}))
552 553 # need to clean fix GIT_DIR !
553 554 if 'GIT_DIR' in gitenv:
554 555 del gitenv['GIT_DIR']
555 556 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
556 557
557 558 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
558 559
559 560 try:
560 561 _opts = {'env': gitenv, 'shell': False}
561 562 _opts.update(opts)
562 563 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
563 564
564 565 return ''.join(p), ''.join(p.error)
565 566 except (EnvironmentError, OSError) as err:
566 567 tb_err = ("Couldn't run git command (%s).\n"
567 568 "Original error was:%s\n" % (cmd, err))
568 569 log.exception(tb_err)
569 570 if safe_call:
570 571 return '', err
571 572 else:
572 573 raise exceptions.VcsException(tb_err)
573 574
574 575
575 576 def str_to_dulwich(value):
576 577 """
577 578 Dulwich 0.10.1a requires `unicode` objects to be passed in.
578 579 """
579 580 return value.decode(settings.WIRE_ENCODING)
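In the Git remote above, check_url now masks both the password and the sensitive query parameters before the URL is logged. An illustrative sketch of the resulting log value, with made-up credentials, assuming mercurial's url object renders the user:password@host form as in its own docstrings:

    url_obj = url_parser('https://user:secret@example.com/repo?auth_token=abc')
    url_obj.passwd = '*****'                      # mask the password
    url_obj.query = obfuscate_qs(url_obj.query)   # mask auth_token / api_key
    str(url_obj)  # roughly 'https://user:*****@example.com/repo?auth_token=*****'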
@@ -1,721 +1,723 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import sys
22 22 import urllib
23 23 import urllib2
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29
30 30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory
31 from vcsserver.base import RepoFactory, obfuscate_qs
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 37 InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 57 # signal in a non-main thread, thus generating a ValueError.
58 58 baseui.setconfig('worker', 'numcpus', 1)
59 59
60 60 # If there is no config for the largefiles extension, we explicitly disable
61 61 # it here. This overrides settings from repositories hgrc file. Recent
62 62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 63 # repo.
64 64 if not baseui.hasconfig('extensions', 'largefiles'):
65 65 log.debug('Explicitly disable largefiles extension for repo.')
66 66 baseui.setconfig('extensions', 'largefiles', '!')
67 67
68 68 return baseui
69 69
70 70
71 71 def reraise_safe_exceptions(func):
72 72 """Decorator for converting mercurial exceptions to something neutral."""
73 73 def wrapper(*args, **kwargs):
74 74 try:
75 75 return func(*args, **kwargs)
76 76 except (Abort, InterventionRequired):
77 77 raise_from_original(exceptions.AbortException)
78 78 except RepoLookupError:
79 79 raise_from_original(exceptions.LookupException)
80 80 except RequirementError:
81 81 raise_from_original(exceptions.RequirementException)
82 82 except RepoError:
83 83 raise_from_original(exceptions.VcsException)
84 84 except LookupError:
85 85 raise_from_original(exceptions.LookupException)
86 86 except Exception as e:
87 87 if not hasattr(e, '_vcs_kind'):
88 88 log.exception("Unhandled exception in hg remote call")
89 89 raise_from_original(exceptions.UnhandledException)
90 90 raise
91 91 return wrapper
92 92
93 93
94 94 def raise_from_original(new_type):
95 95 """
96 96 Raise a new exception type with original args and traceback.
97 97 """
98 98 _, original, traceback = sys.exc_info()
99 99 try:
100 100 raise new_type(*original.args), None, traceback
101 101 finally:
102 102 del traceback
103 103
104 104
105 105 class MercurialFactory(RepoFactory):
106 106
107 107 def _create_config(self, config, hooks=True):
108 108 if not hooks:
109 109 hooks_to_clean = frozenset((
110 110 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 112 new_config = []
113 113 for section, option, value in config:
114 114 if section == 'hooks' and option in hooks_to_clean:
115 115 continue
116 116 new_config.append((section, option, value))
117 117 config = new_config
118 118
119 119 baseui = make_ui_from_config(config)
120 120 return baseui
121 121
122 122 def _create_repo(self, wire, create):
123 123 baseui = self._create_config(wire["config"])
124 124 return localrepository(baseui, wire["path"], create)
125 125
126 126
127 127 class HgRemote(object):
128 128
129 129 def __init__(self, factory):
130 130 self._factory = factory
131 131
132 132 self._bulk_methods = {
133 133 "affected_files": self.ctx_files,
134 134 "author": self.ctx_user,
135 135 "branch": self.ctx_branch,
136 136 "children": self.ctx_children,
137 137 "date": self.ctx_date,
138 138 "message": self.ctx_description,
139 139 "parents": self.ctx_parents,
140 140 "status": self.ctx_status,
141 141 "_file_paths": self.ctx_list,
142 142 }
143 143
144 144 @reraise_safe_exceptions
145 145 def discover_hg_version(self):
146 146 from mercurial import util
147 147 return util.version()
148 148
149 149 @reraise_safe_exceptions
150 150 def archive_repo(self, archive_path, mtime, file_info, kind):
151 151 if kind == "tgz":
152 152 archiver = archival.tarit(archive_path, mtime, "gz")
153 153 elif kind == "tbz2":
154 154 archiver = archival.tarit(archive_path, mtime, "bz2")
155 155 elif kind == 'zip':
156 156 archiver = archival.zipit(archive_path, mtime)
157 157 else:
158 158 raise exceptions.ArchiveException(
159 159 'Remote does not support: "%s".' % kind)
160 160
161 161 for f_path, f_mode, f_is_link, f_content in file_info:
162 162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
163 163 archiver.done()
164 164
165 165 @reraise_safe_exceptions
166 166 def bookmarks(self, wire):
167 167 repo = self._factory.repo(wire)
168 168 return dict(repo._bookmarks)
169 169
170 170 @reraise_safe_exceptions
171 171 def branches(self, wire, normal, closed):
172 172 repo = self._factory.repo(wire)
173 173 iter_branches = repo.branchmap().iterbranches()
174 174 bt = {}
175 175 for branch_name, _heads, tip, is_closed in iter_branches:
176 176 if normal and not is_closed:
177 177 bt[branch_name] = tip
178 178 if closed and is_closed:
179 179 bt[branch_name] = tip
180 180
181 181 return bt
182 182
183 183 @reraise_safe_exceptions
184 184 def bulk_request(self, wire, rev, pre_load):
185 185 result = {}
186 186 for attr in pre_load:
187 187 try:
188 188 method = self._bulk_methods[attr]
189 189 result[attr] = method(wire, rev)
190 190 except KeyError:
191 191 raise exceptions.VcsException(
192 192 'Unknown bulk attribute: "%s"' % attr)
193 193 return result
194 194
195 195 @reraise_safe_exceptions
196 196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
197 197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
198 198 clone(baseui, source, dest, noupdate=not update_after_clone)
199 199
200 200 @reraise_safe_exceptions
201 201 def commitctx(
202 202 self, wire, message, parents, commit_time, commit_timezone,
203 203 user, files, extra, removed, updated):
204 204
205 205 def _filectxfn(_repo, memctx, path):
206 206 """
207 207 Marks given path as added/changed/removed in a given _repo. This is
208 208 for internal mercurial commit function.
209 209 """
210 210
211 211 # check if this path is removed
212 212 if path in removed:
213 213 # returning None is a way to mark node for removal
214 214 return None
215 215
216 216 # check if this path is added
217 217 for node in updated:
218 218 if node['path'] == path:
219 219 return memfilectx(
220 220 _repo,
221 221 path=node['path'],
222 222 data=node['content'],
223 223 islink=False,
224 224 isexec=bool(node['mode'] & stat.S_IXUSR),
225 225 copied=False,
226 226 memctx=memctx)
227 227
228 228 raise exceptions.AbortException(
229 229 "Given path haven't been marked as added, "
230 230 "changed or removed (%s)" % path)
231 231
232 232 repo = self._factory.repo(wire)
233 233
234 234 commit_ctx = memctx(
235 235 repo=repo,
236 236 parents=parents,
237 237 text=message,
238 238 files=files,
239 239 filectxfn=_filectxfn,
240 240 user=user,
241 241 date=(commit_time, commit_timezone),
242 242 extra=extra)
243 243
244 244 n = repo.commitctx(commit_ctx)
245 245 new_id = hex(n)
246 246
247 247 return new_id
248 248
249 249 @reraise_safe_exceptions
250 250 def ctx_branch(self, wire, revision):
251 251 repo = self._factory.repo(wire)
252 252 ctx = repo[revision]
253 253 return ctx.branch()
254 254
255 255 @reraise_safe_exceptions
256 256 def ctx_children(self, wire, revision):
257 257 repo = self._factory.repo(wire)
258 258 ctx = repo[revision]
259 259 return [child.rev() for child in ctx.children()]
260 260
261 261 @reraise_safe_exceptions
262 262 def ctx_date(self, wire, revision):
263 263 repo = self._factory.repo(wire)
264 264 ctx = repo[revision]
265 265 return ctx.date()
266 266
267 267 @reraise_safe_exceptions
268 268 def ctx_description(self, wire, revision):
269 269 repo = self._factory.repo(wire)
270 270 ctx = repo[revision]
271 271 return ctx.description()
272 272
273 273 @reraise_safe_exceptions
274 274 def ctx_diff(
275 275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
276 276 repo = self._factory.repo(wire)
277 277 ctx = repo[revision]
278 278 result = ctx.diff(
279 279 git=git, ignore_whitespace=ignore_whitespace, context=context)
280 280 return list(result)
281 281
282 282 @reraise_safe_exceptions
283 283 def ctx_files(self, wire, revision):
284 284 repo = self._factory.repo(wire)
285 285 ctx = repo[revision]
286 286 return ctx.files()
287 287
288 288 @reraise_safe_exceptions
289 289 def ctx_list(self, path, revision):
290 290 repo = self._factory.repo(path)
291 291 ctx = repo[revision]
292 292 return list(ctx)
293 293
294 294 @reraise_safe_exceptions
295 295 def ctx_parents(self, wire, revision):
296 296 repo = self._factory.repo(wire)
297 297 ctx = repo[revision]
298 298 return [parent.rev() for parent in ctx.parents()]
299 299
300 300 @reraise_safe_exceptions
301 301 def ctx_substate(self, wire, revision):
302 302 repo = self._factory.repo(wire)
303 303 ctx = repo[revision]
304 304 return ctx.substate
305 305
306 306 @reraise_safe_exceptions
307 307 def ctx_status(self, wire, revision):
308 308 repo = self._factory.repo(wire)
309 309 ctx = repo[revision]
310 310 status = repo[ctx.p1().node()].status(other=ctx.node())
311 311 # object of status (odd, custom named tuple in mercurial) is not
312 312 # correctly serializable via Pyro, we make it a list, as the underling
313 313 # API expects this to be a list
314 314 return list(status)
315 315
316 316 @reraise_safe_exceptions
317 317 def ctx_user(self, wire, revision):
318 318 repo = self._factory.repo(wire)
319 319 ctx = repo[revision]
320 320 return ctx.user()
321 321
322 322 @reraise_safe_exceptions
323 323 def check_url(self, url, config):
324 324 _proto = None
325 325 if '+' in url[:url.find('://')]:
326 326 _proto = url[0:url.find('+')]
327 327 url = url[url.find('+') + 1:]
328 328 handlers = []
329 329 url_obj = url_parser(url)
330 330 test_uri, authinfo = url_obj.authinfo()
331 331 url_obj.passwd = '*****'
332 url_obj.query = obfuscate_qs(url_obj.query)
333
332 334 cleaned_uri = str(url_obj)
333 335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
334 336
335 337 if authinfo:
336 338 # create a password manager
337 339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
338 340 passmgr.add_password(*authinfo)
339 341
340 342 handlers.extend((httpbasicauthhandler(passmgr),
341 343 httpdigestauthhandler(passmgr)))
342 344
343 345 o = urllib2.build_opener(*handlers)
344 346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
345 347 ('Accept', 'application/mercurial-0.1')]
346 348
347 349 q = {"cmd": 'between'}
348 350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
349 351 qs = '?%s' % urllib.urlencode(q)
350 352 cu = "%s%s" % (test_uri, qs)
351 353 req = urllib2.Request(cu, None, {})
352 354
353 355 try:
354 356 log.debug("Trying to open URL %s", cleaned_uri)
355 357 resp = o.open(req)
356 358 if resp.code != 200:
357 359 raise exceptions.URLError('Return Code is not 200')
358 360 except Exception as e:
359 361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
360 362 # means it cannot be cloned
361 363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
362 364
363 365 # now check if it's a proper hg repo, but don't do it for svn
364 366 try:
365 367 if _proto == 'svn':
366 368 pass
367 369 else:
368 370 # check for pure hg repos
369 371 log.debug(
370 372 "Verifying if URL is a Mercurial repository: %s",
371 373 cleaned_uri)
372 374 httppeer(make_ui_from_config(config), url).lookup('tip')
373 375 except Exception as e:
374 376 log.warning("URL is not a valid Mercurial repository: %s",
375 377 cleaned_uri)
376 378 raise exceptions.URLError(
377 379 "url [%s] does not look like an hg repo org_exc: %s"
378 380 % (cleaned_uri, e))
379 381
380 382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
381 383 return True
382 384
383 385 @reraise_safe_exceptions
384 386 def diff(
385 387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
386 388 context):
387 389 repo = self._factory.repo(wire)
388 390
389 391 if file_filter:
390 392 filter = match(file_filter[0], '', [file_filter[1]])
391 393 else:
392 394 filter = file_filter
393 395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
394 396
395 397 try:
396 398 return "".join(patch.diff(
397 399 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
398 400 except RepoLookupError:
399 401 raise exceptions.LookupException()
400 402
401 403 @reraise_safe_exceptions
402 404 def file_history(self, wire, revision, path, limit):
403 405 repo = self._factory.repo(wire)
404 406
405 407 ctx = repo[revision]
406 408 fctx = ctx.filectx(path)
407 409
408 410 def history_iter():
409 411 limit_rev = fctx.rev()
410 412 for obj in reversed(list(fctx.filelog())):
411 413 obj = fctx.filectx(obj)
412 414 if limit_rev >= obj.rev():
413 415 yield obj
414 416
415 417 history = []
416 418 for cnt, obj in enumerate(history_iter()):
417 419 if limit and cnt >= limit:
418 420 break
419 421 history.append(hex(obj.node()))
420 422
421 423 return [x for x in history]
422 424
423 425 @reraise_safe_exceptions
424 426 def file_history_untill(self, wire, revision, path, limit):
425 427 repo = self._factory.repo(wire)
426 428 ctx = repo[revision]
427 429 fctx = ctx.filectx(path)
428 430
429 431 file_log = list(fctx.filelog())
430 432 if limit:
431 433 # Limit to the last n items
432 434 file_log = file_log[-limit:]
433 435
434 436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
435 437
436 438 @reraise_safe_exceptions
437 439 def fctx_annotate(self, wire, revision, path):
438 440 repo = self._factory.repo(wire)
439 441 ctx = repo[revision]
440 442 fctx = ctx.filectx(path)
441 443
442 444 result = []
443 445 for i, annotate_data in enumerate(fctx.annotate()):
444 446 ln_no = i + 1
445 447 sha = hex(annotate_data[0].node())
446 448 result.append((ln_no, sha, annotate_data[1]))
447 449 return result
448 450
449 451 @reraise_safe_exceptions
450 452 def fctx_data(self, wire, revision, path):
451 453 repo = self._factory.repo(wire)
452 454 ctx = repo[revision]
453 455 fctx = ctx.filectx(path)
454 456 return fctx.data()
455 457
456 458 @reraise_safe_exceptions
457 459 def fctx_flags(self, wire, revision, path):
458 460 repo = self._factory.repo(wire)
459 461 ctx = repo[revision]
460 462 fctx = ctx.filectx(path)
461 463 return fctx.flags()
462 464
463 465 @reraise_safe_exceptions
464 466 def fctx_size(self, wire, revision, path):
465 467 repo = self._factory.repo(wire)
466 468 ctx = repo[revision]
467 469 fctx = ctx.filectx(path)
468 470 return fctx.size()
469 471
470 472 @reraise_safe_exceptions
471 473 def get_all_commit_ids(self, wire, name):
472 474 repo = self._factory.repo(wire)
473 475 revs = repo.filtered(name).changelog.index
474 476 return map(lambda x: hex(x[7]), revs)[:-1]
475 477
476 478 @reraise_safe_exceptions
477 479 def get_config_value(self, wire, section, name, untrusted=False):
478 480 repo = self._factory.repo(wire)
479 481 return repo.ui.config(section, name, untrusted=untrusted)
480 482
481 483 @reraise_safe_exceptions
482 484 def get_config_bool(self, wire, section, name, untrusted=False):
483 485 repo = self._factory.repo(wire)
484 486 return repo.ui.configbool(section, name, untrusted=untrusted)
485 487
486 488 @reraise_safe_exceptions
487 489 def get_config_list(self, wire, section, name, untrusted=False):
488 490 repo = self._factory.repo(wire)
489 491 return repo.ui.configlist(section, name, untrusted=untrusted)
490 492
491 493 @reraise_safe_exceptions
492 494 def is_large_file(self, wire, path):
493 495 return largefiles.lfutil.isstandin(path)
494 496
495 497 @reraise_safe_exceptions
496 498 def in_store(self, wire, sha):
497 499 repo = self._factory.repo(wire)
498 500 return largefiles.lfutil.instore(repo, sha)
499 501
500 502 @reraise_safe_exceptions
501 503 def in_user_cache(self, wire, sha):
502 504 repo = self._factory.repo(wire)
503 505 return largefiles.lfutil.inusercache(repo.ui, sha)
504 506
505 507 @reraise_safe_exceptions
506 508 def store_path(self, wire, sha):
507 509 repo = self._factory.repo(wire)
508 510 return largefiles.lfutil.storepath(repo, sha)
509 511
510 512 @reraise_safe_exceptions
511 513 def link(self, wire, sha, path):
512 514 repo = self._factory.repo(wire)
513 515 largefiles.lfutil.link(
514 516 largefiles.lfutil.usercachepath(repo.ui, sha), path)
515 517
516 518 @reraise_safe_exceptions
517 519 def localrepository(self, wire, create=False):
518 520 self._factory.repo(wire, create=create)
519 521
520 522 @reraise_safe_exceptions
521 523 def lookup(self, wire, revision, both):
522 524 # TODO Paris: Ugly hack to "deserialize" long for msgpack
523 525 if isinstance(revision, float):
524 526 revision = long(revision)
525 527 repo = self._factory.repo(wire)
526 528 try:
527 529 ctx = repo[revision]
528 530 except RepoLookupError:
529 531 raise exceptions.LookupException(revision)
530 532 except LookupError as e:
531 533 raise exceptions.LookupException(e.name)
532 534
533 535 if not both:
534 536 return ctx.hex()
535 537
536 538 ctx = repo[ctx.hex()]
537 539 return ctx.hex(), ctx.rev()
538 540
539 541 @reraise_safe_exceptions
540 542 def pull(self, wire, url, commit_ids=None):
541 543 repo = self._factory.repo(wire)
542 544 remote = peer(repo, {}, url)
543 545 if commit_ids:
544 546 commit_ids = [bin(commit_id) for commit_id in commit_ids]
545 547
546 548 return exchange.pull(
547 549 repo, remote, heads=commit_ids, force=None).cgresult
548 550
549 551 @reraise_safe_exceptions
550 552 def revision(self, wire, rev):
551 553 repo = self._factory.repo(wire)
552 554 ctx = repo[rev]
553 555 return ctx.rev()
554 556
555 557 @reraise_safe_exceptions
556 558 def rev_range(self, wire, filter):
557 559 repo = self._factory.repo(wire)
558 560 revisions = [rev for rev in revrange(repo, filter)]
559 561 return revisions
560 562
561 563 @reraise_safe_exceptions
562 564 def rev_range_hash(self, wire, node):
563 565 repo = self._factory.repo(wire)
564 566
565 567 def get_revs(repo, rev_opt):
566 568 if rev_opt:
567 569 revs = revrange(repo, rev_opt)
568 570 if len(revs) == 0:
569 571 return (nullrev, nullrev)
570 572 return max(revs), min(revs)
571 573 else:
572 574 return len(repo) - 1, 0
573 575
574 576 stop, start = get_revs(repo, [node + ':'])
575 577 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
576 578 return revs
577 579
578 580 @reraise_safe_exceptions
579 581 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
580 582 other_path = kwargs.pop('other_path', None)
581 583
582 584 # case when we want to compare two independent repositories
583 585 if other_path and other_path != wire["path"]:
584 586 baseui = self._factory._create_config(wire["config"])
585 587 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
586 588 else:
587 589 repo = self._factory.repo(wire)
588 590 return list(repo.revs(rev_spec, *args))
589 591
590 592 @reraise_safe_exceptions
591 593 def strip(self, wire, revision, update, backup):
592 594 repo = self._factory.repo(wire)
593 595 ctx = repo[revision]
594 596 hgext_strip(
595 597 repo.baseui, repo, ctx.node(), update=update, backup=backup)
596 598
597 599 @reraise_safe_exceptions
598 600 def tag(self, wire, name, revision, message, local, user,
599 601 tag_time, tag_timezone):
600 602 repo = self._factory.repo(wire)
601 603 ctx = repo[revision]
602 604 node = ctx.node()
603 605
604 606 date = (tag_time, tag_timezone)
605 607 try:
606 608 repo.tag(name, node, message, local, user, date)
607 609 except Abort:
608 610 log.exception("Tag operation aborted")
609 611 raise exceptions.AbortException()
610 612
611 613 @reraise_safe_exceptions
612 614 def tags(self, wire):
613 615 repo = self._factory.repo(wire)
614 616 return repo.tags()
615 617
616 618 @reraise_safe_exceptions
617 619 def update(self, wire, node=None, clean=False):
618 620 repo = self._factory.repo(wire)
619 621 baseui = self._factory._create_config(wire['config'])
620 622 commands.update(baseui, repo, node=node, clean=clean)
621 623
622 624 @reraise_safe_exceptions
623 625 def identify(self, wire):
624 626 repo = self._factory.repo(wire)
625 627 baseui = self._factory._create_config(wire['config'])
626 628 output = io.BytesIO()
627 629 baseui.write = output.write
628 630 # This is required to get a full node id
629 631 baseui.debugflag = True
630 632 commands.identify(baseui, repo, id=True)
631 633
632 634 return output.getvalue()
633 635
634 636 @reraise_safe_exceptions
635 637 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
636 638 hooks=True):
637 639 repo = self._factory.repo(wire)
638 640 baseui = self._factory._create_config(wire['config'], hooks=hooks)
639 641
640 642 # Mercurial internally has a lot of logic that checks ONLY if
641 643 # option is defined, we just pass those if they are defined then
642 644 opts = {}
643 645 if bookmark:
644 646 opts['bookmark'] = bookmark
645 647 if branch:
646 648 opts['branch'] = branch
647 649 if revision:
648 650 opts['rev'] = revision
649 651
650 652 commands.pull(baseui, repo, source, **opts)
651 653
652 654 @reraise_safe_exceptions
653 655 def heads(self, wire, branch=None):
654 656 repo = self._factory.repo(wire)
655 657 baseui = self._factory._create_config(wire['config'])
656 658 output = io.BytesIO()
657 659
658 660 def write(data, **unused_kwargs):
659 661 output.write(data)
660 662
661 663 baseui.write = write
662 664 if branch:
663 665 args = [branch]
664 666 else:
665 667 args = []
666 668 commands.heads(baseui, repo, template='{node} ', *args)
667 669
668 670 return output.getvalue()
669 671
670 672 @reraise_safe_exceptions
671 673 def ancestor(self, wire, revision1, revision2):
672 674 repo = self._factory.repo(wire)
673 675 baseui = self._factory._create_config(wire['config'])
674 676 output = io.BytesIO()
675 677 baseui.write = output.write
676 678 commands.debugancestor(baseui, repo, revision1, revision2)
677 679
678 680 return output.getvalue()
679 681
680 682 @reraise_safe_exceptions
681 683 def push(self, wire, revisions, dest_path, hooks=True,
682 684 push_branches=False):
683 685 repo = self._factory.repo(wire)
684 686 baseui = self._factory._create_config(wire['config'], hooks=hooks)
685 687 commands.push(baseui, repo, dest=dest_path, rev=revisions,
686 688 new_branch=push_branches)
687 689
688 690 @reraise_safe_exceptions
689 691 def merge(self, wire, revision):
690 692 repo = self._factory.repo(wire)
691 693 baseui = self._factory._create_config(wire['config'])
692 694 repo.ui.setconfig('ui', 'merge', 'internal:dump')
693 695
694 696 # In case of sub repositories are used mercurial prompts the user in
695 697 # case of merge conflicts or different sub repository sources. By
696 698 # setting the interactive flag to `False` mercurial doesn't prompt the
697 699 # used but instead uses a default value.
698 700 repo.ui.setconfig('ui', 'interactive', False)
699 701
700 702 commands.merge(baseui, repo, rev=revision)
701 703
702 704 @reraise_safe_exceptions
703 705 def commit(self, wire, message, username):
704 706 repo = self._factory.repo(wire)
705 707 baseui = self._factory._create_config(wire['config'])
706 708 repo.ui.setconfig('ui', 'username', username)
707 709 commands.commit(baseui, repo, message=message)
708 710
709 711 @reraise_safe_exceptions
710 712 def rebase(self, wire, source=None, dest=None, abort=False):
711 713 repo = self._factory.repo(wire)
712 714 baseui = self._factory._create_config(wire['config'])
713 715 repo.ui.setconfig('ui', 'merge', 'internal:dump')
714 716 rebase.rebase(
715 717 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
716 718
717 719 @reraise_safe_exceptions
718 720 def bookmark(self, wire, bookmark, revision=None):
719 721 repo = self._factory.repo(wire)
720 722 baseui = self._factory._create_config(wire['config'])
721 723 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
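The Mercurial remote applies the same sanitisation in its check_url before logging. For callers that only hold a raw URL string rather than mercurial's url object, a comparable effect can be had with the standard library; this is a sketch under that assumption, not code from this commit:

    import urlparse
    from vcsserver.base import obfuscate_qs

    def sanitize_url_for_log(url):
        # Mask RhodeCode's sensitive query parameters before logging a remote
        # URL; credentials embedded in the netloc are not touched here.
        parts = urlparse.urlsplit(url)
        return urlparse.urlunsplit(
            (parts.scheme, parts.netloc, parts.path,
             obfuscate_qs(parts.query), parts.fragment))

    sanitize_url_for_log('https://example.com/repo?api_key=secret&page=2')
    # 'https://example.com/repo?api_key=*****&page=2'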