##// END OF EJS Templates
backends: implemented functions for fetching backend versions via remote calls....
marcink -
r101:62999e0d default
parent child Browse files
Show More
@@ -1,573 +1,576 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import urllib
24 24 import urllib2
25 25 from functools import wraps
26 26
27 27 from dulwich import index, objects
28 28 from dulwich.client import HttpGitClient, LocalGitClient
29 29 from dulwich.errors import (
30 30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 31 MissingCommitError, ObjectMissing, HangupException,
32 32 UnexpectedCommandError)
33 33 from dulwich.repo import Repo as DulwichRepo, Tag
34 34 from dulwich.server import update_server_info
35 35
36 36 from vcsserver import exceptions, settings, subprocessio
37 37 from vcsserver.utils import safe_str
38 38 from vcsserver.base import RepoFactory
39 39 from vcsserver.hgcompat import (
40 40 hg_url, httpbasicauthhandler, httpdigestauthhandler)
41 41
42 42
43 43 DIR_STAT = stat.S_IFDIR
44 44 FILE_MODE = stat.S_IFMT
45 45 GIT_LINK = objects.S_IFGITLINK
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def reraise_safe_exceptions(func):
51 51 """Converts Dulwich exceptions to something neutral."""
52 52 @wraps(func)
53 53 def wrapper(*args, **kwargs):
54 54 try:
55 55 return func(*args, **kwargs)
56 56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 57 ObjectMissing) as e:
58 58 raise exceptions.LookupException(e.message)
59 59 except (HangupException, UnexpectedCommandError) as e:
60 60 raise exceptions.VcsException(e.message)
61 61 return wrapper
62 62
63 63
class Repo(DulwichRepo):
    """
    Thin wrapper around dulwich's Repo class.

    Dulwich sometimes keeps ``.idx`` file descriptors open, which
    eventually triggers "Too many open files". Closing the repo when the
    object is destroyed releases those descriptors.

    TODO: mikhail: please check if we need this wrapper after updating dulwich
    to 0.12.0 +
    """
    def __del__(self):
        # `object_store` may be absent if __init__ failed half-way
        if not hasattr(self, 'object_store'):
            return
        self.close()
79 79
class GitFactory(RepoFactory):

    def _create_repo(self, wire, create):
        # dulwich expects unicode paths
        return Repo(str_to_dulwich(wire['path']))
85 85
86 86
87 87 class GitRemote(object):
88 88
89 89 def __init__(self, factory):
90 90 self._factory = factory
91 91
92 92 self._bulk_methods = {
93 93 "author": self.commit_attribute,
94 94 "date": self.get_object_attrs,
95 95 "message": self.commit_attribute,
96 96 "parents": self.commit_attribute,
97 97 "_commit": self.revision,
98 98 }
99 99
100 100 def _assign_ref(self, wire, ref, commit_id):
101 101 repo = self._factory.repo(wire)
102 102 repo[ref] = commit_id
103 103
104 104 @reraise_safe_exceptions
105 105 def add_object(self, wire, content):
106 106 repo = self._factory.repo(wire)
107 107 blob = objects.Blob()
108 108 blob.set_raw_string(content)
109 109 repo.object_store.add_object(blob)
110 110 return blob.id
111 111
112 112 @reraise_safe_exceptions
113 113 def assert_correct_path(self, wire):
114 114 try:
115 115 self._factory.repo(wire)
116 116 except NotGitRepository as e:
117 117 # Exception can contain unicode which we convert
118 118 raise exceptions.AbortException(repr(e))
119 119
120 120 @reraise_safe_exceptions
121 121 def bare(self, wire):
122 122 repo = self._factory.repo(wire)
123 123 return repo.bare
124 124
125 125 @reraise_safe_exceptions
126 126 def blob_as_pretty_string(self, wire, sha):
127 127 repo = self._factory.repo(wire)
128 128 return repo[sha].as_pretty_string()
129 129
130 130 @reraise_safe_exceptions
131 131 def blob_raw_length(self, wire, sha):
132 132 repo = self._factory.repo(wire)
133 133 blob = repo[sha]
134 134 return blob.raw_length()
135 135
136 136 @reraise_safe_exceptions
137 137 def bulk_request(self, wire, rev, pre_load):
138 138 result = {}
139 139 for attr in pre_load:
140 140 try:
141 141 method = self._bulk_methods[attr]
142 142 args = [wire, rev]
143 143 if attr == "date":
144 144 args.extend(["commit_time", "commit_timezone"])
145 145 elif attr in ["author", "message", "parents"]:
146 146 args.append(attr)
147 147 result[attr] = method(*args)
148 148 except KeyError:
149 149 raise exceptions.VcsException(
150 150 "Unknown bulk attribute: %s" % attr)
151 151 return result
152 152
153 153 def _build_opener(self, url):
154 154 handlers = []
155 155 url_obj = hg_url(url)
156 156 _, authinfo = url_obj.authinfo()
157 157
158 158 if authinfo:
159 159 # create a password manager
160 160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 161 passmgr.add_password(*authinfo)
162 162
163 163 handlers.extend((httpbasicauthhandler(passmgr),
164 164 httpdigestauthhandler(passmgr)))
165 165
166 166 return urllib2.build_opener(*handlers)
167 167
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Check that `url` serves a cloneable git repository.

        Probes the smart-HTTP ``info/refs`` endpoint and also accepts the
        old dumb-protocol response format. Returns True on success;
        raises urllib2.URLError otherwise, with the password masked out
        of the URL quoted in the error message.
        """
        url_obj = hg_url(url)
        test_uri, _ = url_obj.authinfo()
        # never leak credentials into error messages
        url_obj.passwd = '*****'
        cleaned_uri = str(url_obj)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            resp = o.open(req)
            if resp.code != 200:
                raise Exception('Return Code is not 200')
        except Exception as e:
            # means it cannot be cloned
            raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise urllib2.URLError(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
206 206
207 207 @reraise_safe_exceptions
208 208 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
209 209 remote_refs = self.fetch(wire, url, apply_refs=False)
210 210 repo = self._factory.repo(wire)
211 211 if isinstance(valid_refs, list):
212 212 valid_refs = tuple(valid_refs)
213 213
214 214 for k in remote_refs:
215 215 # only parse heads/tags and skip so called deferred tags
216 216 if k.startswith(valid_refs) and not k.endswith(deferred):
217 217 repo[k] = remote_refs[k]
218 218
219 219 if update_after_clone:
220 220 # we want to checkout HEAD
221 221 repo["HEAD"] = remote_refs["HEAD"]
222 222 index.build_index_from_tree(repo.path, repo.index_path(),
223 223 repo.object_store, repo["HEAD"].tree)
224 224
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """Build a commit on `branch` from tree edits and return its id.

        `updated` is a list of dicts with 'path', 'node_path', 'mode' and
        'content'; `removed` is a list of paths to delete. `commit_data`
        is a dict of raw dulwich Commit attributes (author, message, ...).
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(
                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        # point the branch ref at the new commit
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
324 324
    @reraise_safe_exceptions
    def fetch(self, wire, url, apply_refs=True, refs=None):
        """Fetch objects from `url` into the wire repository.

        `refs`, when given, limits the fetch to the named refs. With
        `apply_refs=False` the remote refs dict is returned instead of
        being written into the repository.
        """
        # local paths use the local transport; everything else goes over HTTP
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = hg_url(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            raise exceptions.AbortException()

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            PEELED_REF_MARKER = '^{}'
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.info("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

            # TODO: mikhail: should we return remote_refs here to be
            # consistent?
        else:
            return remote_refs
375 375
376 376 @reraise_safe_exceptions
377 377 def get_remote_refs(self, wire, url):
378 378 repo = Repo(url)
379 379 return repo.get_refs()
380 380
381 381 @reraise_safe_exceptions
382 382 def get_description(self, wire):
383 383 repo = self._factory.repo(wire)
384 384 return repo.get_description()
385 385
386 386 @reraise_safe_exceptions
387 387 def get_file_history(self, wire, file_path, commit_id, limit):
388 388 repo = self._factory.repo(wire)
389 389 include = [commit_id]
390 390 paths = [file_path]
391 391
392 392 walker = repo.get_walker(include, paths=paths, max_entries=limit)
393 393 return [x.commit.id for x in walker]
394 394
395 395 @reraise_safe_exceptions
396 396 def get_missing_revs(self, wire, rev1, rev2, path2):
397 397 repo = self._factory.repo(wire)
398 398 LocalGitClient(thin_packs=False).fetch(path2, repo)
399 399
400 400 wire_remote = wire.copy()
401 401 wire_remote['path'] = path2
402 402 repo_remote = self._factory.repo(wire_remote)
403 403 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
404 404
405 405 revs = [
406 406 x.commit.id
407 407 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
408 408 return revs
409 409
410 410 @reraise_safe_exceptions
411 411 def get_object(self, wire, sha):
412 412 repo = self._factory.repo(wire)
413 413 obj = repo.get_object(sha)
414 414 commit_id = obj.id
415 415
416 416 if isinstance(obj, Tag):
417 417 commit_id = obj.object[1]
418 418
419 419 return {
420 420 'id': obj.id,
421 421 'type': obj.type_name,
422 422 'commit_id': commit_id
423 423 }
424 424
425 425 @reraise_safe_exceptions
426 426 def get_object_attrs(self, wire, sha, *attrs):
427 427 repo = self._factory.repo(wire)
428 428 obj = repo.get_object(sha)
429 429 return list(getattr(obj, a) for a in attrs)
430 430
431 431 @reraise_safe_exceptions
432 432 def get_refs(self, wire):
433 433 repo = self._factory.repo(wire)
434 434 result = {}
435 435 for ref, sha in repo.refs.as_dict().items():
436 436 peeled_sha = repo.get_peeled(ref)
437 437 result[ref] = peeled_sha
438 438 return result
439 439
440 440 @reraise_safe_exceptions
441 441 def get_refs_path(self, wire):
442 442 repo = self._factory.repo(wire)
443 443 return repo.refs.path
444 444
445 445 @reraise_safe_exceptions
446 446 def head(self, wire):
447 447 repo = self._factory.repo(wire)
448 448 return repo.head()
449 449
450 450 @reraise_safe_exceptions
451 451 def init(self, wire):
452 452 repo_path = str_to_dulwich(wire['path'])
453 453 self.repo = Repo.init(repo_path)
454 454
455 455 @reraise_safe_exceptions
456 456 def init_bare(self, wire):
457 457 repo_path = str_to_dulwich(wire['path'])
458 458 self.repo = Repo.init_bare(repo_path)
459 459
460 460 @reraise_safe_exceptions
461 461 def revision(self, wire, rev):
462 462 repo = self._factory.repo(wire)
463 463 obj = repo[rev]
464 464 obj_data = {
465 465 'id': obj.id,
466 466 }
467 467 try:
468 468 obj_data['tree'] = obj.tree
469 469 except AttributeError:
470 470 pass
471 471 return obj_data
472 472
473 473 @reraise_safe_exceptions
474 474 def commit_attribute(self, wire, rev, attr):
475 475 repo = self._factory.repo(wire)
476 476 obj = repo[rev]
477 477 return getattr(obj, attr)
478 478
479 479 @reraise_safe_exceptions
480 480 def set_refs(self, wire, key, value):
481 481 repo = self._factory.repo(wire)
482 482 repo.refs[key] = value
483 483
484 484 @reraise_safe_exceptions
485 485 def remove_ref(self, wire, key):
486 486 repo = self._factory.repo(wire)
487 487 del repo.refs[key]
488 488
489 489 @reraise_safe_exceptions
490 490 def tree_changes(self, wire, source_id, target_id):
491 491 repo = self._factory.repo(wire)
492 492 source = repo[source_id].tree if source_id else None
493 493 target = repo[target_id].tree
494 494 result = repo.object_store.tree_changes(source, target)
495 495 return list(result)
496 496
497 497 @reraise_safe_exceptions
498 498 def tree_items(self, wire, tree_id):
499 499 repo = self._factory.repo(wire)
500 500 tree = repo[tree_id]
501 501
502 502 result = []
503 503 for item in tree.iteritems():
504 504 item_sha = item.sha
505 505 item_mode = item.mode
506 506
507 507 if FILE_MODE(item_mode) == GIT_LINK:
508 508 item_type = "link"
509 509 else:
510 510 item_type = repo[item_sha].type_name
511 511
512 512 result.append((item.path, item_mode, item_sha, item_type))
513 513 return result
514 514
515 515 @reraise_safe_exceptions
516 516 def update_server_info(self, wire):
517 517 repo = self._factory.repo(wire)
518 518 update_server_info(repo)
519 519
520 520 @reraise_safe_exceptions
521 521 def discover_git_version(self):
522 522 stdout, _ = self.run_git_command(
523 523 {}, ['--version'], _bare=True, _safe=True)
524 prefix = 'git version'
525 if stdout.startswith(prefix):
526 stdout = stdout[len(prefix):]
524 527 return stdout
525 528
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """Run `cmd` through the git executable, return (stdout, stderr).

        Recognized special options (removed from `opts` before they reach
        the subprocess layer):
          _bare -- skip the default ``-c core.quotepath=false`` options
          _safe -- on failure return ('', err) instead of raising
          extra_env -- dict merged into the subprocess environment
        """
        path = wire.get('path', None)

        # run inside the repository when a usable path is given
        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        # keep the call independent of the user's global git config
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

        try:
            _opts = {'env': gitenv, 'shell': False}
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n" % (cmd, err))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException(tb_err)
567 570
568 571
def str_to_dulwich(value):
    """
    Decode a byte string to unicode using the wire encoding.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    return value.decode(settings.WIRE_ENCODING)
@@ -1,714 +1,719 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import sys
22 22 import urllib
23 23 import urllib2
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex, hg_url,
34 34 httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepository,
35 35 match, memctx, exchange, memfilectx, nullrev, patch, peer, revrange, ui,
36 36 Abort, LookupError, RepoError, RepoLookupError, InterventionRequired,
37 37 RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
def make_ui_from_config(repo_config):
    """Build a mercurial ui object from (section, option, value) triples."""
    baseui = ui.ui()

    # clean the baseui object so repo/user/global hgrc settings do not
    # leak into this remote call
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
69 69
70 70
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""
    # local import keeps this change self-contained; the git backend's
    # counterpart already uses functools.wraps, so this aligns the two.
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired):
            raise_from_original(exceptions.AbortException)
        except RepoLookupError:
            raise_from_original(exceptions.LookupException)
        except RequirementError:
            raise_from_original(exceptions.RequirementException)
        except RepoError:
            raise_from_original(exceptions.VcsException)
        except LookupError:
            raise_from_original(exceptions.LookupException)
        except Exception as e:
            # exceptions already translated by vcsserver carry _vcs_kind;
            # anything else is unexpected and gets logged before wrapping
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
92 92
93 93
def raise_from_original(new_type):
    """
    Raise a new exception type with original args and traceback.

    Must be called from inside an ``except`` block; re-raises the active
    exception as `new_type` using the Python 2 three-argument raise so the
    original traceback is preserved.
    """
    _, original, traceback = sys.exc_info()
    try:
        raise new_type(*original.args), None, traceback
    finally:
        # break the frame/traceback reference cycle
        del traceback
103 103
104 104
class MercurialFactory(RepoFactory):

    def _create_config(self, config, hooks=True):
        """Build a baseui from `config`, optionally stripping the
        rhodecode push/pull hooks."""
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [
                (section, option, value)
                for section, option, value in config
                if not (section == 'hooks' and option in hooks_to_clean)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return localrepository(baseui, wire["path"], create)
125 125
126 126
127 127 class HgRemote(object):
128 128
129 129 def __init__(self, factory):
130 130 self._factory = factory
131 131
132 132 self._bulk_methods = {
133 133 "affected_files": self.ctx_files,
134 134 "author": self.ctx_user,
135 135 "branch": self.ctx_branch,
136 136 "children": self.ctx_children,
137 137 "date": self.ctx_date,
138 138 "message": self.ctx_description,
139 139 "parents": self.ctx_parents,
140 140 "status": self.ctx_status,
141 141 "_file_paths": self.ctx_list,
142 142 }
143 143
    @reraise_safe_exceptions
    def discover_hg_version(self):
        """Return the version string of the mercurial library in use."""
        from mercurial import util
        return util.version()
148
149 @reraise_safe_exceptions
145 150 def archive_repo(self, archive_path, mtime, file_info, kind):
146 151 if kind == "tgz":
147 152 archiver = archival.tarit(archive_path, mtime, "gz")
148 153 elif kind == "tbz2":
149 154 archiver = archival.tarit(archive_path, mtime, "bz2")
150 155 elif kind == 'zip':
151 156 archiver = archival.zipit(archive_path, mtime)
152 157 else:
153 158 raise exceptions.ArchiveException(
154 159 'Remote does not support: "%s".' % kind)
155 160
156 161 for f_path, f_mode, f_is_link, f_content in file_info:
157 162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 163 archiver.done()
159 164
160 165 @reraise_safe_exceptions
161 166 def bookmarks(self, wire):
162 167 repo = self._factory.repo(wire)
163 168 return dict(repo._bookmarks)
164 169
165 170 @reraise_safe_exceptions
166 171 def branches(self, wire, normal, closed):
167 172 repo = self._factory.repo(wire)
168 173 iter_branches = repo.branchmap().iterbranches()
169 174 bt = {}
170 175 for branch_name, _heads, tip, is_closed in iter_branches:
171 176 if normal and not is_closed:
172 177 bt[branch_name] = tip
173 178 if closed and is_closed:
174 179 bt[branch_name] = tip
175 180
176 181 return bt
177 182
178 183 @reraise_safe_exceptions
179 184 def bulk_request(self, wire, rev, pre_load):
180 185 result = {}
181 186 for attr in pre_load:
182 187 try:
183 188 method = self._bulk_methods[attr]
184 189 result[attr] = method(wire, rev)
185 190 except KeyError:
186 191 raise exceptions.VcsException(
187 192 'Unknown bulk attribute: "%s"' % attr)
188 193 return result
189 194
    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        """Clone `source` into `dest`; working copy is updated only when
        `update_after_clone` is set, hooks stripped when `hooks` is False."""
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, source, dest, noupdate=not update_after_clone)
194 199
    @reraise_safe_exceptions
    def commitctx(
            self, wire, message, parents, commit_time, commit_timezone,
            user, files, extra, removed, updated):
        """Create an in-memory commit and return its hex node id.

        `updated` is a list of dicts with 'path', 'content' and 'mode';
        `removed` is a list of paths to delete.
        """

        def _filectxfn(_repo, memctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copied=False,
                        memctx=memctx)

            raise exceptions.AbortException(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        repo = self._factory.repo(wire)

        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=message,
            files=files,
            filectxfn=_filectxfn,
            user=user,
            date=(commit_time, commit_timezone),
            extra=extra)

        # actually write the commit and return its hash
        n = repo.commitctx(commit_ctx)
        new_id = hex(n)

        return new_id
243 248
244 249 @reraise_safe_exceptions
245 250 def ctx_branch(self, wire, revision):
246 251 repo = self._factory.repo(wire)
247 252 ctx = repo[revision]
248 253 return ctx.branch()
249 254
250 255 @reraise_safe_exceptions
251 256 def ctx_children(self, wire, revision):
252 257 repo = self._factory.repo(wire)
253 258 ctx = repo[revision]
254 259 return [child.rev() for child in ctx.children()]
255 260
256 261 @reraise_safe_exceptions
257 262 def ctx_date(self, wire, revision):
258 263 repo = self._factory.repo(wire)
259 264 ctx = repo[revision]
260 265 return ctx.date()
261 266
262 267 @reraise_safe_exceptions
263 268 def ctx_description(self, wire, revision):
264 269 repo = self._factory.repo(wire)
265 270 ctx = repo[revision]
266 271 return ctx.description()
267 272
268 273 @reraise_safe_exceptions
269 274 def ctx_diff(
270 275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 276 repo = self._factory.repo(wire)
272 277 ctx = repo[revision]
273 278 result = ctx.diff(
274 279 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 280 return list(result)
276 281
277 282 @reraise_safe_exceptions
278 283 def ctx_files(self, wire, revision):
279 284 repo = self._factory.repo(wire)
280 285 ctx = repo[revision]
281 286 return ctx.files()
282 287
283 288 @reraise_safe_exceptions
284 289 def ctx_list(self, path, revision):
285 290 repo = self._factory.repo(path)
286 291 ctx = repo[revision]
287 292 return list(ctx)
288 293
289 294 @reraise_safe_exceptions
290 295 def ctx_parents(self, wire, revision):
291 296 repo = self._factory.repo(wire)
292 297 ctx = repo[revision]
293 298 return [parent.rev() for parent in ctx.parents()]
294 299
295 300 @reraise_safe_exceptions
296 301 def ctx_substate(self, wire, revision):
297 302 repo = self._factory.repo(wire)
298 303 ctx = repo[revision]
299 304 return ctx.substate
300 305
    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        """Return the status of `revision` relative to its first parent."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # object of status (odd, custom named tuple in mercurial) is not
        # correctly serializable via Pyro, we make it a list, as the
        # underlying API expects this to be a list
        return list(status)
310 315
311 316 @reraise_safe_exceptions
312 317 def ctx_user(self, wire, revision):
313 318 repo = self._factory.repo(wire)
314 319 ctx = repo[revision]
315 320 return ctx.user()
316 321
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Check that `url` points at a reachable Mercurial repository.

        Supports a ``proto+`` prefix (e.g. ``svn+http://``); for the svn
        protocol only reachability is checked, otherwise the URL must
        answer as a real hg repo. Returns True on success; raises
        exceptions.URLError (with the password masked in the reported
        URL) otherwise.
        """
        log.info("Checking URL for remote cloning/import: %s", url)
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = hg_url(url)
        test_uri, authinfo = url_obj.authinfo()
        # never leak credentials into error messages
        url_obj.passwd = '*****'
        cleaned_uri = str(url_obj)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # issue a cheap "between" command to probe the wire protocol
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", url)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", url, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s", url)
                httppeer(make_ui_from_config(config), url).lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s", url)
            raise exceptions.URLError(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", url)
        return True
375 380
376 381 @reraise_safe_exceptions
377 382 def diff(
378 383 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
379 384 context):
380 385 repo = self._factory.repo(wire)
381 386
382 387 if file_filter:
383 388 filter = match(file_filter[0], '', [file_filter[1]])
384 389 else:
385 390 filter = file_filter
386 391 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
387 392
388 393 try:
389 394 return "".join(patch.diff(
390 395 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
391 396 except RepoLookupError:
392 397 raise exceptions.LookupException()
393 398
394 399 @reraise_safe_exceptions
395 400 def file_history(self, wire, revision, path, limit):
396 401 repo = self._factory.repo(wire)
397 402
398 403 ctx = repo[revision]
399 404 fctx = ctx.filectx(path)
400 405
401 406 def history_iter():
402 407 limit_rev = fctx.rev()
403 408 for obj in reversed(list(fctx.filelog())):
404 409 obj = fctx.filectx(obj)
405 410 if limit_rev >= obj.rev():
406 411 yield obj
407 412
408 413 history = []
409 414 for cnt, obj in enumerate(history_iter()):
410 415 if limit and cnt >= limit:
411 416 break
412 417 history.append(hex(obj.node()))
413 418
414 419 return [x for x in history]
415 420
416 421 @reraise_safe_exceptions
417 422 def file_history_untill(self, wire, revision, path, limit):
418 423 repo = self._factory.repo(wire)
419 424 ctx = repo[revision]
420 425 fctx = ctx.filectx(path)
421 426
422 427 file_log = list(fctx.filelog())
423 428 if limit:
424 429 # Limit to the last n items
425 430 file_log = file_log[-limit:]
426 431
427 432 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
428 433
429 434 @reraise_safe_exceptions
430 435 def fctx_annotate(self, wire, revision, path):
431 436 repo = self._factory.repo(wire)
432 437 ctx = repo[revision]
433 438 fctx = ctx.filectx(path)
434 439
435 440 result = []
436 441 for i, annotate_data in enumerate(fctx.annotate()):
437 442 ln_no = i + 1
438 443 sha = hex(annotate_data[0].node())
439 444 result.append((ln_no, sha, annotate_data[1]))
440 445 return result
441 446
442 447 @reraise_safe_exceptions
443 448 def fctx_data(self, wire, revision, path):
444 449 repo = self._factory.repo(wire)
445 450 ctx = repo[revision]
446 451 fctx = ctx.filectx(path)
447 452 return fctx.data()
448 453
449 454 @reraise_safe_exceptions
450 455 def fctx_flags(self, wire, revision, path):
451 456 repo = self._factory.repo(wire)
452 457 ctx = repo[revision]
453 458 fctx = ctx.filectx(path)
454 459 return fctx.flags()
455 460
456 461 @reraise_safe_exceptions
457 462 def fctx_size(self, wire, revision, path):
458 463 repo = self._factory.repo(wire)
459 464 ctx = repo[revision]
460 465 fctx = ctx.filectx(path)
461 466 return fctx.size()
462 467
463 468 @reraise_safe_exceptions
464 469 def get_all_commit_ids(self, wire, name):
465 470 repo = self._factory.repo(wire)
466 471 revs = repo.filtered(name).changelog.index
467 472 return map(lambda x: hex(x[7]), revs)[:-1]
468 473
469 474 @reraise_safe_exceptions
470 475 def get_config_value(self, wire, section, name, untrusted=False):
471 476 repo = self._factory.repo(wire)
472 477 return repo.ui.config(section, name, untrusted=untrusted)
473 478
474 479 @reraise_safe_exceptions
475 480 def get_config_bool(self, wire, section, name, untrusted=False):
476 481 repo = self._factory.repo(wire)
477 482 return repo.ui.configbool(section, name, untrusted=untrusted)
478 483
479 484 @reraise_safe_exceptions
480 485 def get_config_list(self, wire, section, name, untrusted=False):
481 486 repo = self._factory.repo(wire)
482 487 return repo.ui.configlist(section, name, untrusted=untrusted)
483 488
484 489 @reraise_safe_exceptions
485 490 def is_large_file(self, wire, path):
486 491 return largefiles.lfutil.isstandin(path)
487 492
488 493 @reraise_safe_exceptions
489 494 def in_store(self, wire, sha):
490 495 repo = self._factory.repo(wire)
491 496 return largefiles.lfutil.instore(repo, sha)
492 497
493 498 @reraise_safe_exceptions
494 499 def in_user_cache(self, wire, sha):
495 500 repo = self._factory.repo(wire)
496 501 return largefiles.lfutil.inusercache(repo.ui, sha)
497 502
498 503 @reraise_safe_exceptions
499 504 def store_path(self, wire, sha):
500 505 repo = self._factory.repo(wire)
501 506 return largefiles.lfutil.storepath(repo, sha)
502 507
503 508 @reraise_safe_exceptions
504 509 def link(self, wire, sha, path):
505 510 repo = self._factory.repo(wire)
506 511 largefiles.lfutil.link(
507 512 largefiles.lfutil.usercachepath(repo.ui, sha), path)
508 513
509 514 @reraise_safe_exceptions
510 515 def localrepository(self, wire, create=False):
511 516 self._factory.repo(wire, create=create)
512 517
513 518 @reraise_safe_exceptions
514 519 def lookup(self, wire, revision, both):
515 520 # TODO Paris: Ugly hack to "deserialize" long for msgpack
516 521 if isinstance(revision, float):
517 522 revision = long(revision)
518 523 repo = self._factory.repo(wire)
519 524 try:
520 525 ctx = repo[revision]
521 526 except RepoLookupError:
522 527 raise exceptions.LookupException(revision)
523 528 except LookupError as e:
524 529 raise exceptions.LookupException(e.name)
525 530
526 531 if not both:
527 532 return ctx.hex()
528 533
529 534 ctx = repo[ctx.hex()]
530 535 return ctx.hex(), ctx.rev()
531 536
532 537 @reraise_safe_exceptions
533 538 def pull(self, wire, url, commit_ids=None):
534 539 repo = self._factory.repo(wire)
535 540 remote = peer(repo, {}, url)
536 541 if commit_ids:
537 542 commit_ids = [bin(commit_id) for commit_id in commit_ids]
538 543
539 544 return exchange.pull(
540 545 repo, remote, heads=commit_ids, force=None).cgresult
541 546
542 547 @reraise_safe_exceptions
543 548 def revision(self, wire, rev):
544 549 repo = self._factory.repo(wire)
545 550 ctx = repo[rev]
546 551 return ctx.rev()
547 552
548 553 @reraise_safe_exceptions
549 554 def rev_range(self, wire, filter):
550 555 repo = self._factory.repo(wire)
551 556 revisions = [rev for rev in revrange(repo, filter)]
552 557 return revisions
553 558
554 559 @reraise_safe_exceptions
555 560 def rev_range_hash(self, wire, node):
556 561 repo = self._factory.repo(wire)
557 562
558 563 def get_revs(repo, rev_opt):
559 564 if rev_opt:
560 565 revs = revrange(repo, rev_opt)
561 566 if len(revs) == 0:
562 567 return (nullrev, nullrev)
563 568 return max(revs), min(revs)
564 569 else:
565 570 return len(repo) - 1, 0
566 571
567 572 stop, start = get_revs(repo, [node + ':'])
568 573 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
569 574 return revs
570 575
571 576 @reraise_safe_exceptions
572 577 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
573 578 other_path = kwargs.pop('other_path', None)
574 579
575 580 # case when we want to compare two independent repositories
576 581 if other_path and other_path != wire["path"]:
577 582 baseui = self._factory._create_config(wire["config"])
578 583 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
579 584 else:
580 585 repo = self._factory.repo(wire)
581 586 return list(repo.revs(rev_spec, *args))
582 587
583 588 @reraise_safe_exceptions
584 589 def strip(self, wire, revision, update, backup):
585 590 repo = self._factory.repo(wire)
586 591 ctx = repo[revision]
587 592 hgext_strip(
588 593 repo.baseui, repo, ctx.node(), update=update, backup=backup)
589 594
590 595 @reraise_safe_exceptions
591 596 def tag(self, wire, name, revision, message, local, user,
592 597 tag_time, tag_timezone):
593 598 repo = self._factory.repo(wire)
594 599 ctx = repo[revision]
595 600 node = ctx.node()
596 601
597 602 date = (tag_time, tag_timezone)
598 603 try:
599 604 repo.tag(name, node, message, local, user, date)
600 605 except Abort:
601 606 log.exception("Tag operation aborted")
602 607 raise exceptions.AbortException()
603 608
604 609 @reraise_safe_exceptions
605 610 def tags(self, wire):
606 611 repo = self._factory.repo(wire)
607 612 return repo.tags()
608 613
609 614 @reraise_safe_exceptions
610 615 def update(self, wire, node=None, clean=False):
611 616 repo = self._factory.repo(wire)
612 617 baseui = self._factory._create_config(wire['config'])
613 618 commands.update(baseui, repo, node=node, clean=clean)
614 619
615 620 @reraise_safe_exceptions
616 621 def identify(self, wire):
617 622 repo = self._factory.repo(wire)
618 623 baseui = self._factory._create_config(wire['config'])
619 624 output = io.BytesIO()
620 625 baseui.write = output.write
621 626 # This is required to get a full node id
622 627 baseui.debugflag = True
623 628 commands.identify(baseui, repo, id=True)
624 629
625 630 return output.getvalue()
626 631
627 632 @reraise_safe_exceptions
628 633 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
629 634 hooks=True):
630 635 repo = self._factory.repo(wire)
631 636 baseui = self._factory._create_config(wire['config'], hooks=hooks)
632 637
633 638 # Mercurial internally has a lot of logic that checks ONLY if
634 639 # option is defined, we just pass those if they are defined then
635 640 opts = {}
636 641 if bookmark:
637 642 opts['bookmark'] = bookmark
638 643 if branch:
639 644 opts['branch'] = branch
640 645 if revision:
641 646 opts['rev'] = revision
642 647
643 648 commands.pull(baseui, repo, source, **opts)
644 649
645 650 @reraise_safe_exceptions
646 651 def heads(self, wire, branch=None):
647 652 repo = self._factory.repo(wire)
648 653 baseui = self._factory._create_config(wire['config'])
649 654 output = io.BytesIO()
650 655
651 656 def write(data, **unused_kwargs):
652 657 output.write(data)
653 658
654 659 baseui.write = write
655 660 if branch:
656 661 args = [branch]
657 662 else:
658 663 args = []
659 664 commands.heads(baseui, repo, template='{node} ', *args)
660 665
661 666 return output.getvalue()
662 667
663 668 @reraise_safe_exceptions
664 669 def ancestor(self, wire, revision1, revision2):
665 670 repo = self._factory.repo(wire)
666 671 baseui = self._factory._create_config(wire['config'])
667 672 output = io.BytesIO()
668 673 baseui.write = output.write
669 674 commands.debugancestor(baseui, repo, revision1, revision2)
670 675
671 676 return output.getvalue()
672 677
673 678 @reraise_safe_exceptions
674 679 def push(self, wire, revisions, dest_path, hooks=True,
675 680 push_branches=False):
676 681 repo = self._factory.repo(wire)
677 682 baseui = self._factory._create_config(wire['config'], hooks=hooks)
678 683 commands.push(baseui, repo, dest=dest_path, rev=revisions,
679 684 new_branch=push_branches)
680 685
681 686 @reraise_safe_exceptions
682 687 def merge(self, wire, revision):
683 688 repo = self._factory.repo(wire)
684 689 baseui = self._factory._create_config(wire['config'])
685 690 repo.ui.setconfig('ui', 'merge', 'internal:dump')
686 691
687 692 # In case of sub repositories are used mercurial prompts the user in
688 693 # case of merge conflicts or different sub repository sources. By
689 694 # setting the interactive flag to `False` mercurial doesn't prompt the
690 695 # used but instead uses a default value.
691 696 repo.ui.setconfig('ui', 'interactive', False)
692 697
693 698 commands.merge(baseui, repo, rev=revision)
694 699
695 700 @reraise_safe_exceptions
696 701 def commit(self, wire, message, username):
697 702 repo = self._factory.repo(wire)
698 703 baseui = self._factory._create_config(wire['config'])
699 704 repo.ui.setconfig('ui', 'username', username)
700 705 commands.commit(baseui, repo, message=message)
701 706
702 707 @reraise_safe_exceptions
703 708 def rebase(self, wire, source=None, dest=None, abort=False):
704 709 repo = self._factory.repo(wire)
705 710 baseui = self._factory._create_config(wire['config'])
706 711 repo.ui.setconfig('ui', 'merge', 'internal:dump')
707 712 rebase.rebase(
708 713 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
709 714
710 715 @reraise_safe_exceptions
711 716 def bookmark(self, wire, bookmark, revision=None):
712 717 repo = self._factory.repo(wire)
713 718 baseui = self._factory._create_config(wire['config'])
714 719 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,591 +1,625 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 from urllib2 import URLError
21 21 import logging
22 22 import posixpath as vcspath
23 23 import StringIO
24 24 import subprocess
25 25 import urllib
26 26
27 27 import svn.client
28 28 import svn.core
29 29 import svn.delta
30 30 import svn.diff
31 31 import svn.fs
32 32 import svn.repos
33 33
34 34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 36 from vcsserver.base import RepoFactory
36 37
37 38
38 39 log = logging.getLogger(__name__)
39 40
40 41
# Set of svn compatible version flags.
# Compare with subversion/svnadmin/svnadmin.c
svn_compatible_versions = {
    'pre-1.4-compatible',
    'pre-1.5-compatible',
    'pre-1.6-compatible',
    'pre-1.8-compatible',
}
49 50
50 51
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Exceptions raised deliberately by the vcsserver carry a ``_vcs_kind``
    marker and are re-raised untouched; anything else is logged and
    converted to ``exceptions.UnhandledException`` so svn internals do
    not leak across the wire.
    """
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                # Fixed: the message used to say "hg remote call",
                # copied from the mercurial module; this is the svn one.
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
63
64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
class SubversionFactory(RepoFactory):
    """Factory producing ``svn.repos`` handles for a given wire."""

    def _create_repo(self, wire, create, compatible_version):
        """Open the repository at ``wire['path']``, creating it on demand.

        ``compatible_version`` must be one of ``svn_compatible_versions``
        when given.
        """
        path = svn.core.svn_path_canonicalize(wire['path'])
        if not create:
            return svn.repos.open(path)

        fs_config = {}
        if compatible_version:
            if compatible_version not in svn_compatible_versions:
                raise Exception('Unknown SVN compatible version "{}"'
                                .format(compatible_version))
            log.debug('Create SVN repo with compatible version "%s"',
                      compatible_version)
            fs_config[compatible_version] = '1'
        return svn.repos.create(path, "", "", None, fs_config)

    def repo(self, wire, create=False, compatible_version=None):
        """Return the (possibly cached) repository for ``wire``."""
        def create_new_repo():
            return self._create_repo(wire, create, compatible_version)

        return self._repo(wire, create_new_repo)
74 99
75 100
76 101
# Maps svn node kinds onto the node-type names used by the RhodeCode
# node abstraction; kinds not listed here resolve to None.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
81 106
82 107
class SvnRemote(object):
    """Remote adapter exposing Subversion repository operations."""

    def __init__(self, factory, hg_factory=None):
        self._factory = factory
        # TODO: Remove once we do not use internal Mercurial objects anymore
        # for subversion
        self._hg_factory = hg_factory

    @reraise_safe_exceptions
    def discover_svn_version(self):
        """Return the version string of the svn bindings, or None when
        they cannot be imported."""
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None
        return svn_ver

    def check_url(self, url, config_items):
        """Verify that ``url`` points at a reachable Subversion source.

        Raises ``URLError`` otherwise; returns True on success.
        """
        # this can throw exception if not installed, but we detect this
        from hgsubversion import svnrepo

        baseui = self._hg_factory._create_config(config_items)
        # uuid function get's only valid UUID from proper repo, else
        # throws exception
        try:
            svnrepo.svnremoterepo(baseui, url).svn.uuid
        except Exception:
            # Fixed: was a bare ``except:`` which also swallowed
            # SystemExit and KeyboardInterrupt.
            log.debug("Invalid svn url: %s", url)
            raise URLError(
                '"%s" is not a valid Subversion source url.' % (url, ))
        return True

    def is_path_valid_repository(self, wire, path):
        """Tell whether ``path`` can be opened as a svn repository."""
        try:
            svn.repos.open(path)
        except svn.core.SubversionException:
            log.debug("Invalid Subversion path %s", path)
            return False
        return True

    def lookup(self, wire, revision):
        """Resolve HEAD (-1/None/'HEAD') to the youngest revision number.

        Other revision specs are not supported.
        """
        if revision not in [-1, None, 'HEAD']:
            raise NotImplementedError
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        head = svn.fs.youngest_rev(fs_ptr)
        return head

    def lookup_interval(self, wire, start_ts, end_ts):
        """Map a ``[start_ts, end_ts]`` timestamp range to an inclusive
        (start_rev, end_rev) revision range."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        start_rev = None
        end_rev = None
        if start_ts:
            start_ts_svn = apr_time_t(start_ts)
            # dated_revision returns the last rev at/before the stamp.
            start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
        else:
            start_rev = 1
        if end_ts:
            end_ts_svn = apr_time_t(end_ts)
            end_rev = svn.repos.dated_revision(repo, end_ts_svn)
        else:
            end_rev = svn.fs.youngest_rev(fsobj)
        return start_rev, end_rev

    def revision_properties(self, wire, revision):
        """Return the revision property list of ``revision``."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        return svn.fs.revision_proplist(fs_ptr, revision)

    def revision_changes(self, wire, revision):
        """Return the added/changed/removed file paths of ``revision``
        as a dict with those three keys."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        # Replay the revision through a ChangeCollector to gather the
        # per-path change records.
        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.iteritems():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.
            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action == svn.repos.CHANGE_ACTION_ADD:
                added.append(path)
            elif change.action == svn.repos.CHANGE_ACTION_MODIFY:
                changed.append(path)
            elif change.action == svn.repos.CHANGE_ACTION_DELETE:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action %s not supported on path %s" % (
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes

    def node_history(self, wire, path, revision, limit):
        """Return up to ``limit`` revision numbers that touched ``path``,
        newest first (copies are not followed)."""
        cross_copies = False
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        history_revisions = []
        history = svn.fs.node_history(rev_root, path)
        history = svn.fs.history_prev(history, cross_copies)
        while history:
            __, node_revision = svn.fs.history_location(history)
            history_revisions.append(node_revision)
            if limit and len(history_revisions) >= limit:
                break
            history = svn.fs.history_prev(history, cross_copies)
        return history_revisions

    def node_properties(self, wire, path, revision):
        """Return the svn properties of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)
        return svn.fs.node_proplist(rev_root, path)

    def file_annotate(self, wire, path, revision):
        """Return (line_no, revision, line) annotation tuples for
        ``path`` at ``revision``."""
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations

    def get_node_type(self, wire, path, rev=None):
        """Return 'file'/'dir' (or None) for ``path`` at ``rev``/HEAD."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_rev(fs_ptr)
        root = svn.fs.revision_root(fs_ptr, rev)
        node = svn.fs.check_path(root, path)
        return NODE_TYPE_MAPPING.get(node, None)

    def get_nodes(self, wire, path, revision=None):
        """List (name, node_type) entries of directory ``path``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_rev(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        entries = svn.fs.dir_entries(root, path)
        result = []
        for entry_path, entry_info in entries.iteritems():
            result.append(
                (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
        return result

    def get_file_content(self, wire, path, rev=None):
        """Return the raw content of ``path`` at ``rev``/HEAD."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()

    def get_file_size(self, wire, path, revision=None):
        """Return the size in bytes of ``path`` at ``revision``/HEAD."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        size = svn.fs.file_length(root, path)
        return size

    def create_repository(self, wire, compatible_version=None):
        """Create a new repository at ``wire['path']``."""
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)

    def import_remote_repository(self, wire, src_url):
        """Stream an ``svnrdump`` of ``src_url`` into the local repo via
        ``svnadmin load``. Raises on dump or load failure."""
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)
        # TODO: johbo: URL checks ?
        rdump = subprocess.Popen(
            ['svnrdump', 'dump', '--non-interactive', src_url],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'
            raise Exception(
                'Failed to dump the remote repository from %s.' % src_url,
                reason)
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

    def commit(self, wire, message, author, timestamp, updated, removed):
        """Apply ``updated``/``removed`` node changes in one transaction
        and return the new revision number."""
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # Override svn:date so the commit carries the requested stamp.
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):
        """Return a git-style unified diff between two revisions."""
        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        return diff_creator.generate_diff()
347 381
348 382
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set to True while a binary node is being processed; suppresses
    # line-based diff output for that node.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Diffing a file against a directory is not supported; only
        # matching (or empty) node kinds are accepted.
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the complete diff text for the configured node pair."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect the per-path changes between both roots, then emit one
        # node diff per changed path in sorted order.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # A missing node on either side turns the diff into an add/delete.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)
        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")
            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")
            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Prefer the target side; fall back to the source for deletions.
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        # Only readable file-like nodes produce lines; binary handling
        # short-circuits to an empty list.
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
491 525
492 526
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # Accumulates (path, node_type, action) tuples.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
511 545
512 546
def authorization_callback_allow_all(root, path, pool):
    """
    Authz callback that unconditionally grants access to every path.

    Matches the signature expected by the svn authorization hooks; all
    arguments are ignored.
    """
    return True
515 549
516 550
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the
    method `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)
        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create missing parents, ensure the file exists, then write its
        content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk upwards until an existing directory is found, then create
        # the missing ones top-down.
        missing = []
        parent = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(parent):
            missing.insert(0, parent)
            parent = vcspath.dirname(parent)

        for directory in missing:
            log.debug('Creating missing directory "%s"', directory)
            svn.fs.make_dir(self.txn_root, directory)

    def _svn_path_exists(self, path):
        # Any node kind other than svn_node_none means the path exists.
        return svn.fs.check_path(self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        node_kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if node_kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for name, value in self.node.get('properties', {}).iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], name, value)
573 607
574 608
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t,
    i.e. microseconds since the epoch.
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
580 614
581 615
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure and return a
    `svn_opt_revision_t` that pins that exact revision number.
    """
    # NOTE: the value struct must be fully populated before it is
    # assigned to the revision — SWIG struct assignment may copy.
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = rev_value
    return rev
General Comments 0
You need to be logged in to leave comments. Login now