##// END OF EJS Templates
kill trailing whitespace
Dirkjan Ochtman -
r9312:c5f0825c default
parent child Browse files
Show More
@@ -1,1136 +1,1136
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4
5 5 import os
6 6 import re
7 7 import sys
8 8 import cPickle as pickle
9 9 import tempfile
10 10 import urllib
11 11
12 12 from mercurial import strutil, util, encoding
13 13 from mercurial.i18n import _
14 14
15 15 # Subversion stuff. Works best with very recent Python SVN bindings
16 16 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 17 # these bindings.
18 18
19 19 from cStringIO import StringIO
20 20
21 21 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 22 from common import commandline, converter_source, converter_sink, mapfile
23 23
24 24 try:
25 25 from svn.core import SubversionException, Pool
26 26 import svn
27 27 import svn.client
28 28 import svn.core
29 29 import svn.ra
30 30 import svn.delta
31 31 import transport
32 32 import warnings
33 33 warnings.filterwarnings('ignore',
34 34 module='svn.core',
35 35 category=DeprecationWarning)
36 36
37 37 except ImportError:
38 38 pass
39 39
class SvnPathNotFound(Exception):
    """Raised when a path cannot be found in the repository up to a
    given revision (see svn_source.latest)."""
    pass
42 42
def geturl(path):
    """Best-effort conversion of a repository path into a Subversion URL."""
    try:
        canonical = svn.core.svn_path_canonicalize(path)
        return svn.client.url_from_path(canonical)
    except SubversionException:
        pass
    if not os.path.isdir(path):
        return path
    path = os.path.normpath(os.path.abspath(path))
    if os.name == 'nt':
        path = '/' + util.normpath(path)
    # The module URL is compared later with the repository URL returned
    # by the svn API, which is UTF-8.
    path = encoding.tolocal(path)
    return 'file://%s' % urllib.quote(path)
57 57
def optrev(number):
    """Wrap a plain revision number in an svn_opt_revision_t structure."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
63 63
class changedpath(object):
    """Plain copy of the interesting fields of an svn changed-path entry,
    detached from the pool-allocated original so it can be pickled."""
    def __init__(self, p):
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
69 69
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Stream the svn log for paths to fp as pickled entries.

    Runs in a child process (see debugsvnlog). Each entry is a
    (changed-paths-dict, revnum, author, date, message) tuple; a final
    pickled None marks a clean end, while a bare error number signals
    that a SubversionException occurred in the child.
    """
    # protocol -1 selects the highest (binary) pickle protocol
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        # Replace pool-backed changed-path objects with picklable copies
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
100 100
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # Arguments for get_log_child arrive encoded on stdin; its pickled
    # output goes straight to stdout for the parent's logstream.
    for fp in (sys.stdin, sys.stdout):
        util.set_binary(fp)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
109 109
class logstream(object):
    """Interruptible iterator over pickled log entries from a child process.

    The stream ends with a pickled None; any other entry that is not a
    5-tuple is an error code forwarded by the child.
    """
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        while True:
            entry = pickle.load(self._stdout)
            if entry is None:
                # Normal end-of-log marker written by the child.
                break
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        stream, self._stdout = self._stdout, None
        if stream:
            stream.close()
130 130
131 131
132 132 # Check to see if the given path is a local Subversion repo. Verify this by
133 133 # looking for several svn-specific files and directories in the given
134 134 # directory.
def filecheck(path, proto):
    """Return True if path holds the svn-specific files and directories
    expected at the root of a local Subversion repository."""
    required = ('locks', 'hooks', 'format', 'db')
    return all(os.path.exists(os.path.join(path, name)) for name in required)
140 140
141 141 # Check to see if a given path is the root of an svn repo over http. We verify
142 142 # this by requesting a version-controlled URL we know can't exist and looking
143 143 # for the svn-specific "not found" XML.
def httpcheck(path, proto):
    """Return True if path is the root of an svn repository served over
    http(s), detected by probing a URL that cannot exist and checking
    for subversion's "not found" XML response."""
    probe = '%s://%s/!svn/ver/0/.svn' % (proto, path)
    body = urllib.urlopen(probe).read()
    return '<m:human-readable errcode="160013">' in body
147 147
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }

def issvnurl(url):
    """Return True if url points somewhere inside a Subversion repository,
    walking up the path until a repository root is recognized."""
    try:
        proto, path = url.split('://', 1)
        path = urllib.url2pathname(path)
    except ValueError:
        proto = 'file'
        path = os.path.abspath(url)
    path = path.replace(os.sep, '/')
    checker = protomap.get(proto)
    if checker is None:
        # Unknown scheme: nothing to probe.
        return False
    while '/' in path:
        if checker(path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
166 166
167 167 # SVN conversion code stolen from bzr-svn and tailor
168 168 #
169 169 # Subversion looks like a versioned filesystem, branches structures
170 170 # are defined by conventions and not enforced by the tool. First,
171 171 # we define the potential branches (modules) as "trunk" and "branches"
172 172 # children directories. Revisions are then identified by their
173 173 # module and revision number (and a repository identifier).
174 174 #
175 175 # The revision graph is really a tree (or a forest). By default, a
176 176 # revision parent is the previous revision in the same module. If the
177 177 # module directory is copied/moved from another module then the
178 178 # revision is the module root and its parent the source revision in
179 179 # the parent module. A revision has at most one parent.
180 180 #
181 181 class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Probe url as a Subversion repository and initialize the source.

        Raises NoRepo if url does not look like a Subversion repository,
        MissingTool if usable svn python bindings are absent, and
        util.Abort on invalid revision arguments.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(url)):
            raise NoRepo("%s does not look like a Subversion repo" % url)

        # SubversionException only exists if the top-level svn import
        # succeeded; NameError means the bindings are unavailable.
        try:
            SubversionException
        except NameError:
            raise MissingTool(_('Subversion python bindings could not be loaded'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at+1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo("%s does not look like a Subversion repo" % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        # Optional lower bound on converted revisions; negative values
        # are clamped to 0 (convert everything).
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        # Remember the working copy path, if any, so converted() can
        # record a shamap alongside it.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
264 264
265 265 def setrevmap(self, revmap):
266 266 lastrevs = {}
267 267 for revid in revmap.iterkeys():
268 268 uuid, module, revnum = self.revsplit(revid)
269 269 lastrevnum = lastrevs.setdefault(module, revnum)
270 270 if revnum > lastrevnum:
271 271 lastrevs[module] = revnum
272 272 self.lastrevs = lastrevs
273 273
274 274 def exists(self, path, optrev):
275 275 try:
276 276 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
277 277 optrev, False, self.ctx)
278 278 return True
279 279 except SubversionException:
280 280 return False
281 281
282 282 def getheads(self):
283 283
284 284 def isdir(path, revnum):
285 285 kind = self._checkpath(path, revnum)
286 286 return kind == svn.core.svn_node_dir
287 287
288 288 def getcfgpath(name, rev):
289 289 cfgpath = self.ui.config('convert', 'svn.' + name)
290 290 if cfgpath is not None and cfgpath.strip() == '':
291 291 return None
292 292 path = (cfgpath or name).strip('/')
293 293 if not self.exists(path, rev):
294 294 if cfgpath:
295 295 raise util.Abort(_('expected %s to be at %r, but not found')
296 296 % (name, path))
297 297 return None
298 298 self.ui.note(_('found %s at %r\n') % (name, path))
299 299 return path
300 300
301 301 rev = optrev(self.last_changed)
302 302 oldmodule = ''
303 303 trunk = getcfgpath('trunk', rev)
304 304 self.tags = getcfgpath('tags', rev)
305 305 branches = getcfgpath('branches', rev)
306 306
307 307 # If the project has a trunk or branches, we will extract heads
308 308 # from them. We keep the project root otherwise.
309 309 if trunk:
310 310 oldmodule = self.module or ''
311 311 self.module += '/' + trunk
312 312 self.head = self.latest(self.module, self.last_changed)
313 313 if not self.head:
314 raise util.Abort(_('no revision found in module %s')
314 raise util.Abort(_('no revision found in module %s')
315 315 % self.module)
316 316
317 317 # First head in the list is the module's head
318 318 self.heads = [self.head]
319 319 if self.tags is not None:
320 320 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
321 321
322 322 # Check if branches bring a few more heads to the list
323 323 if branches:
324 324 rpath = self.url.strip('/')
325 325 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
326 326 rev, False, self.ctx)
327 327 for branch in branchnames.keys():
328 328 module = '%s/%s/%s' % (oldmodule, branches, branch)
329 329 if not isdir(module, self.last_changed):
330 330 continue
331 331 brevid = self.latest(module, self.last_changed)
332 332 if not brevid:
333 333 self.ui.note(_('ignoring empty branch %s\n') % branch)
334 334 continue
335 335 self.ui.note(_('found branch %s at %d\n') %
336 336 (branch, self.revnum(brevid)))
337 337 self.heads.append(brevid)
338 338
339 339 if self.startrev and self.heads:
340 340 if len(self.heads) > 1:
341 341 raise util.Abort(_('svn: start revision is not supported '
342 342 'with more than one branch'))
343 343 revnum = self.revnum(self.heads[0])
344 344 if revnum < self.startrev:
345 345 raise util.Abort(_('svn: no revision found after start revision %d')
346 346 % self.startrev)
347 347
348 348 return self.heads
349 349
350 350 def getfile(self, file, rev):
351 351 data, mode = self._getfile(file, rev)
352 352 self.modecache[(file, rev)] = mode
353 353 return data
354 354
    def getmode(self, file, rev):
        # The mode was cached by an earlier getfile() call for (file, rev).
        return self.modecache[(file, rev)]
357 357
358 358 def getchanges(self, rev):
359 359 if self._changescache and self._changescache[0] == rev:
360 360 return self._changescache[1]
361 361 self._changescache = None
362 362 self.modecache = {}
363 363 (paths, parents) = self.paths[rev]
364 364 if parents:
365 365 files, copies = self.expandpaths(rev, paths, parents)
366 366 else:
367 367 # Perform a full checkout on roots
368 368 uuid, module, revnum = self.revsplit(rev)
369 369 entries = svn.client.ls(self.baseurl + urllib.quote(module),
370 370 optrev(revnum), True, self.ctx)
371 371 files = [n for n,e in entries.iteritems()
372 372 if e.kind == svn.core.svn_node_file]
373 373 copies = {}
374 374
375 375 files.sort()
376 376 files = zip(files, [rev] * len(files))
377 377
378 378 # caller caches the result, so free it here to release memory
379 379 del self.paths[rev]
380 380 return (files, copies)
381 381
382 382 def getchangedfiles(self, rev, i):
383 383 changes = self.getchanges(rev)
384 384 self._changescache = (rev, changes)
385 385 return [f[0] for f in changes[0]]
386 386
387 387 def getcommit(self, rev):
388 388 if rev not in self.commits:
389 389 uuid, module, revnum = self.revsplit(rev)
390 390 self.module = module
391 391 self.reparent(module)
392 392 # We assume that:
393 393 # - requests for revisions after "stop" come from the
394 394 # revision graph backward traversal. Cache all of them
395 395 # down to stop, they will be used eventually.
396 396 # - requests for revisions before "stop" come to get
397 397 # isolated branches parents. Just fetch what is needed.
398 398 stop = self.lastrevs.get(module, 0)
399 399 if revnum < stop:
400 400 stop = revnum + 1
401 401 self._fetch_revisions(revnum, stop)
402 402 commit = self.commits[rev]
403 403 # caller caches the result, so free it here to release memory
404 404 del self.commits[rev]
405 405 return commit
406 406
407 407 def gettags(self):
408 408 tags = {}
409 409 if self.tags is None:
410 410 return tags
411 411
412 412 # svn tags are just a convention, project branches left in a
413 413 # 'tags' directory. There is no other relationship than
414 414 # ancestry, which is expensive to discover and makes them hard
415 415 # to update incrementally. Worse, past revisions may be
416 416 # referenced by tags far away in the future, requiring a deep
417 417 # history traversal on every calculation. Current code
418 418 # performs a single backward traversal, tracking moves within
419 419 # the tags directory (tag renaming) and recording a new tag
420 420 # everytime a project is copied from outside the tags
421 421 # directory. It also lists deleted tags, this behaviour may
422 422 # change in the future.
423 423 pendings = []
424 424 tagspath = self.tags
425 425 start = svn.ra.get_latest_revnum(self.ra)
426 426 try:
427 427 for entry in self._getlog([self.tags], start, self.startrev):
428 428 origpaths, revnum, author, date, message = entry
429 429 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
430 430 in origpaths.iteritems() if e.copyfrom_path]
431 431 # Apply moves/copies from more specific to general
432 432 copies.sort(reverse=True)
433 433
434 434 srctagspath = tagspath
435 435 if copies and copies[-1][2] == tagspath:
436 436 # Track tags directory moves
437 437 srctagspath = copies.pop()[0]
438 438
439 439 for source, sourcerev, dest in copies:
440 440 if not dest.startswith(tagspath + '/'):
441 441 continue
442 442 for tag in pendings:
443 443 if tag[0].startswith(dest):
444 444 tagpath = source + tag[0][len(dest):]
445 445 tag[:2] = [tagpath, sourcerev]
446 446 break
447 447 else:
448 448 pendings.append([source, sourcerev, dest])
449 449
450 450 # Filter out tags with children coming from different
451 451 # parts of the repository like:
452 452 # /tags/tag.1 (from /trunk:10)
453 453 # /tags/tag.1/foo (from /branches/foo:12)
454 454 # Here/tags/tag.1 discarded as well as its children.
455 455 # It happens with tools like cvs2svn. Such tags cannot
456 456 # be represented in mercurial.
457 addeds = dict((p, e.copyfrom_path) for p, e
458 in origpaths.iteritems()
457 addeds = dict((p, e.copyfrom_path) for p, e
458 in origpaths.iteritems()
459 459 if e.action == 'A' and e.copyfrom_path)
460 460 badroots = set()
461 461 for destroot in addeds:
462 462 for source, sourcerev, dest in pendings:
463 463 if (not dest.startswith(destroot + '/')
464 464 or source.startswith(addeds[destroot] + '/')):
465 465 continue
466 466 badroots.add(destroot)
467 467 break
468 468
469 469 for badroot in badroots:
470 470 pendings = [p for p in pendings if p[2] != badroot
471 471 and not p[2].startswith(badroot + '/')]
472 472
473 473 # Tell tag renamings from tag creations
474 474 remainings = []
475 475 for source, sourcerev, dest in pendings:
476 476 tagname = dest.split('/')[-1]
477 477 if source.startswith(srctagspath):
478 478 remainings.append([source, sourcerev, tagname])
479 479 continue
480 480 if tagname in tags:
481 481 # Keep the latest tag value
482 482 continue
483 483 # From revision may be fake, get one with changes
484 484 try:
485 485 tagid = self.latest(source, sourcerev)
486 486 if tagid and tagname not in tags:
487 487 tags[tagname] = tagid
488 488 except SvnPathNotFound:
489 489 # It happens when we are following directories
490 490 # we assumed were copied with their parents
491 491 # but were really created in the tag
492 492 # directory.
493 493 pass
494 494 pendings = remainings
495 495 tagspath = srctagspath
496 496
497 497 except SubversionException:
498 498 self.ui.note(_('no tags found at revision %d\n') % start)
499 499 return tags
500 500
501 501 def converted(self, rev, destrev):
502 502 if not self.wc:
503 503 return
504 504 if self.convertfp is None:
505 505 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
506 506 'a')
507 507 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
508 508 self.convertfp.flush()
509 509
    def revid(self, revnum, module=None):
        # Build the stable revision identifier: svn:<uuid><module>@<revnum>
        return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
512 512
    def revnum(self, rev):
        # Extract the numeric revision from an identifier built by revid().
        return int(rev.split('@')[-1])
515 515
516 516 def revsplit(self, rev):
517 517 url, revnum = rev.rsplit('@', 1)
518 518 revnum = int(revnum)
519 519 parts = url.split('/', 1)
520 520 uuid = parts.pop(0)[4:]
521 521 mod = ''
522 522 if parts:
523 523 mod = '/' + parts[0]
524 524 return uuid, mod, revnum
525 525
    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if computed module does not
        belong to rootmodule subtree.

        Raises SvnPathNotFound if path does not exist up to stop.
        """
        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug(_('ignoring foreign branch %r\n') % path)
            return None

        if not stop:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat() wants repository-root-relative paths: reparent to
            # the root and restore the previous parent afterwards.
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        stream = self._getlog([path], stop, dirent.created_rev)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum <= dirent.created_rev:
                    break

                # Follow at most one rename per log entry.
                for p in paths:
                    if not path.startswith(p) or not paths[p].copyfrom_path:
                        continue
                    newpath = paths[p].copyfrom_path + path[len(p):]
                    self.ui.debug(_("branch renamed from %s to %s at %d\n") %
                                  (path, newpath, revnum))
                    path = newpath
                    break
        finally:
            stream.close()

        if not path.startswith(self.rootmodule):
            self.ui.debug(_('ignoring foreign branch %r\n') % path)
            return None
        return self.revid(dirent.created_rev, path)
573 573
574 574 def reparent(self, module):
575 575 """Reparent the svn transport and return the previous parent."""
576 576 if self.prevmodule == module:
577 577 return module
578 578 svnurl = self.baseurl + urllib.quote(module)
579 579 prevmodule = self.prevmodule
580 580 if prevmodule is None:
581 581 prevmodule = ''
582 582 self.ui.debug(_("reparent to %s\n") % svnurl)
583 583 svn.ra.reparent(self.ra, svnurl)
584 584 self.prevmodule = module
585 585 return prevmodule
586 586
    def expandpaths(self, rev, paths, parents):
        """Expand the changed paths of rev into (files, copies).

        files is the list of touched file paths (relative to the module),
        copies maps copy destinations to their sources. Directory
        entries are expanded into their children.
        """
        entries = []
        # Map of entrypath, revision for finding source of deleted
        # revisions.
        copyfrom = {}
        copies = {}

        new_module, revnum = self.revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for path, ent in paths:
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                entries.append(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug(_("copied to %s from %s@%s\n") %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                self.ui.debug(_("entry %s\n") % parentpath)

                # We can avoid the reparent calls if the module has
                # not changed but it probably does not worth the pain.
                prevmodule = self.reparent('')
                fromkind = svn.ra.check_path(self.ra, parentpath.strip('/'), prevnum)
                self.reparent(prevmodule)

                if fromkind == svn.core.svn_node_file:
                    entries.append(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A deleted directory: report each of its children
                    # as deleted, renaming them when the delete was a
                    # replacement ('C' keeps the original path).
                    if ent.action == 'C':
                        children = self._find_children(path, prevnum)
                    else:
                        oroot = parentpath.strip('/')
                        nroot = path.strip('/')
                        children = self._find_children(oroot, prevnum)
                        children = [s.replace(oroot,nroot) for s in children]

                    for child in children:
                        childpath = self.getrelpath("/" + child, pmodule)
                        if not childpath:
                            continue
                        if childpath in copies:
                            del copies[childpath]
                        entries.append(childpath)
                else:
                    self.ui.debug(_('unknown path in revision %d: %s\n') % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                # If the directory just had a prop change,
                # then we shouldn't need to look for its children.
                if ent.action == 'M':
                    continue

                children = sorted(self._find_children(path, revnum))
                for child in children:
                    # Can we move a child directory and its
                    # parent in the same commit? (probably can). Could
                    # cause problems if instead of revnum -1,
                    # we have to look in (copyfrom_path, revnum - 1)
                    entrypath = self.getrelpath("/" + child)
                    if entrypath:
                        # Need to filter out directories here...
                        kind = self._checkpath(entrypath, revnum)
                        if kind != svn.core.svn_node_dir:
                            entries.append(self.recode(entrypath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                copyfrom[path] = ent
                self.ui.debug(_("mark %s came from %s:%d\n")
                              % (path, copyfrompath, ent.copyfrom_rev))
                children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
                children.sort()
                for child in children:
                    entrypath = self.getrelpath("/" + child, pmodule)
                    if not entrypath:
                        continue
                    copytopath = path + entrypath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(entrypath)

        return (list(set(entries)), copies)
695 695
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Walk the log from from_revnum down to to_revnum, populating
        self.commits and self.paths for every related revision."""
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug(_("parsing revision %d (%d changes)\n") %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = self.revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(_('found parent of branch %s at %d: %s\n') %
                                         (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug(_("no copyfrom path, don't know what to do.\n"))

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                branch = self.module.split("/")[-1]
                if branch == 'trunk':
                    branch = ''
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug(_('revision %d has no entries\n') % revnum)
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
            raise
815 815
    def _getfile(self, file, rev):
        """Return (data, mode) for file at rev; mode is '', 'x' or 'l'.

        Raises IOError when the file does not exist at that revision.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        mode = ''
        try:
            new_module, revnum = self.revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = StringIO()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_files() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitely.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        if mode == 'l':
            # Symlink contents are stored with a "link " prefix; strip it.
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
845 845
846 846 def _find_children(self, path, revnum):
847 847 path = path.strip('/')
848 848 pool = Pool()
849 849 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
850 850 return ['%s/%s' % (path, x) for x in
851 851 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
852 852
853 853 def getrelpath(self, path, module=None):
854 854 if module is None:
855 855 module = self.module
856 856 # Given the repository url of this wc, say
857 857 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
858 858 # extract the "entry" portion (a relative path) from what
859 859 # svn log --xml says, ie
860 860 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
861 861 # that is to say "tests/PloneTestCase.py"
862 862 if path.startswith(module):
863 863 relative = path.rstrip('/')[len(module):]
864 864 if relative.startswith('/'):
865 865 return relative[1:]
866 866 elif relative == '':
867 867 return relative
868 868
869 869 # The path is outside our tracked tree...
870 870 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
871 871 return None
872 872
873 873 def _checkpath(self, path, revnum):
874 874 # ra.check_path does not like leading slashes very much, it leads
875 875 # to PROPFIND subversion errors
876 876 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
877 877
878 878 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
879 879 strict_node_history=False):
880 880 # Normalize path names, svn >= 1.5 only wants paths relative to
881 881 # supplied URL
882 882 relpaths = []
883 883 for p in paths:
884 884 if not p.startswith('/'):
885 885 p = self.module + '/' + p
886 886 relpaths.append(p.strip('/'))
887 887 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
888 888 strict_node_history]
889 889 arg = encodeargs(args)
890 890 hgexe = util.hgexecutable()
891 891 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
892 892 stdin, stdout = util.popen2(cmd)
893 893 stdin.write(arg)
894 894 stdin.close()
895 895 return logstream(stdout)
896 896
897 897 pre_revprop_change = '''#!/bin/sh
898 898
899 899 REPOS="$1"
900 900 REV="$2"
901 901 USER="$3"
902 902 PROPNAME="$4"
903 903 ACTION="$5"
904 904
905 905 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
906 906 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
907 907 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
908 908
909 909 echo "Changing prohibited revision property" >&2
910 910 exit 1
911 911 '''
912 912
913 913 class svn_sink(converter_sink, commandline):
914 914 commit_re = re.compile(r'Committed revision (\d+).', re.M)
915 915
916 916 def prerun(self):
917 917 if self.wc:
918 918 os.chdir(self.wc)
919 919
920 920 def postrun(self):
921 921 if self.wc:
922 922 os.chdir(self.cwd)
923 923
924 924 def join(self, name):
925 925 return os.path.join(self.wc, '.svn', name)
926 926
927 927 def revmapfile(self):
928 928 return self.join('hg-shamap')
929 929
930 930 def authorfile(self):
931 931 return self.join('hg-authormap')
932 932
933 933 def __init__(self, ui, path):
934 934 converter_sink.__init__(self, ui, path)
935 935 commandline.__init__(self, ui, 'svn')
936 936 self.delete = []
937 937 self.setexec = []
938 938 self.delexec = []
939 939 self.copies = []
940 940 self.wc = None
941 941 self.cwd = os.getcwd()
942 942
943 943 path = os.path.realpath(path)
944 944
945 945 created = False
946 946 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
947 947 self.wc = path
948 948 self.run0('update')
949 949 else:
950 950 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
951 951
952 952 if os.path.isdir(os.path.dirname(path)):
953 953 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
954 954 ui.status(_('initializing svn repo %r\n') %
955 955 os.path.basename(path))
956 956 commandline(ui, 'svnadmin').run0('create', path)
957 957 created = path
958 958 path = util.normpath(path)
959 959 if not path.startswith('/'):
960 960 path = '/' + path
961 961 path = 'file://' + path
962 962
963 963 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
964 964 self.run0('checkout', path, wcpath)
965 965
966 966 self.wc = wcpath
967 967 self.opener = util.opener(self.wc)
968 968 self.wopener = util.opener(self.wc)
969 969 self.childmap = mapfile(ui, self.join('hg-childmap'))
970 970 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
971 971
972 972 if created:
973 973 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
974 974 fp = open(hook, 'w')
975 975 fp.write(pre_revprop_change)
976 976 fp.close()
977 977 util.set_flags(hook, False, True)
978 978
979 979 xport = transport.SvnRaTransport(url=geturl(path))
980 980 self.uuid = svn.ra.get_uuid(xport.ra)
981 981
982 982 def wjoin(self, *names):
983 983 return os.path.join(self.wc, *names)
984 984
985 985 def putfile(self, filename, flags, data):
986 986 if 'l' in flags:
987 987 self.wopener.symlink(data, filename)
988 988 else:
989 989 try:
990 990 if os.path.islink(self.wjoin(filename)):
991 991 os.unlink(filename)
992 992 except OSError:
993 993 pass
994 994 self.wopener(filename, 'w').write(data)
995 995
996 996 if self.is_exec:
997 997 was_exec = self.is_exec(self.wjoin(filename))
998 998 else:
999 999 # On filesystems not supporting execute-bit, there is no way
1000 1000 # to know if it is set but asking subversion. Setting it
1001 1001 # systematically is just as expensive and much simpler.
1002 1002 was_exec = 'x' not in flags
1003 1003
1004 1004 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1005 1005 if was_exec:
1006 1006 if 'x' not in flags:
1007 1007 self.delexec.append(filename)
1008 1008 else:
1009 1009 if 'x' in flags:
1010 1010 self.setexec.append(filename)
1011 1011
1012 1012 def _copyfile(self, source, dest):
1013 1013 # SVN's copy command pukes if the destination file exists, but
1014 1014 # our copyfile method expects to record a copy that has
1015 1015 # already occurred. Cross the semantic gap.
1016 1016 wdest = self.wjoin(dest)
1017 1017 exists = os.path.exists(wdest)
1018 1018 if exists:
1019 1019 fd, tempname = tempfile.mkstemp(
1020 1020 prefix='hg-copy-', dir=os.path.dirname(wdest))
1021 1021 os.close(fd)
1022 1022 os.unlink(tempname)
1023 1023 os.rename(wdest, tempname)
1024 1024 try:
1025 1025 self.run0('copy', source, dest)
1026 1026 finally:
1027 1027 if exists:
1028 1028 try:
1029 1029 os.unlink(wdest)
1030 1030 except OSError:
1031 1031 pass
1032 1032 os.rename(tempname, wdest)
1033 1033
1034 1034 def dirs_of(self, files):
1035 1035 dirs = set()
1036 1036 for f in files:
1037 1037 if os.path.isdir(self.wjoin(f)):
1038 1038 dirs.add(f)
1039 1039 for i in strutil.rfindall(f, '/'):
1040 1040 dirs.add(f[:i])
1041 1041 return dirs
1042 1042
1043 1043 def add_dirs(self, files):
1044 1044 add_dirs = [d for d in sorted(self.dirs_of(files))
1045 1045 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1046 1046 if add_dirs:
1047 1047 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1048 1048 return add_dirs
1049 1049
1050 1050 def add_files(self, files):
1051 1051 if files:
1052 1052 self.xargs(files, 'add', quiet=True)
1053 1053 return files
1054 1054
1055 1055 def tidy_dirs(self, names):
1056 1056 deleted = []
1057 1057 for d in sorted(self.dirs_of(names), reverse=True):
1058 1058 wd = self.wjoin(d)
1059 1059 if os.listdir(wd) == '.svn':
1060 1060 self.run0('delete', d)
1061 1061 deleted.append(d)
1062 1062 return deleted
1063 1063
1064 1064 def addchild(self, parent, child):
1065 1065 self.childmap[parent] = child
1066 1066
1067 1067 def revid(self, rev):
1068 1068 return u"svn:%s@%s" % (self.uuid, rev)
1069 1069
1070 1070 def putcommit(self, files, copies, parents, commit, source, revmap):
1071 1071 # Apply changes to working copy
1072 1072 for f, v in files:
1073 1073 try:
1074 1074 data = source.getfile(f, v)
1075 1075 except IOError:
1076 1076 self.delete.append(f)
1077 1077 else:
1078 1078 e = source.getmode(f, v)
1079 1079 self.putfile(f, e, data)
1080 1080 if f in copies:
1081 1081 self.copies.append([copies[f], f])
1082 1082 files = [f[0] for f in files]
1083 1083
1084 1084 for parent in parents:
1085 1085 try:
1086 1086 return self.revid(self.childmap[parent])
1087 1087 except KeyError:
1088 1088 pass
1089 1089 entries = set(self.delete)
1090 1090 files = frozenset(files)
1091 1091 entries.update(self.add_dirs(files.difference(entries)))
1092 1092 if self.copies:
1093 1093 for s, d in self.copies:
1094 1094 self._copyfile(s, d)
1095 1095 self.copies = []
1096 1096 if self.delete:
1097 1097 self.xargs(self.delete, 'delete')
1098 1098 self.delete = []
1099 1099 entries.update(self.add_files(files.difference(entries)))
1100 1100 entries.update(self.tidy_dirs(entries))
1101 1101 if self.delexec:
1102 1102 self.xargs(self.delexec, 'propdel', 'svn:executable')
1103 1103 self.delexec = []
1104 1104 if self.setexec:
1105 1105 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1106 1106 self.setexec = []
1107 1107
1108 1108 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1109 1109 fp = os.fdopen(fd, 'w')
1110 1110 fp.write(commit.desc)
1111 1111 fp.close()
1112 1112 try:
1113 1113 output = self.run0('commit',
1114 1114 username=util.shortuser(commit.author),
1115 1115 file=messagefile,
1116 1116 encoding='utf-8')
1117 1117 try:
1118 1118 rev = self.commit_re.search(output).group(1)
1119 1119 except AttributeError:
1120 1120 self.ui.warn(_('unexpected svn output:\n'))
1121 1121 self.ui.warn(output)
1122 1122 raise util.Abort(_('unable to cope with svn output'))
1123 1123 if commit.rev:
1124 1124 self.run('propset', 'hg:convert-rev', commit.rev,
1125 1125 revprop=True, revision=rev)
1126 1126 if commit.branch and commit.branch != 'default':
1127 1127 self.run('propset', 'hg:convert-branch', commit.branch,
1128 1128 revprop=True, revision=rev)
1129 1129 for parent in parents:
1130 1130 self.addchild(parent, rev)
1131 1131 return self.revid(rev)
1132 1132 finally:
1133 1133 os.unlink(messagefile)
1134 1134
1135 1135 def puttags(self, tags):
1136 1136 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,339 +1,338
1 1 # tags.py - read tag info from local repository
2 2 #
3 3 # Copyright 2009 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2009 Greg Ward <greg@gerg.ca>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2, incorporated herein by reference.
8 8
9 9 # Currently this module only deals with reading and caching tags.
10 10 # Eventually, it could take care of updating (adding/removing/moving)
11 11 # tags too.
12 12
13 13 import os
14 14 from node import nullid, bin, hex, short
15 15 from i18n import _
16 16 import encoding
17 17 import error
18 18
19 19 def _debugalways(ui, *msg):
20 20 ui.write(*msg)
21 21
22 22 def _debugconditional(ui, *msg):
23 23 ui.debug(*msg)
24 24
25 25 def _debugnever(ui, *msg):
26 26 pass
27 27
28 28 _debug = _debugalways
29 29 _debug = _debugnever
30 30
31 31 def findglobaltags1(ui, repo, alltags, tagtypes):
32 32 '''Find global tags in repo by reading .hgtags from every head that
33 33 has a distinct version of it. Updates the dicts alltags, tagtypes
34 34 in place: alltags maps tag name to (node, hist) pair (see _readtags()
35 35 below), and tagtypes maps tag name to tag type ('global' in this
36 36 case).'''
37 37
38 38 seen = set()
39 39 fctx = None
40 40 ctxs = [] # list of filectx
41 41 for node in repo.heads():
42 42 try:
43 43 fnode = repo[node].filenode('.hgtags')
44 44 except error.LookupError:
45 45 continue
46 46 if fnode not in seen:
47 47 seen.add(fnode)
48 48 if not fctx:
49 49 fctx = repo.filectx('.hgtags', fileid=fnode)
50 50 else:
51 51 fctx = fctx.filectx(fnode)
52 52 ctxs.append(fctx)
53 53
54 54 # read the tags file from each head, ending with the tip
55 55 for fctx in reversed(ctxs):
56 56 filetags = _readtags(
57 57 ui, repo, fctx.data().splitlines(), fctx)
58 58 _updatetags(filetags, "global", alltags, tagtypes)
59 59
60 60 def findglobaltags2(ui, repo, alltags, tagtypes):
61 61 '''Same as findglobaltags1(), but with caching.'''
62 62 # This is so we can be lazy and assume alltags contains only global
63 63 # tags when we pass it to _writetagcache().
64 64 assert len(alltags) == len(tagtypes) == 0, \
65 65 "findglobaltags() should be called first"
66 66
67 67 (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo)
68 68 if cachetags is not None:
69 69 assert not shouldwrite
70 70 # XXX is this really 100% correct? are there oddball special
71 71 # cases where a global tag should outrank a local tag but won't,
72 72 # because cachetags does not contain rank info?
73 73 _updatetags(cachetags, 'global', alltags, tagtypes)
74 74 return
75 75
76 76 _debug(ui, "reading tags from %d head(s): %s\n"
77 77 % (len(heads), map(short, reversed(heads))))
78 78 seen = set() # set of fnode
79 79 fctx = None
80 80 for head in reversed(heads): # oldest to newest
81 81 assert head in repo.changelog.nodemap, \
82 82 "tag cache returned bogus head %s" % short(head)
83 83
84 84 fnode = tagfnode.get(head)
85 85 if fnode and fnode not in seen:
86 86 seen.add(fnode)
87 87 if not fctx:
88 88 fctx = repo.filectx('.hgtags', fileid=fnode)
89 89 else:
90 90 fctx = fctx.filectx(fnode)
91 91
92 92 filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
93 93 _updatetags(filetags, 'global', alltags, tagtypes)
94 94
95 95 # and update the cache (if necessary)
96 96 if shouldwrite:
97 97 _writetagcache(ui, repo, heads, tagfnode, alltags)
98 98
99 99 # Set this to findglobaltags1 to disable tag caching.
100 100 findglobaltags = findglobaltags2
101 101
102 102 def readlocaltags(ui, repo, alltags, tagtypes):
103 103 '''Read local tags in repo. Update alltags and tagtypes.'''
104 104 try:
105 105 # localtags is in the local encoding; re-encode to UTF-8 on
106 106 # input for consistency with the rest of this module.
107 107 data = repo.opener("localtags").read()
108 108 filetags = _readtags(
109 109 ui, repo, data.splitlines(), "localtags",
110 110 recode=encoding.fromlocal)
111 111 _updatetags(filetags, "local", alltags, tagtypes)
112 112 except IOError:
113 113 pass
114 114
115 115 def _readtags(ui, repo, lines, fn, recode=None):
116 116 '''Read tag definitions from a file (or any source of lines).
117 117 Return a mapping from tag name to (node, hist): node is the node id
118 118 from the last line read for that name, and hist is the list of node
119 119 ids previously associated with it (in file order). All node ids are
120 120 binary, not hex.'''
121 121
122 122 filetags = {} # map tag name to (node, hist)
123 123 count = 0
124 124
125 125 def warn(msg):
126 126 ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
127 127
128 128 for line in lines:
129 129 count += 1
130 130 if not line:
131 131 continue
132 132 try:
133 133 (nodehex, name) = line.split(" ", 1)
134 134 except ValueError:
135 135 warn(_("cannot parse entry"))
136 136 continue
137 137 name = name.strip()
138 138 if recode:
139 139 name = recode(name)
140 140 try:
141 141 nodebin = bin(nodehex)
142 142 except TypeError:
143 143 warn(_("node '%s' is not well formed") % nodehex)
144 144 continue
145 145 if nodebin not in repo.changelog.nodemap:
146 146 # silently ignore as pull -r might cause this
147 147 continue
148 148
149 149 # update filetags
150 150 hist = []
151 151 if name in filetags:
152 152 n, hist = filetags[name]
153 153 hist.append(n)
154 154 filetags[name] = (nodebin, hist)
155 155 return filetags
156 156
157 157 def _updatetags(filetags, tagtype, alltags, tagtypes):
158 158 '''Incorporate the tag info read from one file into the two
159 159 dictionaries, alltags and tagtypes, that contain all tag
160 160 info (global across all heads plus local).'''
161 161
162 162 for name, nodehist in filetags.iteritems():
163 163 if name not in alltags:
164 164 alltags[name] = nodehist
165 165 tagtypes[name] = tagtype
166 166 continue
167 167
168 168 # we prefer alltags[name] if:
169 169 # it supercedes us OR
170 170 # mutual supercedes and it has a higher rank
171 171 # otherwise we win because we're tip-most
172 172 anode, ahist = nodehist
173 173 bnode, bhist = alltags[name]
174 174 if (bnode != anode and anode in bhist and
175 175 (bnode not in ahist or len(bhist) > len(ahist))):
176 176 anode = bnode
177 177 ahist.extend([n for n in bhist if n not in ahist])
178 178 alltags[name] = anode, ahist
179 179 tagtypes[name] = tagtype
180 180
181 181
182 182 # The tag cache only stores info about heads, not the tag contents
183 183 # from each head. I.e. it doesn't try to squeeze out the maximum
184 184 # performance, but is simpler has a better chance of actually
185 185 # working correctly. And this gives the biggest performance win: it
186 186 # avoids looking up .hgtags in the manifest for every head, and it
187 187 # can avoid calling heads() at all if there have been no changes to
188 188 # the repo.
189 189
190 190 def _readtagcache(ui, repo):
191 191 '''Read the tag cache and return a tuple (heads, fnodes, cachetags,
192 192 shouldwrite). If the cache is completely up-to-date, cachetags is a
193 193 dict of the form returned by _readtags(); otherwise, it is None and
194 194 heads and fnodes are set. In that case, heads is the list of all
195 195 heads currently in the repository (ordered from tip to oldest) and
196 196 fnodes is a mapping from head to .hgtags filenode. If those two are
197 197 set, caller is responsible for reading tag info from each head.'''
198 198
199 199 try:
200 200 cachefile = repo.opener('tags.cache', 'r')
201 201 _debug(ui, 'reading tag cache from %s\n' % cachefile.name)
202 202 except IOError:
203 203 cachefile = None
204 204
205 205 # The cache file consists of lines like
206 206 # <headrev> <headnode> [<tagnode>]
207 207 # where <headrev> and <headnode> redundantly identify a repository
208 208 # head from the time the cache was written, and <tagnode> is the
209 209 # filenode of .hgtags on that head. Heads with no .hgtags file will
210 210 # have no <tagnode>. The cache is ordered from tip to oldest (which
211 211 # is part of why <headrev> is there: a quick visual check is all
212 212 # that's required to ensure correct order).
213 #
213 #
214 214 # This information is enough to let us avoid the most expensive part
215 215 # of finding global tags, which is looking up <tagnode> in the
216 216 # manifest for each head.
217 217 cacherevs = [] # list of headrev
218 218 cacheheads = [] # list of headnode
219 219 cachefnode = {} # map headnode to filenode
220 220 if cachefile:
221 221 for line in cachefile:
222 222 if line == "\n":
223 223 break
224 224 line = line.rstrip().split()
225 225 cacherevs.append(int(line[0]))
226 226 headnode = bin(line[1])
227 227 cacheheads.append(headnode)
228 228 if len(line) == 3:
229 229 fnode = bin(line[2])
230 230 cachefnode[headnode] = fnode
231 231
232 232 tipnode = repo.changelog.tip()
233 233 tiprev = len(repo.changelog) - 1
234 234
235 235 # Case 1 (common): tip is the same, so nothing has changed.
236 236 # (Unchanged tip trivially means no changesets have been added.
237 237 # But, thanks to localrepository.destroyed(), it also means none
238 238 # have been destroyed by strip or rollback.)
239 239 if cacheheads and cacheheads[0] == tipnode and cacherevs[0] == tiprev:
240 240 _debug(ui, "tag cache: tip unchanged\n")
241 241 tags = _readtags(ui, repo, cachefile, cachefile.name)
242 242 cachefile.close()
243 243 return (None, None, tags, False)
244 244 if cachefile:
245 245 cachefile.close() # ignore rest of file
246
246
247 247 repoheads = repo.heads()
248
249 248 # Case 2 (uncommon): empty repo; get out quickly and don't bother
250 249 # writing an empty cache.
251 250 if repoheads == [nullid]:
252 251 return ([], {}, {}, False)
253 252
254 253 # Case 3 (uncommon): cache file missing or empty.
255 254 if not cacheheads:
256 255 _debug(ui, 'tag cache: cache file missing or empty\n')
257 256
258 257 # Case 4 (uncommon): tip rev decreased. This should only happen
259 258 # when we're called from localrepository.destroyed(). Refresh the
260 259 # cache so future invocations will not see disappeared heads in the
261 260 # cache.
262 261 elif cacheheads and tiprev < cacherevs[0]:
263 262 _debug(ui,
264 263 'tag cache: tip rev decremented (from %d to %d), '
265 264 'so we must be destroying nodes\n'
266 265 % (cacherevs[0], tiprev))
267 266
268 267 # Case 5 (common): tip has changed, so we've added/replaced heads.
269 268 else:
270 269 _debug(ui,
271 270 'tag cache: tip has changed (%d:%s); must find new heads\n'
272 271 % (tiprev, short(tipnode)))
273 272
274 273 # Luckily, the code to handle cases 3, 4, 5 is the same. So the
275 274 # above if/elif/else can disappear once we're confident this thing
276 275 # actually works and we don't need the debug output.
277 276
278 277 # N.B. in case 4 (nodes destroyed), "new head" really means "newly
279 278 # exposed".
280 279 newheads = [head
281 280 for head in repoheads
282 281 if head not in set(cacheheads)]
283 282 _debug(ui, 'tag cache: found %d head(s) not in cache: %s\n'
284 283 % (len(newheads), map(short, newheads)))
285 284
286 285 # Now we have to lookup the .hgtags filenode for every new head.
287 286 # This is the most expensive part of finding tags, so performance
288 287 # depends primarily on the size of newheads. Worst case: no cache
289 288 # file, so newheads == repoheads.
290 289 for head in newheads:
291 290 cctx = repo[head]
292 291 try:
293 292 fnode = cctx.filenode('.hgtags')
294 293 cachefnode[head] = fnode
295 294 except error.LookupError:
296 295 # no .hgtags file on this head
297 296 pass
298 297
299 298 # Caller has to iterate over all heads, but can use the filenodes in
300 299 # cachefnode to get to each .hgtags revision quickly.
301 300 return (repoheads, cachefnode, None, True)
302 301
303 302 def _writetagcache(ui, repo, heads, tagfnode, cachetags):
304 303
305 304 cachefile = repo.opener('tags.cache', 'w', atomictemp=True)
306 305 _debug(ui, 'writing cache file %s\n' % cachefile.name)
307 306
308 307 realheads = repo.heads() # for sanity checks below
309 308 for head in heads:
310 309 # temporary sanity checks; these can probably be removed
311 310 # once this code has been in crew for a few weeks
312 311 assert head in repo.changelog.nodemap, \
313 312 'trying to write non-existent node %s to tag cache' % short(head)
314 313 assert head in realheads, \
315 314 'trying to write non-head %s to tag cache' % short(head)
316 315 assert head != nullid, \
317 316 'trying to write nullid to tag cache'
318 317
319 318 # This can't fail because of the first assert above. When/if we
320 319 # remove that assert, we might want to catch LookupError here
321 320 # and downgrade it to a warning.
322 321 rev = repo.changelog.rev(head)
323 322
324 323 fnode = tagfnode.get(head)
325 324 if fnode:
326 325 cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
327 326 else:
328 327 cachefile.write('%d %s\n' % (rev, hex(head)))
329 328
330 329 # Tag names in the cache are in UTF-8 -- which is the whole reason
331 330 # we keep them in UTF-8 throughout this module. If we converted
332 331 # them local encoding on input, we would lose info writing them to
333 332 # the cache.
334 333 cachefile.write('\n')
335 334 for (name, (node, hist)) in cachetags.iteritems():
336 335 cachefile.write("%s %s\n" % (hex(node), name))
337 336
338 337 cachefile.rename()
339 338 cachefile.close()
@@ -1,381 +1,381
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import errno, getpass, os, socket, sys, tempfile, traceback
10 10 import config, util, error
11 11
12 12 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True,
13 13 '0': False, 'no': False, 'false': False, 'off': False}
14 14
15 15 class ui(object):
16 16 def __init__(self, src=None):
17 17 self._buffers = []
18 18 self.quiet = self.verbose = self.debugflag = self._traceback = False
19 19 self._reportuntrusted = True
20 20 self._ocfg = config.config() # overlay
21 21 self._tcfg = config.config() # trusted
22 22 self._ucfg = config.config() # untrusted
23 23 self._trustusers = set()
24 24 self._trustgroups = set()
25 25
26 26 if src:
27 27 self._tcfg = src._tcfg.copy()
28 28 self._ucfg = src._ucfg.copy()
29 29 self._ocfg = src._ocfg.copy()
30 30 self._trustusers = src._trustusers.copy()
31 31 self._trustgroups = src._trustgroups.copy()
32 32 self.fixconfig()
33 33 else:
34 34 # we always trust global config files
35 35 for f in util.rcpath():
36 36 self.readconfig(f, trust=True)
37 37
38 38 def copy(self):
39 39 return self.__class__(self)
40 40
41 41 def _is_trusted(self, fp, f):
42 42 st = util.fstat(fp)
43 43 if util.isowner(st):
44 44 return True
45 45
46 46 tusers, tgroups = self._trustusers, self._trustgroups
47 47 if '*' in tusers or '*' in tgroups:
48 48 return True
49 49
50 50 user = util.username(st.st_uid)
51 51 group = util.groupname(st.st_gid)
52 52 if user in tusers or group in tgroups or user == util.username():
53 53 return True
54 54
55 55 if self._reportuntrusted:
56 56 self.warn(_('Not trusting file %s from untrusted '
57 57 'user %s, group %s\n') % (f, user, group))
58 58 return False
59 59
60 60 def readconfig(self, filename, root=None, trust=False,
61 61 sections=None, remap=None):
62 62 try:
63 63 fp = open(filename)
64 64 except IOError:
65 65 if not sections: # ignore unless we were looking for something
66 66 return
67 67 raise
68 68
69 69 cfg = config.config()
70 70 trusted = sections or trust or self._is_trusted(fp, filename)
71 71
72 72 try:
73 73 cfg.read(filename, fp, sections=sections, remap=remap)
74 74 except error.ConfigError, inst:
75 75 if trusted:
76 76 raise
77 77 self.warn(_("Ignored: %s\n") % str(inst))
78 78
79 79 if trusted:
80 80 self._tcfg.update(cfg)
81 81 self._tcfg.update(self._ocfg)
82 82 self._ucfg.update(cfg)
83 83 self._ucfg.update(self._ocfg)
84 84
85 85 if root is None:
86 86 root = os.path.expanduser('~')
87 87 self.fixconfig(root=root)
88 88
89 89 def fixconfig(self, root=None):
90 90 # translate paths relative to root (or home) into absolute paths
91 91 root = root or os.getcwd()
92 92 for c in self._tcfg, self._ucfg, self._ocfg:
93 93 for n, p in c.items('paths'):
94 94 if p and "://" not in p and not os.path.isabs(p):
95 95 c.set("paths", n, os.path.normpath(os.path.join(root, p)))
96 96
97 97 # update ui options
98 98 self.debugflag = self.configbool('ui', 'debug')
99 99 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
100 100 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
101 101 if self.verbose and self.quiet:
102 102 self.quiet = self.verbose = False
103 103 self._reportuntrusted = self.configbool("ui", "report_untrusted", True)
104 104 self._traceback = self.configbool('ui', 'traceback', False)
105 105
106 106 # update trust information
107 107 self._trustusers.update(self.configlist('trusted', 'users'))
108 108 self._trustgroups.update(self.configlist('trusted', 'groups'))
109 109
110 110 def setconfig(self, section, name, value):
111 111 for cfg in (self._ocfg, self._tcfg, self._ucfg):
112 112 cfg.set(section, name, value)
113 113 self.fixconfig()
114 114
115 115 def _data(self, untrusted):
116 116 return untrusted and self._ucfg or self._tcfg
117 117
118 118 def configsource(self, section, name, untrusted=False):
119 119 return self._data(untrusted).source(section, name) or 'none'
120 120
121 121 def config(self, section, name, default=None, untrusted=False):
122 122 value = self._data(untrusted).get(section, name, default)
123 123 if self.debugflag and not untrusted and self._reportuntrusted:
124 124 uvalue = self._ucfg.get(section, name)
125 125 if uvalue is not None and uvalue != value:
126 126 self.debug(_("ignoring untrusted configuration option "
127 127 "%s.%s = %s\n") % (section, name, uvalue))
128 128 return value
129 129
130 130 def configbool(self, section, name, default=False, untrusted=False):
131 131 v = self.config(section, name, None, untrusted)
132 132 if v is None:
133 133 return default
134 134 if v.lower() not in _booleans:
135 135 raise error.ConfigError(_("%s.%s not a boolean ('%s')")
136 136 % (section, name, v))
137 137 return _booleans[v.lower()]
138 138
139 139 def configlist(self, section, name, default=None, untrusted=False):
140 140 """Return a list of comma/space separated strings"""
141 141 result = self.config(section, name, untrusted=untrusted)
142 142 if result is None:
143 143 result = default or []
144 144 if isinstance(result, basestring):
145 145 result = result.replace(",", " ").split()
146 146 return result
147 147
148 148 def has_section(self, section, untrusted=False):
149 149 '''tell whether section exists in config.'''
150 150 return section in self._data(untrusted)
151 151
152 152 def configitems(self, section, untrusted=False):
153 153 items = self._data(untrusted).items(section)
154 154 if self.debugflag and not untrusted and self._reportuntrusted:
155 155 for k, v in self._ucfg.items(section):
156 156 if self._tcfg.get(section, k) != v:
157 157 self.debug(_("ignoring untrusted configuration option "
158 158 "%s.%s = %s\n") % (section, k, v))
159 159 return items
160 160
161 161 def walkconfig(self, untrusted=False):
162 162 cfg = self._data(untrusted)
163 163 for section in cfg.sections():
164 164 for name, value in self.configitems(section, untrusted):
165 165 yield section, name, str(value).replace('\n', '\\n')
166 166
167 167 def username(self):
168 168 """Return default username to be used in commits.
169 169
170 170 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
171 171 and stop searching if one of these is set.
172 172 If not found and ui.askusername is True, ask the user, else use
173 173 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
174 174 """
175 175 user = os.environ.get("HGUSER")
176 176 if user is None:
177 177 user = self.config("ui", "username")
178 178 if user is None:
179 179 user = os.environ.get("EMAIL")
180 180 if user is None and self.configbool("ui", "askusername"):
181 181 user = self.prompt(_("enter a commit username:"), default=None)
182 182 if user is None:
183 183 try:
184 184 user = '%s@%s' % (util.getuser(), socket.getfqdn())
185 185 self.warn(_("No username found, using '%s' instead\n") % user)
186 186 except KeyError:
187 187 pass
188 188 if not user:
189 189 raise util.Abort(_("Please specify a username."))
190 190 if "\n" in user:
191 191 raise util.Abort(_("username %s contains a newline\n") % repr(user))
192 192 return user
193 193
194 194 def shortuser(self, user):
195 195 """Return a short representation of a user name or email address."""
196 196 if not self.verbose: user = util.shortuser(user)
197 197 return user
198 198
199 199 def _path(self, loc):
200 200 p = self.config('paths', loc)
201 201 if p and '%%' in p:
202 202 self.warn('(deprecated \'%%\' in path %s=%s from %s)\n' %
203 203 (loc, p, self.configsource('paths', loc)))
204 204 p = p.replace('%%', '%')
205 205 return p
206 206
207 207 def expandpath(self, loc, default=None):
208 208 """Return repository location relative to cwd or from [paths]"""
209 209 if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
210 210 return loc
211 211
212 212 path = self._path(loc)
213 213 if not path and default is not None:
214 214 path = self._path(default)
215 215 return path or loc
216 216
217 217 def pushbuffer(self):
218 218 self._buffers.append([])
219 219
220 220 def popbuffer(self):
221 221 return "".join(self._buffers.pop())
222 222
223 223 def write(self, *args):
224 224 if self._buffers:
225 225 self._buffers[-1].extend([str(a) for a in args])
226 226 else:
227 227 for a in args:
228 228 sys.stdout.write(str(a))
229 229
230 230 def write_err(self, *args):
231 231 try:
232 232 if not sys.stdout.closed: sys.stdout.flush()
233 233 for a in args:
234 234 sys.stderr.write(str(a))
235 235 # stderr may be buffered under win32 when redirected to files,
236 236 # including stdout.
237 237 if not sys.stderr.closed: sys.stderr.flush()
238 238 except IOError, inst:
239 239 if inst.errno != errno.EPIPE:
240 240 raise
241 241
242 242 def flush(self):
243 243 try: sys.stdout.flush()
244 244 except: pass
245 245 try: sys.stderr.flush()
246 246 except: pass
247 247
248 248 def interactive(self):
249 249 i = self.configbool("ui", "interactive", None)
250 250 if i is None:
251 251 return sys.stdin.isatty()
252 252 return i
253 253
254 254 def _readline(self, prompt=''):
255 255 if sys.stdin.isatty():
256 256 try:
257 257 # magically add command line editing support, where
258 258 # available
259 259 import readline
260 260 # force demandimport to really load the module
261 261 readline.read_history_file
262 262 # windows sometimes raises something other than ImportError
263 263 except Exception:
264 264 pass
265 265 line = raw_input(prompt)
266 266 # When stdin is in binary mode on Windows, it can cause
267 267 # raw_input() to emit an extra trailing carriage return
268 268 if os.linesep == '\r\n' and line and line[-1] == '\r':
269 269 line = line[:-1]
270 270 return line
271 271
272 272 def prompt(self, msg, default="y"):
273 273 """Prompt user with msg, read response.
274 274 If ui is not interactive, the default is returned.
275 275 """
276 276 if not self.interactive():
277 277 self.write(msg, ' ', default, "\n")
278 278 return default
279 279 try:
280 280 r = self._readline(msg + ' ')
281 281 if not r:
282 282 return default
283 283 return r
284 284 except EOFError:
285 285 raise util.Abort(_('response expected'))
286 286
287 287 def promptchoice(self, msg, choices, default=0):
288 288 """Prompt user with msg, read response, and ensure it matches
289 289 one of the provided choices. The index of the choice is returned.
290 290 choices is a sequence of acceptable responses with the format:
291 ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
291 ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
292 292 If ui is not interactive, the default is returned.
293 293 """
294 294 resps = [s[s.index('&')+1].lower() for s in choices]
295 295 while True:
296 296 r = self.prompt(msg, resps[default])
297 297 if r.lower() in resps:
298 298 return resps.index(r.lower())
299 299 self.write(_("unrecognized response\n"))
300 300
301 301
302 302 def getpass(self, prompt=None, default=None):
303 303 if not self.interactive(): return default
304 304 try:
305 305 return getpass.getpass(prompt or _('password: '))
306 306 except EOFError:
307 307 raise util.Abort(_('response expected'))
308 308 def status(self, *msg):
309 309 if not self.quiet: self.write(*msg)
310 310 def warn(self, *msg):
311 311 self.write_err(*msg)
312 312 def note(self, *msg):
313 313 if self.verbose: self.write(*msg)
314 314 def debug(self, *msg):
315 315 if self.debugflag: self.write(*msg)
316 316 def edit(self, text, user):
317 317 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
318 318 text=True)
319 319 try:
320 320 f = os.fdopen(fd, "w")
321 321 f.write(text)
322 322 f.close()
323 323
324 324 editor = self.geteditor()
325 325
326 326 util.system("%s \"%s\"" % (editor, name),
327 327 environ={'HGUSER': user},
328 328 onerr=util.Abort, errprefix=_("edit failed"))
329 329
330 330 f = open(name)
331 331 t = f.read()
332 332 f.close()
333 333 finally:
334 334 os.unlink(name)
335 335
336 336 return t
337 337
338 338 def traceback(self):
339 339 '''print exception traceback if traceback printing enabled.
340 340 only to call in exception handler. returns true if traceback
341 341 printed.'''
342 342 if self._traceback:
343 343 traceback.print_exc()
344 344 return self._traceback
345 345
346 346 def geteditor(self):
347 347 '''return editor to use'''
348 348 return (os.environ.get("HGEDITOR") or
349 349 self.config("ui", "editor") or
350 350 os.environ.get("VISUAL") or
351 351 os.environ.get("EDITOR", "vi"))
352 352
353 353 def progress(self, topic, pos, item="", unit="", total=None):
354 354 '''show a progress message
355 355
356 356 With stock hg, this is simply a debug message that is hidden
357 357 by default, but with extensions or GUI tools it may be
358 358 visible. 'topic' is the current operation, 'item' is a
359 359 non-numeric marker of the current position (ie the currently
360 360 in-process file), 'pos' is the current numeric position (ie
361 361 revision, bytes, etc.), units is a corresponding unit label,
362 362 and total is the highest expected pos.
363 363
364 364 Multiple nested topics may be active at a time. All topics
365 365 should be marked closed by setting pos to None at termination.
366 366 '''
367 367
368 368 if pos == None or not self.debugflag:
369 369 return
370 370
371 371 if units:
372 372 units = ' ' + units
373 373 if item:
374 374 item = ' ' + item
375 375
376 376 if total:
377 377 pct = 100.0 * pos / total
378 378 ui.debug('%s:%s %s/%s%s (%4.2g%%)\n'
379 379 % (topic, item, pos, total, units, pct))
380 380 else:
381 381 ui.debug('%s:%s %s%s\n' % (topic, item, pos, units))
General Comments 0
You need to be logged in to leave comments. Login now