merge with backout
Benoit Boissinot -
r3593:9bfb3c04 merge default
@@ -1,3550 +1,3551 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup")
16 16 demandload(globals(), "cmdutil hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 24 modified, added, removed, deleted = repo.status()[:4]
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def logmessage(opts):
35 35 """get the log message according to the -m and -l options"""
36 36 message = opts['message']
37 37 logfile = opts['logfile']
38 38
39 39 if message and logfile:
40 40 raise util.Abort(_('options --message and --logfile are mutually '
41 41 'exclusive'))
42 42 if not message and logfile:
43 43 try:
44 44 if logfile == '-':
45 45 message = sys.stdin.read()
46 46 else:
47 47 message = open(logfile).read()
48 48 except IOError, inst:
49 49 raise util.Abort(_("can't read commit message '%s': %s") %
50 50 (logfile, inst.strerror))
51 51 return message
52 52
53 53 def walkchangerevs(ui, repo, pats, change, opts):
54 54 '''Iterate over files and the revs they changed in.
55 55
56 56 Callers most commonly need to iterate backwards over the history
57 57 they are interested in. Doing so has awful (quadratic-looking)
58 58 performance, so we use iterators in a "windowed" way.
59 59
60 60 We walk a window of revisions in the desired order. Within the
61 61 window, we first walk forwards to gather data, then in the desired
62 62 order (usually backwards) to display it.
63 63
64 64 This function returns an (iterator, matchfn) tuple. The iterator
65 65 yields 3-tuples. They will be of one of the following forms:
66 66
67 67 "window", incrementing, lastrev: stepping through a window,
68 68 positive if walking forwards through revs, last rev in the
69 69 sequence iterated over - use to reset state for the current window
70 70
71 71 "add", rev, fns: out-of-order traversal of the given file names
72 72 fns, which changed during revision rev - use to gather data for
73 73 possible display
74 74
75 75 "iter", rev, None: in-order traversal of the revs earlier iterated
76 76 over with "add" - use to display data'''
77 77
78 78 def increasing_windows(start, end, windowsize=8, sizelimit=512):
79 79 if start < end:
80 80 while start < end:
81 81 yield start, min(windowsize, end-start)
82 82 start += windowsize
83 83 if windowsize < sizelimit:
84 84 windowsize *= 2
85 85 else:
86 86 while start > end:
87 87 yield start, min(windowsize, start-end-1)
88 88 start -= windowsize
89 89 if windowsize < sizelimit:
90 90 windowsize *= 2
91 91
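# Illustrative note (editorial, not part of commands.py): with the defaults
# above, increasing_windows doubles the window size on each step up to
# sizelimit, so the yielded (start, size) pairs grow exponentially, e.g.
#   list(increasing_windows(0, 20))        -> [(0, 8), (8, 12)]
#   list(increasing_windows(10, nullrev))  -> [(10, 8), (2, 2)]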
92 92 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
93 93 follow = opts.get('follow') or opts.get('follow_first')
94 94
95 95 if repo.changelog.count() == 0:
96 96 return [], matchfn
97 97
98 98 if follow:
99 99 defrange = '%s:0' % repo.changectx().rev()
100 100 else:
101 101 defrange = 'tip:0'
102 102 revs = cmdutil.revrange(ui, repo, opts['rev'] or [defrange])
103 103 wanted = {}
104 104 slowpath = anypats
105 105 fncache = {}
106 106
107 107 if not slowpath and not files:
108 108 # No files, no patterns. Display all revs.
109 109 wanted = dict.fromkeys(revs)
110 110 copies = []
111 111 if not slowpath:
112 112 # Only files, no patterns. Check the history of each file.
113 113 def filerevgen(filelog, node):
114 114 cl_count = repo.changelog.count()
115 115 if node is None:
116 116 last = filelog.count() - 1
117 117 else:
118 118 last = filelog.rev(node)
119 119 for i, window in increasing_windows(last, nullrev):
120 120 revs = []
121 121 for j in xrange(i - window, i + 1):
122 122 n = filelog.node(j)
123 123 revs.append((filelog.linkrev(n),
124 124 follow and filelog.renamed(n)))
125 125 revs.reverse()
126 126 for rev in revs:
127 127 # only yield revs for which we have the changelog; this can
128 128 # happen while doing "hg log" during a pull or commit
129 129 if rev[0] < cl_count:
130 130 yield rev
131 131 def iterfiles():
132 132 for filename in files:
133 133 yield filename, None
134 134 for filename_node in copies:
135 135 yield filename_node
136 136 minrev, maxrev = min(revs), max(revs)
137 137 for file_, node in iterfiles():
138 138 filelog = repo.file(file_)
139 139 # A zero count may be a directory or deleted file, so
140 140 # try to find matching entries on the slow path.
141 141 if filelog.count() == 0:
142 142 slowpath = True
143 143 break
144 144 for rev, copied in filerevgen(filelog, node):
145 145 if rev <= maxrev:
146 146 if rev < minrev:
147 147 break
148 148 fncache.setdefault(rev, [])
149 149 fncache[rev].append(file_)
150 150 wanted[rev] = 1
151 151 if follow and copied:
152 152 copies.append(copied)
153 153 if slowpath:
154 154 if follow:
155 155 raise util.Abort(_('can only follow copies/renames for explicit '
156 156 'file names'))
157 157
158 158 # The slow path checks files modified in every changeset.
159 159 def changerevgen():
160 160 for i, window in increasing_windows(repo.changelog.count()-1,
161 161 nullrev):
162 162 for j in xrange(i - window, i + 1):
163 163 yield j, change(j)[3]
164 164
165 165 for rev, changefiles in changerevgen():
166 166 matches = filter(matchfn, changefiles)
167 167 if matches:
168 168 fncache[rev] = matches
169 169 wanted[rev] = 1
170 170
171 171 class followfilter:
172 172 def __init__(self, onlyfirst=False):
173 173 self.startrev = nullrev
174 174 self.roots = []
175 175 self.onlyfirst = onlyfirst
176 176
177 177 def match(self, rev):
178 178 def realparents(rev):
179 179 if self.onlyfirst:
180 180 return repo.changelog.parentrevs(rev)[0:1]
181 181 else:
182 182 return filter(lambda x: x != nullrev,
183 183 repo.changelog.parentrevs(rev))
184 184
185 185 if self.startrev == nullrev:
186 186 self.startrev = rev
187 187 return True
188 188
189 189 if rev > self.startrev:
190 190 # forward: all descendants
191 191 if not self.roots:
192 192 self.roots.append(self.startrev)
193 193 for parent in realparents(rev):
194 194 if parent in self.roots:
195 195 self.roots.append(rev)
196 196 return True
197 197 else:
198 198 # backwards: all parents
199 199 if not self.roots:
200 200 self.roots.extend(realparents(self.startrev))
201 201 if rev in self.roots:
202 202 self.roots.remove(rev)
203 203 self.roots.extend(realparents(rev))
204 204 return True
205 205
206 206 return False
207 207
208 208 # it might be worthwhile to do this in the iterator if the rev range
209 209 # is descending and the prune args are all within that range
210 210 for rev in opts.get('prune', ()):
211 211 rev = repo.changelog.rev(repo.lookup(rev))
212 212 ff = followfilter()
213 213 stop = min(revs[0], revs[-1])
214 214 for x in xrange(rev, stop-1, -1):
215 215 if ff.match(x) and x in wanted:
216 216 del wanted[x]
217 217
218 218 def iterate():
219 219 if follow and not files:
220 220 ff = followfilter(onlyfirst=opts.get('follow_first'))
221 221 def want(rev):
222 222 if ff.match(rev) and rev in wanted:
223 223 return True
224 224 return False
225 225 else:
226 226 def want(rev):
227 227 return rev in wanted
228 228
229 229 for i, window in increasing_windows(0, len(revs)):
230 230 yield 'window', revs[0] < revs[-1], revs[-1]
231 231 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
232 232 srevs = list(nrevs)
233 233 srevs.sort()
234 234 for rev in srevs:
235 235 fns = fncache.get(rev)
236 236 if not fns:
237 237 def fns_generator():
238 238 for f in change(rev)[3]:
239 239 if matchfn(f):
240 240 yield f
241 241 fns = fns_generator()
242 242 yield 'add', rev, fns
243 243 for rev in nrevs:
244 244 yield 'iter', rev, None
245 245 return iterate(), matchfn
246 246
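# Illustrative sketch (editorial, not part of commands.py): a minimal consumer
# of the ('window', 'add', 'iter') protocol documented in the walkchangerevs()
# docstring above.  ui, repo, pats and opts are assumed to be set up the same
# way the log and grep commands set them up.
def _walkdemo(ui, repo, pats, opts):
    getchange = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
    gathered = {}                      # rev -> file names, filled out of order
    for st, rev, fns in changeiter:
        if st == 'window':
            gathered.clear()           # reset state for the new window
        elif st == 'add':
            gathered[rev] = list(fns)  # gather data for possible display
        elif st == 'iter':
            # revs arrive here in the requested (usually backwards) order
            ui.write("%d: %s\n" % (rev, " ".join(gathered.get(rev, []))))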
247 247 def write_bundle(cg, filename=None, compress=True):
248 248 """Write a bundle file and return its filename.
249 249
250 250 Existing files will not be overwritten.
251 251 If no filename is specified, a temporary file is created.
252 252 bz2 compression can be turned off.
253 253 The bundle file will be deleted in case of errors.
254 254 """
255 255 class nocompress(object):
256 256 def compress(self, x):
257 257 return x
258 258 def flush(self):
259 259 return ""
260 260
261 261 fh = None
262 262 cleanup = None
263 263 try:
264 264 if filename:
265 265 if os.path.exists(filename):
266 266 raise util.Abort(_("file '%s' already exists") % filename)
267 267 fh = open(filename, "wb")
268 268 else:
269 269 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
270 270 fh = os.fdopen(fd, "wb")
271 271 cleanup = filename
272 272
273 273 if compress:
274 274 fh.write("HG10")
275 275 z = bz2.BZ2Compressor(9)
276 276 else:
277 277 fh.write("HG10UN")
278 278 z = nocompress()
279 279 # parse the changegroup data, otherwise we will block
280 280 # in case of sshrepo because we don't know the end of the stream
281 281
282 282 # an empty chunkiter is the end of the changegroup
283 283 empty = False
284 284 while not empty:
285 285 empty = True
286 286 for chunk in changegroup.chunkiter(cg):
287 287 empty = False
288 288 fh.write(z.compress(changegroup.genchunk(chunk)))
289 289 fh.write(z.compress(changegroup.closechunk()))
290 290 fh.write(z.flush())
291 291 cleanup = None
292 292 return filename
293 293 finally:
294 294 if fh is not None:
295 295 fh.close()
296 296 if cleanup is not None:
297 297 os.unlink(cleanup)
298 298
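# Illustrative sketch (editorial, not part of commands.py): the header written
# by write_bundle() above is enough to tell a compressed bundle from an
# uncompressed one after the fact.
def _bundlecompression(filename):
    """Return 'none' for an HG10UN bundle, 'bz2' for a plain HG10 bundle."""
    header = open(filename, "rb").read(6)
    if header == "HG10UN":
        return 'none'
    if header.startswith("HG10"):
        return 'bz2'
    raise util.Abort(_("%s: not a Mercurial bundle file") % filename)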
299 299 class changeset_printer(object):
300 300 '''show changeset information when templating is not requested.'''
301 301
302 302 def __init__(self, ui, repo):
303 303 self.ui = ui
304 304 self.repo = repo
305 305
306 306 def show(self, rev=0, changenode=None, brinfo=None, copies=None):
307 307 '''show a single changeset or file revision'''
308 308 log = self.repo.changelog
309 309 if changenode is None:
310 310 changenode = log.node(rev)
311 311 elif not rev:
312 312 rev = log.rev(changenode)
313 313
314 314 if self.ui.quiet:
315 315 self.ui.write("%d:%s\n" % (rev, short(changenode)))
316 316 return
317 317
318 318 changes = log.read(changenode)
319 319 date = util.datestr(changes[2])
320 320 extra = changes[5]
321 321 branch = extra.get("branch")
322 322
323 323 hexfunc = self.ui.debugflag and hex or short
324 324
325 325 parents = log.parentrevs(rev)
326 326 if not self.ui.debugflag:
327 327 parents = [p for p in parents if p != nullrev]
328 328 if len(parents) == 1 and parents[0] == rev-1:
329 329 parents = []
330 330 parents = [(p, hexfunc(log.node(p))) for p in parents]
331 331
332 332
333 333 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
334 334
335 335 if branch:
336 336 self.ui.write(_("branch: %s\n") % branch)
337 337 for tag in self.repo.nodetags(changenode):
338 338 self.ui.write(_("tag: %s\n") % tag)
339 339 for parent in parents:
340 340 self.ui.write(_("parent: %d:%s\n") % parent)
341 341
342 342 if brinfo and changenode in brinfo:
343 343 br = brinfo[changenode]
344 344 self.ui.write(_("branch: %s\n") % " ".join(br))
345 345
346 346 if self.ui.debugflag:
347 347 self.ui.write(_("manifest: %d:%s\n") %
348 348 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
349 349 self.ui.write(_("user: %s\n") % changes[1])
350 350 self.ui.write(_("date: %s\n") % date)
351 351
352 352 if self.ui.debugflag:
353 353 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
354 354 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
355 355 files):
356 356 if value:
357 357 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
358 358 elif changes[3] and self.ui.verbose:
359 359 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
360 360 if copies and self.ui.verbose:
361 361 copies = ['%s (%s)' % c for c in copies]
362 362 self.ui.write(_("copies: %s\n") % ' '.join(copies))
363 363
364 364 if extra and self.ui.debugflag:
365 365 extraitems = extra.items()
366 366 extraitems.sort()
367 367 for key, value in extraitems:
368 368 self.ui.write(_("extra: %s=%s\n")
369 369 % (key, value.encode('string_escape')))
370 370
371 371 description = changes[4].strip()
372 372 if description:
373 373 if self.ui.verbose:
374 374 self.ui.write(_("description:\n"))
375 375 self.ui.write(description)
376 376 self.ui.write("\n\n")
377 377 else:
378 378 self.ui.write(_("summary: %s\n") %
379 379 description.splitlines()[0])
380 380 self.ui.write("\n")
381 381
382 382 def show_changeset(ui, repo, opts):
383 383 """show one changeset using template or regular display.
384 384
385 385 Display format will be the first non-empty hit of:
386 386 1. option 'template'
387 387 2. option 'style'
388 388 3. [ui] setting 'logtemplate'
389 389 4. [ui] setting 'style'
390 390 If all of these values are either unset or the empty string,
391 391 regular display via changeset_printer() is done.
392 392 """
393 393 # options
394 394 tmpl = opts.get('template')
395 395 mapfile = None
396 396 if tmpl:
397 397 tmpl = templater.parsestring(tmpl, quoted=False)
398 398 else:
399 399 mapfile = opts.get('style')
400 400 # ui settings
401 401 if not mapfile:
402 402 tmpl = ui.config('ui', 'logtemplate')
403 403 if tmpl:
404 404 tmpl = templater.parsestring(tmpl)
405 405 else:
406 406 mapfile = ui.config('ui', 'style')
407 407
408 408 if tmpl or mapfile:
409 409 if mapfile:
410 410 if not os.path.split(mapfile)[0]:
411 411 mapname = (templater.templatepath('map-cmdline.' + mapfile)
412 412 or templater.templatepath(mapfile))
413 413 if mapname: mapfile = mapname
414 414 try:
415 415 t = templater.changeset_templater(ui, repo, mapfile)
416 416 except SyntaxError, inst:
417 417 raise util.Abort(inst.args[0])
418 418 if tmpl: t.use_template(tmpl)
419 419 return t
420 420 return changeset_printer(ui, repo)
421 421
422 422 def setremoteconfig(ui, opts):
423 423 "copy remote options to ui tree"
424 424 if opts.get('ssh'):
425 425 ui.setconfig("ui", "ssh", opts['ssh'])
426 426 if opts.get('remotecmd'):
427 427 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
428 428
429 429 def show_version(ui):
430 430 """output version and copyright information"""
431 431 ui.write(_("Mercurial Distributed SCM (version %s)\n")
432 432 % version.get_version())
433 433 ui.status(_(
434 434 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
435 435 "This is free software; see the source for copying conditions. "
436 436 "There is NO\nwarranty; "
437 437 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
438 438 ))
439 439
440 440 def help_(ui, name=None, with_version=False):
441 441 """show help for a command, extension, or list of commands
442 442
443 443 With no arguments, print a list of commands and short help.
444 444
445 445 Given a command name, print help for that command.
446 446
447 447 Given an extension name, print help for that extension, and the
448 448 commands it provides."""
449 449 option_lists = []
450 450
451 451 def helpcmd(name):
452 452 if with_version:
453 453 show_version(ui)
454 454 ui.write('\n')
455 455 aliases, i = findcmd(ui, name)
456 456 # synopsis
457 457 ui.write("%s\n\n" % i[2])
458 458
459 459 # description
460 460 doc = i[0].__doc__
461 461 if not doc:
462 462 doc = _("(No help text available)")
463 463 if ui.quiet:
464 464 doc = doc.splitlines(0)[0]
465 465 ui.write("%s\n" % doc.rstrip())
466 466
467 467 if not ui.quiet:
468 468 # aliases
469 469 if len(aliases) > 1:
470 470 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
471 471
472 472 # options
473 473 if i[1]:
474 474 option_lists.append(("options", i[1]))
475 475
476 476 def helplist(select=None):
477 477 h = {}
478 478 cmds = {}
479 479 for c, e in table.items():
480 480 f = c.split("|", 1)[0]
481 481 if select and not select(f):
482 482 continue
483 483 if name == "shortlist" and not f.startswith("^"):
484 484 continue
485 485 f = f.lstrip("^")
486 486 if not ui.debugflag and f.startswith("debug"):
487 487 continue
488 488 doc = e[0].__doc__
489 489 if not doc:
490 490 doc = _("(No help text available)")
491 491 h[f] = doc.splitlines(0)[0].rstrip()
492 492 cmds[f] = c.lstrip("^")
493 493
494 494 fns = h.keys()
495 495 fns.sort()
496 496 m = max(map(len, fns))
497 497 for f in fns:
498 498 if ui.verbose:
499 499 commands = cmds[f].replace("|",", ")
500 500 ui.write(" %s:\n %s\n"%(commands, h[f]))
501 501 else:
502 502 ui.write(' %-*s %s\n' % (m, f, h[f]))
503 503
504 504 def helpext(name):
505 505 try:
506 506 mod = findext(name)
507 507 except KeyError:
508 508 raise UnknownCommand(name)
509 509
510 510 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
511 511 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
512 512 for d in doc[1:]:
513 513 ui.write(d, '\n')
514 514
515 515 ui.status('\n')
516 516 if ui.verbose:
517 517 ui.status(_('list of commands:\n\n'))
518 518 else:
519 519 ui.status(_('list of commands (use "hg help -v %s" '
520 520 'to show aliases and global options):\n\n') % name)
521 521
522 522 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
523 523 helplist(modcmds.has_key)
524 524
525 525 if name and name != 'shortlist':
526 526 try:
527 527 helpcmd(name)
528 528 except UnknownCommand:
529 529 helpext(name)
530 530
531 531 else:
532 532 # program name
533 533 if ui.verbose or with_version:
534 534 show_version(ui)
535 535 else:
536 536 ui.status(_("Mercurial Distributed SCM\n"))
537 537 ui.status('\n')
538 538
539 539 # list of commands
540 540 if name == "shortlist":
541 541 ui.status(_('basic commands (use "hg help" '
542 542 'for the full list or option "-v" for details):\n\n'))
543 543 elif ui.verbose:
544 544 ui.status(_('list of commands:\n\n'))
545 545 else:
546 546 ui.status(_('list of commands (use "hg help -v" '
547 547 'to show aliases and global options):\n\n'))
548 548
549 549 helplist()
550 550
551 551 # global options
552 552 if ui.verbose:
553 553 option_lists.append(("global options", globalopts))
554 554
555 555 # list all option lists
556 556 opt_output = []
557 557 for title, options in option_lists:
558 558 opt_output.append(("\n%s:\n" % title, None))
559 559 for shortopt, longopt, default, desc in options:
560 560 if "DEPRECATED" in desc and not ui.verbose: continue
561 561 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
562 562 longopt and " --%s" % longopt),
563 563 "%s%s" % (desc,
564 564 default
565 565 and _(" (default: %s)") % default
566 566 or "")))
567 567
568 568 if opt_output:
569 569 opts_len = max([len(line[0]) for line in opt_output if line[1]])
570 570 for first, second in opt_output:
571 571 if second:
572 572 ui.write(" %-*s %s\n" % (opts_len, first, second))
573 573 else:
574 574 ui.write("%s\n" % first)
575 575
576 576 # Commands start here, listed alphabetically
577 577
578 578 def add(ui, repo, *pats, **opts):
579 579 """add the specified files on the next commit
580 580
581 581 Schedule files to be version controlled and added to the repository.
582 582
583 583 The files will be added to the repository at the next commit.
584 584
585 585 If no names are given, add all files in the repository.
586 586 """
587 587
588 588 names = []
589 589 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
590 590 if exact:
591 591 if ui.verbose:
592 592 ui.status(_('adding %s\n') % rel)
593 593 names.append(abs)
594 594 elif repo.dirstate.state(abs) == '?':
595 595 ui.status(_('adding %s\n') % rel)
596 596 names.append(abs)
597 597 if not opts.get('dry_run'):
598 598 repo.add(names)
599 599
600 600 def addremove(ui, repo, *pats, **opts):
601 601 """add all new files, delete all missing files
602 602
603 603 Add all new files and remove all missing files from the repository.
604 604
605 605 New files are ignored if they match any of the patterns in .hgignore. As
606 606 with add, these changes take effect at the next commit.
607 607
608 608 Use the -s option to detect renamed files. With a parameter > 0,
609 609 this compares every removed file with every added file and records
610 610 those similar enough as renames. This option takes a percentage
611 611 between 0 (disabled) and 100 (files must be identical) as its
612 612 parameter. Detecting renamed files this way can be expensive.
613 613 """
614 614 sim = float(opts.get('similarity') or 0)
615 615 if sim < 0 or sim > 100:
616 616 raise util.Abort(_('similarity must be between 0 and 100'))
617 617 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
618 618
619 619 def annotate(ui, repo, *pats, **opts):
620 620 """show changeset information per file line
621 621
622 622 List changes in files, showing the revision id responsible for each line.
623 623
624 624 This command is useful to discover who did a change or when a change took
625 625 place.
626 626
627 627 Without the -a option, annotate will avoid processing files it
628 628 detects as binary. With -a, annotate will generate an annotation
629 629 anyway, probably with undesirable results.
630 630 """
631 631 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
632 632
633 633 if not pats:
634 634 raise util.Abort(_('at least one file name or pattern required'))
635 635
636 636 opmap = [['user', lambda x: ui.shortuser(x.user())],
637 637 ['number', lambda x: str(x.rev())],
638 638 ['changeset', lambda x: short(x.node())],
639 639 ['date', getdate], ['follow', lambda x: x.path()]]
640 640 if (not opts['user'] and not opts['changeset'] and not opts['date']
641 641 and not opts['follow']):
642 642 opts['number'] = 1
643 643
644 644 ctx = repo.changectx(opts['rev'])
645 645
646 646 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
647 647 node=ctx.node()):
648 648 fctx = ctx.filectx(abs)
649 649 if not opts['text'] and util.binary(fctx.data()):
650 650 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
651 651 continue
652 652
653 653 lines = fctx.annotate(follow=opts.get('follow'))
654 654 pieces = []
655 655
656 656 for o, f in opmap:
657 657 if opts[o]:
658 658 l = [f(n) for n, dummy in lines]
659 659 if l:
660 660 m = max(map(len, l))
661 661 pieces.append(["%*s" % (m, x) for x in l])
662 662
663 663 if pieces:
664 664 for p, l in zip(zip(*pieces), lines):
665 665 ui.write("%s: %s" % (" ".join(p), l[1]))
666 666
667 667 def archive(ui, repo, dest, **opts):
668 668 '''create unversioned archive of a repository revision
669 669
670 670 By default, the revision used is the parent of the working
671 671 directory; use "-r" to specify a different revision.
672 672
673 673 To specify the type of archive to create, use "-t". Valid
674 674 types are:
675 675
676 676 "files" (default): a directory full of files
677 677 "tar": tar archive, uncompressed
678 678 "tbz2": tar archive, compressed using bzip2
679 679 "tgz": tar archive, compressed using gzip
680 680 "uzip": zip archive, uncompressed
681 681 "zip": zip archive, compressed using deflate
682 682
683 683 The exact name of the destination archive or directory is given
684 684 using a format string; see "hg help export" for details.
685 685
686 686 Each member added to an archive file has a directory prefix
687 687 prepended. Use "-p" to specify a format string for the prefix.
688 688 The default is the basename of the archive, with suffixes removed.
689 689 '''
690 690
691 691 node = repo.changectx(opts['rev']).node()
692 692 dest = cmdutil.make_filename(repo, dest, node)
693 693 if os.path.realpath(dest) == repo.root:
694 694 raise util.Abort(_('repository root cannot be destination'))
695 695 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
696 696 kind = opts.get('type') or 'files'
697 697 prefix = opts['prefix']
698 698 if dest == '-':
699 699 if kind == 'files':
700 700 raise util.Abort(_('cannot archive plain files to stdout'))
701 701 dest = sys.stdout
702 702 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
703 703 prefix = cmdutil.make_filename(repo, prefix, node)
704 704 archival.archive(repo, dest, node, kind, not opts['no_decode'],
705 705 matchfn, prefix)
706 706
707 707 def backout(ui, repo, rev, **opts):
708 708 '''reverse effect of earlier changeset
709 709
710 710 Commit the backed out changes as a new changeset. The new
711 711 changeset is a child of the backed out changeset.
712 712
713 713 If you back out a changeset other than the tip, a new head is
714 714 created. This head is the parent of the working directory. If
715 715 you back out an old changeset, your working directory will appear
716 716 old after the backout. You should merge the backout changeset
717 717 with another head.
718 718
719 719 The --merge option remembers the parent of the working directory
720 720 before starting the backout, then merges the new head with that
721 721 changeset afterwards. This saves you from doing the merge by
722 722 hand. The result of this merge is not committed, as for a normal
723 723 merge.'''
724 724
725 725 bail_if_changed(repo)
726 726 op1, op2 = repo.dirstate.parents()
727 727 if op2 != nullid:
728 728 raise util.Abort(_('outstanding uncommitted merge'))
729 729 node = repo.lookup(rev)
730 730 p1, p2 = repo.changelog.parents(node)
731 731 if p1 == nullid:
732 732 raise util.Abort(_('cannot back out a change with no parents'))
733 733 if p2 != nullid:
734 734 if not opts['parent']:
735 735 raise util.Abort(_('cannot back out a merge changeset without '
736 736 '--parent'))
737 737 p = repo.lookup(opts['parent'])
738 738 if p not in (p1, p2):
739 739 raise util.Abort(_('%s is not a parent of %s') %
740 740 (short(p), short(node)))
741 741 parent = p
742 742 else:
743 743 if opts['parent']:
744 744 raise util.Abort(_('cannot use --parent on non-merge changeset'))
745 745 parent = p1
746 746 hg.clean(repo, node, show_stats=False)
747 747 revert_opts = opts.copy()
748 748 revert_opts['all'] = True
749 749 revert_opts['rev'] = hex(parent)
750 750 revert(ui, repo, **revert_opts)
751 751 commit_opts = opts.copy()
752 752 commit_opts['addremove'] = False
753 753 if not commit_opts['message'] and not commit_opts['logfile']:
754 754 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
755 755 commit_opts['force_editor'] = True
756 756 commit(ui, repo, **commit_opts)
757 757 def nice(node):
758 758 return '%d:%s' % (repo.changelog.rev(node), short(node))
759 759 ui.status(_('changeset %s backs out changeset %s\n') %
760 760 (nice(repo.changelog.tip()), nice(node)))
761 761 if op1 != node:
762 762 if opts['merge']:
763 763 ui.status(_('merging with changeset %s\n') % nice(op1))
764 764 n = _lookup(repo, hex(op1))
765 765 hg.merge(repo, n)
766 766 else:
767 767 ui.status(_('the backout changeset is a new head - '
768 768 'do not forget to merge\n'))
769 769 ui.status(_('(use "backout --merge" '
770 770 'if you want to auto-merge)\n'))
771 771
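# Illustrative note (editorial): typical use of the backout command above,
# with a hypothetical revision number 42:
#   hg backout 42              # commit the reversal; may create a new head
#   hg merge                   # then merge that head back by hand, or instead
#   hg backout --merge 42      # let backout merge with the old parent for you
# In both cases the resulting merge is left uncommitted, as for a normal merge.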
772 772 def branch(ui, repo, label=None):
773 773 """set or show the current branch name
774 774
775 775 With <name>, set the current branch name. Otherwise, show the
776 776 current branch name.
777 777 """
778 778
779 779 if label is not None:
780 780 repo.opener("branch", "w").write(label)
781 781 else:
782 782 b = repo.workingctx().branch()
783 783 if b:
784 784 ui.write("%s\n" % b)
785 785
786 786 def branches(ui, repo):
787 787 """list repository named branches
788 788
789 789 List the repository's named branches.
790 790 """
791 791 b = repo.branchtags()
792 792 l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
793 793 l.sort()
794 794 for r, n, t in l:
795 795 hexfunc = ui.debugflag and hex or short
796 796 if ui.quiet:
797 797 ui.write("%s\n" % t)
798 798 else:
799 799 ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
800 800
801 801 def bundle(ui, repo, fname, dest=None, **opts):
802 802 """create a changegroup file
803 803
804 804 Generate a compressed changegroup file collecting changesets not
805 805 found in the other repository.
806 806
807 807 If no destination repository is specified the destination is assumed
808 808 to have all the nodes specified by one or more --base parameters.
809 809
810 810 The bundle file can then be transferred using conventional means and
811 811 applied to another repository with the unbundle or pull command.
812 812 This is useful when direct push and pull are not available or when
813 813 exporting an entire repository is undesirable.
814 814
815 815 Applying bundles preserves all changeset contents including
816 816 permissions, copy/rename information, and revision history.
817 817 """
818 818 revs = opts.get('rev') or None
819 819 if revs:
820 820 revs = [repo.lookup(rev) for rev in revs]
821 821 base = opts.get('base')
822 822 if base:
823 823 if dest:
824 824 raise util.Abort(_("--base is incompatible with specifying "
825 825 "a destination"))
826 826 base = [repo.lookup(rev) for rev in base]
827 827 # create the right base
828 828 # XXX: nodesbetween / changegroup* should be "fixed" instead
829 829 o = []
830 830 has_set = sets.Set(base)
831 831 for n in base:
832 832 has_set.update(repo.changelog.reachable(n))
833 833 if revs:
834 834 visit = list(revs)
835 835 else:
836 836 visit = repo.changelog.heads()
837 837 seen = sets.Set(visit)
838 838 while visit:
839 839 n = visit.pop(0)
840 840 parents = [p for p in repo.changelog.parents(n)
841 841 if p != nullid and p not in has_set]
842 842 if len(parents) == 0:
843 843 o.insert(0, n)
844 844 else:
845 845 for p in parents:
846 846 if p not in seen:
847 847 seen.add(p)
848 848 visit.append(p)
849 849 else:
850 850 setremoteconfig(ui, opts)
851 851 dest = ui.expandpath(dest or 'default-push', dest or 'default')
852 852 other = hg.repository(ui, dest)
853 853 o = repo.findoutgoing(other, force=opts['force'])
854 854
855 855 if revs:
856 856 cg = repo.changegroupsubset(o, revs, 'bundle')
857 857 else:
858 858 cg = repo.changegroup(o, 'bundle')
859 859 write_bundle(cg, fname)
860 860
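# Illustrative sketch (editorial, not part of commands.py): the --base handling
# above is a breadth-first walk from the requested heads that stops at anything
# reachable from a --base node.  The same idea on a plain mapping from node to
# its (parent1, parent2) pair, with None standing in for a missing parent:
def _outgoingroots(parentmap, heads, has_set):
    """Collect the bottom of the outgoing subgraph: nodes reachable from
    heads whose parents are all either missing or in has_set."""
    o, seen, visit = [], dict.fromkeys(heads), list(heads)
    while visit:
        n = visit.pop(0)
        ps = [p for p in parentmap[n] if p is not None and p not in has_set]
        if not ps:
            o.insert(0, n)
        else:
            for p in ps:
                if p not in seen:
                    seen[p] = None
                    visit.append(p)
    return o
# For example, with the chain a -> b -> c and a as the only --base node:
#   _outgoingroots({'b': ('a', None), 'c': ('b', None)}, ['c'], {'a': None})
#   returns ['b'], the root of the part that the bundle has to carry.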
861 861 def cat(ui, repo, file1, *pats, **opts):
862 862 """output the latest or given revisions of files
863 863
864 864 Print the specified files as they were at the given revision.
865 865 If no revision is given, the working directory parent is used, or
866 866 tip if no revision is checked out.
867 867
868 868 Output may be to a file, in which case the name of the file is
869 869 given using a format string. The formatting rules are the same as
870 870 for the export command, with the following additions:
871 871
872 872 %s basename of file being printed
873 873 %d dirname of file being printed, or '.' if in repo root
874 874 %p root-relative path name of file being printed
875 875 """
876 876 ctx = repo.changectx(opts['rev'])
877 877 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
878 878 ctx.node()):
879 879 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
880 880 fp.write(ctx.filectx(abs).data())
881 881
882 882 def clone(ui, source, dest=None, **opts):
883 883 """make a copy of an existing repository
884 884
885 885 Create a copy of an existing repository in a new directory.
886 886
887 887 If no destination directory name is specified, it defaults to the
888 888 basename of the source.
889 889
890 890 The location of the source is added to the new repository's
891 891 .hg/hgrc file, as the default to be used for future pulls.
892 892
893 893 For efficiency, hardlinks are used for cloning whenever the source
894 894 and destination are on the same filesystem (note this applies only
895 895 to the repository data, not to the checked out files). Some
896 896 filesystems, such as AFS, implement hardlinking incorrectly, but
897 897 do not report errors. In these cases, use the --pull option to
898 898 avoid hardlinking.
899 899
900 900 You can safely clone repositories and checked out files using full
901 901 hardlinks with
902 902
903 903 $ cp -al REPO REPOCLONE
904 904
905 905 which is the fastest way to clone. However, the operation is not
906 906 atomic (making sure REPO is not modified during the operation is
907 907 up to you) and you have to make sure your editor breaks hardlinks
908 908 (Emacs and most Linux Kernel tools do so).
909 909
910 910 If you use the -r option to clone up to a specific revision, no
911 911 subsequent revisions will be present in the cloned repository.
912 912 This option implies --pull, even on local repositories.
913 913
914 914 See pull for valid source format details.
915 915
916 916 It is possible to specify an ssh:// URL as the destination, but no
917 .hg/hgrc will be created on the remote side. Look at the help text
918 for the pull command for important details about ssh:// URLs.
917 .hg/hgrc and working directory will be created on the remote side.
918 Look at the help text for the pull command for important details
919 about ssh:// URLs.
919 920 """
920 921 setremoteconfig(ui, opts)
921 922 hg.clone(ui, ui.expandpath(source), dest,
922 923 pull=opts['pull'],
923 924 stream=opts['uncompressed'],
924 925 rev=opts['rev'],
925 926 update=not opts['noupdate'])
926 927
927 928 def commit(ui, repo, *pats, **opts):
928 929 """commit the specified files or all outstanding changes
929 930
930 931 Commit changes to the given files into the repository.
931 932
932 933 If a list of files is omitted, all changes reported by "hg status"
933 934 will be committed.
934 935
935 936 If no commit message is specified, the editor configured in your hgrc
936 937 or in the EDITOR environment variable is started to enter a message.
937 938 """
938 939 message = logmessage(opts)
939 940
940 941 if opts['addremove']:
941 942 cmdutil.addremove(repo, pats, opts)
942 943 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
943 944 if pats:
944 945 modified, added, removed = repo.status(files=fns, match=match)[:3]
945 946 files = modified + added + removed
946 947 else:
947 948 files = []
948 949 try:
949 950 repo.commit(files, message, opts['user'], opts['date'], match,
950 951 force_editor=opts.get('force_editor'))
951 952 except ValueError, inst:
952 953 raise util.Abort(str(inst))
953 954
954 955 def docopy(ui, repo, pats, opts, wlock):
955 956 # called with the repo lock held
956 957 cwd = repo.getcwd()
957 958 errors = 0
958 959 copied = []
959 960 targets = {}
960 961
961 962 def okaytocopy(abs, rel, exact):
962 963 reasons = {'?': _('is not managed'),
963 964 'a': _('has been marked for add'),
964 965 'r': _('has been marked for remove')}
965 966 state = repo.dirstate.state(abs)
966 967 reason = reasons.get(state)
967 968 if reason:
968 969 if state == 'a':
969 970 origsrc = repo.dirstate.copied(abs)
970 971 if origsrc is not None:
971 972 return origsrc
972 973 if exact:
973 974 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
974 975 else:
975 976 return abs
976 977
977 978 def copy(origsrc, abssrc, relsrc, target, exact):
978 979 abstarget = util.canonpath(repo.root, cwd, target)
979 980 reltarget = util.pathto(cwd, abstarget)
980 981 prevsrc = targets.get(abstarget)
981 982 if prevsrc is not None:
982 983 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
983 984 (reltarget, abssrc, prevsrc))
984 985 return
985 986 if (not opts['after'] and os.path.exists(reltarget) or
986 987 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
987 988 if not opts['force']:
988 989 ui.warn(_('%s: not overwriting - file exists\n') %
989 990 reltarget)
990 991 return
991 992 if not opts['after'] and not opts.get('dry_run'):
992 993 os.unlink(reltarget)
993 994 if opts['after']:
994 995 if not os.path.exists(reltarget):
995 996 return
996 997 else:
997 998 targetdir = os.path.dirname(reltarget) or '.'
998 999 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
999 1000 os.makedirs(targetdir)
1000 1001 try:
1001 1002 restore = repo.dirstate.state(abstarget) == 'r'
1002 1003 if restore and not opts.get('dry_run'):
1003 1004 repo.undelete([abstarget], wlock)
1004 1005 try:
1005 1006 if not opts.get('dry_run'):
1006 1007 shutil.copyfile(relsrc, reltarget)
1007 1008 shutil.copymode(relsrc, reltarget)
1008 1009 restore = False
1009 1010 finally:
1010 1011 if restore:
1011 1012 repo.remove([abstarget], wlock)
1012 1013 except shutil.Error, inst:
1013 1014 raise util.Abort(str(inst))
1014 1015 except IOError, inst:
1015 1016 if inst.errno == errno.ENOENT:
1016 1017 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1017 1018 else:
1018 1019 ui.warn(_('%s: cannot copy - %s\n') %
1019 1020 (relsrc, inst.strerror))
1020 1021 errors += 1
1021 1022 return
1022 1023 if ui.verbose or not exact:
1023 1024 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1024 1025 targets[abstarget] = abssrc
1025 1026 if abstarget != origsrc and not opts.get('dry_run'):
1026 1027 repo.copy(origsrc, abstarget, wlock)
1027 1028 copied.append((abssrc, relsrc, exact))
1028 1029
1029 1030 def targetpathfn(pat, dest, srcs):
1030 1031 if os.path.isdir(pat):
1031 1032 abspfx = util.canonpath(repo.root, cwd, pat)
1032 1033 if destdirexists:
1033 1034 striplen = len(os.path.split(abspfx)[0])
1034 1035 else:
1035 1036 striplen = len(abspfx)
1036 1037 if striplen:
1037 1038 striplen += len(os.sep)
1038 1039 res = lambda p: os.path.join(dest, p[striplen:])
1039 1040 elif destdirexists:
1040 1041 res = lambda p: os.path.join(dest, os.path.basename(p))
1041 1042 else:
1042 1043 res = lambda p: dest
1043 1044 return res
1044 1045
1045 1046 def targetpathafterfn(pat, dest, srcs):
1046 1047 if util.patkind(pat, None)[0]:
1047 1048 # a mercurial pattern
1048 1049 res = lambda p: os.path.join(dest, os.path.basename(p))
1049 1050 else:
1050 1051 abspfx = util.canonpath(repo.root, cwd, pat)
1051 1052 if len(abspfx) < len(srcs[0][0]):
1052 1053 # A directory. Either the target path contains the last
1053 1054 # component of the source path or it does not.
1054 1055 def evalpath(striplen):
1055 1056 score = 0
1056 1057 for s in srcs:
1057 1058 t = os.path.join(dest, s[0][striplen:])
1058 1059 if os.path.exists(t):
1059 1060 score += 1
1060 1061 return score
1061 1062
1062 1063 striplen = len(abspfx)
1063 1064 if striplen:
1064 1065 striplen += len(os.sep)
1065 1066 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1066 1067 score = evalpath(striplen)
1067 1068 striplen1 = len(os.path.split(abspfx)[0])
1068 1069 if striplen1:
1069 1070 striplen1 += len(os.sep)
1070 1071 if evalpath(striplen1) > score:
1071 1072 striplen = striplen1
1072 1073 res = lambda p: os.path.join(dest, p[striplen:])
1073 1074 else:
1074 1075 # a file
1075 1076 if destdirexists:
1076 1077 res = lambda p: os.path.join(dest, os.path.basename(p))
1077 1078 else:
1078 1079 res = lambda p: dest
1079 1080 return res
1080 1081
1081 1082
1082 1083 pats = list(pats)
1083 1084 if not pats:
1084 1085 raise util.Abort(_('no source or destination specified'))
1085 1086 if len(pats) == 1:
1086 1087 raise util.Abort(_('no destination specified'))
1087 1088 dest = pats.pop()
1088 1089 destdirexists = os.path.isdir(dest)
1089 1090 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1090 1091 raise util.Abort(_('with multiple sources, destination must be an '
1091 1092 'existing directory'))
1092 1093 if opts['after']:
1093 1094 tfn = targetpathafterfn
1094 1095 else:
1095 1096 tfn = targetpathfn
1096 1097 copylist = []
1097 1098 for pat in pats:
1098 1099 srcs = []
1099 1100 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1100 1101 origsrc = okaytocopy(abssrc, relsrc, exact)
1101 1102 if origsrc:
1102 1103 srcs.append((origsrc, abssrc, relsrc, exact))
1103 1104 if not srcs:
1104 1105 continue
1105 1106 copylist.append((tfn(pat, dest, srcs), srcs))
1106 1107 if not copylist:
1107 1108 raise util.Abort(_('no files to copy'))
1108 1109
1109 1110 for targetpath, srcs in copylist:
1110 1111 for origsrc, abssrc, relsrc, exact in srcs:
1111 1112 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1112 1113
1113 1114 if errors:
1114 1115 ui.warn(_('(consider using --after)\n'))
1115 1116 return errors, copied
1116 1117
1117 1118 def copy(ui, repo, *pats, **opts):
1118 1119 """mark files as copied for the next commit
1119 1120
1120 1121 Mark dest as having copies of source files. If dest is a
1121 1122 directory, copies are put in that directory. If dest is a file,
1122 1123 there can only be one source.
1123 1124
1124 1125 By default, this command copies the contents of files as they
1125 1126 stand in the working directory. If invoked with --after, the
1126 1127 operation is recorded, but no copying is performed.
1127 1128
1128 1129 This command takes effect in the next commit.
1129 1130
1130 1131 NOTE: This command should be treated as experimental. While it
1131 1132 should properly record copied files, this information is not yet
1132 1133 fully used by merge, nor fully reported by log.
1133 1134 """
1134 1135 wlock = repo.wlock(0)
1135 1136 errs, copied = docopy(ui, repo, pats, opts, wlock)
1136 1137 return errs
1137 1138
1138 1139 def debugancestor(ui, index, rev1, rev2):
1139 1140 """find the ancestor revision of two revisions in a given index"""
1140 1141 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1141 1142 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1142 1143 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1143 1144
1144 1145 def debugcomplete(ui, cmd='', **opts):
1145 1146 """returns the completion list associated with the given command"""
1146 1147
1147 1148 if opts['options']:
1148 1149 options = []
1149 1150 otables = [globalopts]
1150 1151 if cmd:
1151 1152 aliases, entry = findcmd(ui, cmd)
1152 1153 otables.append(entry[1])
1153 1154 for t in otables:
1154 1155 for o in t:
1155 1156 if o[0]:
1156 1157 options.append('-%s' % o[0])
1157 1158 options.append('--%s' % o[1])
1158 1159 ui.write("%s\n" % "\n".join(options))
1159 1160 return
1160 1161
1161 1162 clist = findpossible(ui, cmd).keys()
1162 1163 clist.sort()
1163 1164 ui.write("%s\n" % "\n".join(clist))
1164 1165
1165 1166 def debugrebuildstate(ui, repo, rev=None):
1166 1167 """rebuild the dirstate as it would look like for the given revision"""
1167 1168 if not rev:
1168 1169 rev = repo.changelog.tip()
1169 1170 else:
1170 1171 rev = repo.lookup(rev)
1171 1172 change = repo.changelog.read(rev)
1172 1173 n = change[0]
1173 1174 files = repo.manifest.read(n)
1174 1175 wlock = repo.wlock()
1175 1176 repo.dirstate.rebuild(rev, files)
1176 1177
1177 1178 def debugcheckstate(ui, repo):
1178 1179 """validate the correctness of the current dirstate"""
1179 1180 parent1, parent2 = repo.dirstate.parents()
1180 1181 repo.dirstate.read()
1181 1182 dc = repo.dirstate.map
1182 1183 keys = dc.keys()
1183 1184 keys.sort()
1184 1185 m1n = repo.changelog.read(parent1)[0]
1185 1186 m2n = repo.changelog.read(parent2)[0]
1186 1187 m1 = repo.manifest.read(m1n)
1187 1188 m2 = repo.manifest.read(m2n)
1188 1189 errors = 0
1189 1190 for f in dc:
1190 1191 state = repo.dirstate.state(f)
1191 1192 if state in "nr" and f not in m1:
1192 1193 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1193 1194 errors += 1
1194 1195 if state in "a" and f in m1:
1195 1196 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1196 1197 errors += 1
1197 1198 if state in "m" and f not in m1 and f not in m2:
1198 1199 ui.warn(_("%s in state %s, but not in either manifest\n") %
1199 1200 (f, state))
1200 1201 errors += 1
1201 1202 for f in m1:
1202 1203 state = repo.dirstate.state(f)
1203 1204 if state not in "nrm":
1204 1205 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1205 1206 errors += 1
1206 1207 if errors:
1207 1208 error = _(".hg/dirstate inconsistent with current parent's manifest")
1208 1209 raise util.Abort(error)
1209 1210
1210 1211 def showconfig(ui, repo, *values, **opts):
1211 1212 """show combined config settings from all hgrc files
1212 1213
1213 1214 With no args, print names and values of all config items.
1214 1215
1215 1216 With one arg of the form section.name, print just the value of
1216 1217 that config item.
1217 1218
1218 1219 With multiple args, print names and values of all config items
1219 1220 with matching section names."""
1220 1221
1221 1222 untrusted = bool(opts.get('untrusted'))
1222 1223 if values:
1223 1224 if len([v for v in values if '.' in v]) > 1:
1224 1225 raise util.Abort(_('only one config item permitted'))
1225 1226 for section, name, value in ui.walkconfig(untrusted=untrusted):
1226 1227 sectname = section + '.' + name
1227 1228 if values:
1228 1229 for v in values:
1229 1230 if v == section:
1230 1231 ui.write('%s=%s\n' % (sectname, value))
1231 1232 elif v == sectname:
1232 1233 ui.write(value, '\n')
1233 1234 else:
1234 1235 ui.write('%s=%s\n' % (sectname, value))
1235 1236
1236 1237 def debugsetparents(ui, repo, rev1, rev2=None):
1237 1238 """manually set the parents of the current working directory
1238 1239
1239 1240 This is useful for writing repository conversion tools, but should
1240 1241 be used with care.
1241 1242 """
1242 1243
1243 1244 if not rev2:
1244 1245 rev2 = hex(nullid)
1245 1246
1246 1247 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1247 1248
1248 1249 def debugstate(ui, repo):
1249 1250 """show the contents of the current dirstate"""
1250 1251 repo.dirstate.read()
1251 1252 dc = repo.dirstate.map
1252 1253 keys = dc.keys()
1253 1254 keys.sort()
1254 1255 for file_ in keys:
1255 1256 ui.write("%c %3o %10d %s %s\n"
1256 1257 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1257 1258 time.strftime("%x %X",
1258 1259 time.localtime(dc[file_][3])), file_))
1259 1260 for f in repo.dirstate.copies():
1260 1261 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1261 1262
1262 1263 def debugdata(ui, file_, rev):
1263 1264 """dump the contents of a data file revision"""
1264 1265 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1265 1266 file_[:-2] + ".i", file_, 0)
1266 1267 try:
1267 1268 ui.write(r.revision(r.lookup(rev)))
1268 1269 except KeyError:
1269 1270 raise util.Abort(_('invalid revision identifier %s') % rev)
1270 1271
1271 1272 def debugindex(ui, file_):
1272 1273 """dump the contents of an index file"""
1273 1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1274 1275 ui.write(" rev offset length base linkrev" +
1275 1276 " nodeid p1 p2\n")
1276 1277 for i in xrange(r.count()):
1277 1278 node = r.node(i)
1278 1279 pp = r.parents(node)
1279 1280 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1280 1281 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1281 1282 short(node), short(pp[0]), short(pp[1])))
1282 1283
1283 1284 def debugindexdot(ui, file_):
1284 1285 """dump an index DAG as a .dot file"""
1285 1286 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1286 1287 ui.write("digraph G {\n")
1287 1288 for i in xrange(r.count()):
1288 1289 node = r.node(i)
1289 1290 pp = r.parents(node)
1290 1291 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1291 1292 if pp[1] != nullid:
1292 1293 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1293 1294 ui.write("}\n")
1294 1295
1295 1296 def debugrename(ui, repo, file, rev=None):
1296 1297 """dump rename information"""
1297 1298 r = repo.file(relpath(repo, [file])[0])
1298 1299 if rev:
1299 1300 try:
1300 1301 # assume all revision numbers are for changesets
1301 1302 n = repo.lookup(rev)
1302 1303 change = repo.changelog.read(n)
1303 1304 m = repo.manifest.read(change[0])
1304 1305 n = m[relpath(repo, [file])[0]]
1305 1306 except (hg.RepoError, KeyError):
1306 1307 n = r.lookup(rev)
1307 1308 else:
1308 1309 n = r.tip()
1309 1310 m = r.renamed(n)
1310 1311 if m:
1311 1312 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1312 1313 else:
1313 1314 ui.write(_("not renamed\n"))
1314 1315
1315 1316 def debugwalk(ui, repo, *pats, **opts):
1316 1317 """show how files match on given patterns"""
1317 1318 items = list(cmdutil.walk(repo, pats, opts))
1318 1319 if not items:
1319 1320 return
1320 1321 fmt = '%%s %%-%ds %%-%ds %%s' % (
1321 1322 max([len(abs) for (src, abs, rel, exact) in items]),
1322 1323 max([len(rel) for (src, abs, rel, exact) in items]))
1323 1324 for src, abs, rel, exact in items:
1324 1325 line = fmt % (src, abs, rel, exact and 'exact' or '')
1325 1326 ui.write("%s\n" % line.rstrip())
1326 1327
1327 1328 def diff(ui, repo, *pats, **opts):
1328 1329 """diff repository (or selected files)
1329 1330
1330 1331 Show differences between revisions for the specified files.
1331 1332
1332 1333 Differences between files are shown using the unified diff format.
1333 1334
1334 1335 When two revision arguments are given, then changes are shown
1335 1336 between those revisions. If only one revision is specified then
1336 1337 that revision is compared to the working directory, and, when no
1337 1338 revisions are specified, the working directory files are compared
1338 1339 to its parent.
1339 1340
1340 1341 Without the -a option, diff will avoid generating diffs of files
1341 1342 it detects as binary. With -a, diff will generate a diff anyway,
1342 1343 probably with undesirable results.
1343 1344 """
1344 1345 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1345 1346
1346 1347 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1347 1348
1348 1349 patch.diff(repo, node1, node2, fns, match=matchfn,
1349 1350 opts=patch.diffopts(ui, opts))
1350 1351
1351 1352 def export(ui, repo, *changesets, **opts):
1352 1353 """dump the header and diffs for one or more changesets
1353 1354
1354 1355 Print the changeset header and diffs for one or more revisions.
1355 1356
1356 1357 The information shown in the changeset header is: author,
1357 1358 changeset hash, parent and commit comment.
1358 1359
1359 1360 Output may be to a file, in which case the name of the file is
1360 1361 given using a format string. The formatting rules are as follows:
1361 1362
1362 1363 %% literal "%" character
1363 1364 %H changeset hash (40 bytes of hexadecimal)
1364 1365 %N number of patches being generated
1365 1366 %R changeset revision number
1366 1367 %b basename of the exporting repository
1367 1368 %h short-form changeset hash (12 bytes of hexadecimal)
1368 1369 %n zero-padded sequence number, starting at 1
1369 1370 %r zero-padded changeset revision number
1370 1371
1371 1372 Without the -a option, export will avoid generating diffs of files
1372 1373 it detects as binary. With -a, export will generate a diff anyway,
1373 1374 probably with undesirable results.
1374 1375
1375 1376 With the --switch-parent option, the diff will be against the second
1376 1377 parent. It can be useful to review a merge.
1377 1378 """
1378 1379 if not changesets:
1379 1380 raise util.Abort(_("export requires at least one changeset"))
1380 1381 revs = cmdutil.revrange(ui, repo, changesets)
1381 1382 if len(revs) > 1:
1382 1383 ui.note(_('exporting patches:\n'))
1383 1384 else:
1384 1385 ui.note(_('exporting patch:\n'))
1385 1386 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1386 1387 switch_parent=opts['switch_parent'],
1387 1388 opts=patch.diffopts(ui, opts))
1388 1389
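# Illustrative sketch (editorial, not part of commands.py): expanding the
# format string documented for export (and cat) above by hand.  The values
# are made up for the example, and the zero-padding width is a guess here;
# the real expansion is handled by the cmdutil/patch helpers, not this sketch.
def _formatdemo():
    node = '1234567890ab' * 3 + 'cdef'        # hypothetical 40-char hash
    rev, seqno, total = 42, 1, 3
    repo_root = '/home/user/myrepo'
    expansions = [
        ('%%', '%'),
        ('%H', node),                         # full changeset hash
        ('%h', node[:12]),                    # short-form changeset hash
        ('%R', str(rev)),                     # changeset revision number
        ('%r', '%07d' % rev),                 # zero-padded revision number
        ('%N', str(total)),                   # number of patches generated
        ('%n', '%07d' % seqno),               # zero-padded sequence number
        ('%b', os.path.basename(repo_root)),  # basename of exporting repo
    ]
    pattern = '%b-%r.patch'
    for key, value in expansions:
        pattern = pattern.replace(key, value)
    return pattern                            # -> 'myrepo-0000042.patch'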
1389 1390 def grep(ui, repo, pattern, *pats, **opts):
1390 1391 """search for a pattern in specified files and revisions
1391 1392
1392 1393 Search revisions of files for a regular expression.
1393 1394
1394 1395 This command behaves differently than Unix grep. It only accepts
1395 1396 Python/Perl regexps. It searches repository history, not the
1396 1397 working directory. It always prints the revision number in which
1397 1398 a match appears.
1398 1399
1399 1400 By default, grep only prints output for the first revision of a
1400 1401 file in which it finds a match. To get it to print every revision
1401 1402 that contains a change in match status ("-" for a match that
1402 1403 becomes a non-match, or "+" for a non-match that becomes a match),
1403 1404 use the --all flag.
1404 1405 """
1405 1406 reflags = 0
1406 1407 if opts['ignore_case']:
1407 1408 reflags |= re.I
1408 1409 regexp = re.compile(pattern, reflags)
1409 1410 sep, eol = ':', '\n'
1410 1411 if opts['print0']:
1411 1412 sep = eol = '\0'
1412 1413
1413 1414 fcache = {}
1414 1415 def getfile(fn):
1415 1416 if fn not in fcache:
1416 1417 fcache[fn] = repo.file(fn)
1417 1418 return fcache[fn]
1418 1419
1419 1420 def matchlines(body):
1420 1421 begin = 0
1421 1422 linenum = 0
1422 1423 while True:
1423 1424 match = regexp.search(body, begin)
1424 1425 if not match:
1425 1426 break
1426 1427 mstart, mend = match.span()
1427 1428 linenum += body.count('\n', begin, mstart) + 1
1428 1429 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1429 1430 lend = body.find('\n', mend)
1430 1431 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1431 1432 begin = lend + 1
1432 1433
1433 1434 class linestate(object):
1434 1435 def __init__(self, line, linenum, colstart, colend):
1435 1436 self.line = line
1436 1437 self.linenum = linenum
1437 1438 self.colstart = colstart
1438 1439 self.colend = colend
1439 1440
1440 1441 def __eq__(self, other):
1441 1442 return self.line == other.line
1442 1443
1443 1444 matches = {}
1444 1445 copies = {}
1445 1446 def grepbody(fn, rev, body):
1446 1447 matches[rev].setdefault(fn, [])
1447 1448 m = matches[rev][fn]
1448 1449 for lnum, cstart, cend, line in matchlines(body):
1449 1450 s = linestate(line, lnum, cstart, cend)
1450 1451 m.append(s)
1451 1452
1452 1453 def difflinestates(a, b):
1453 1454 sm = difflib.SequenceMatcher(None, a, b)
1454 1455 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1455 1456 if tag == 'insert':
1456 1457 for i in xrange(blo, bhi):
1457 1458 yield ('+', b[i])
1458 1459 elif tag == 'delete':
1459 1460 for i in xrange(alo, ahi):
1460 1461 yield ('-', a[i])
1461 1462 elif tag == 'replace':
1462 1463 for i in xrange(alo, ahi):
1463 1464 yield ('-', a[i])
1464 1465 for i in xrange(blo, bhi):
1465 1466 yield ('+', b[i])
1466 1467
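# Illustrative note (editorial): difflinestates() above reports match lines
# that appeared or disappeared between two revisions of a file.  If the older
# revision matched on the lines "foo()" and "bar()" and the newer one matches
# on "foo()" and "baz()", it yields ('-', <state for bar()>) and then
# ('+', <state for baz()>); unchanged match lines are not reported.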
1467 1468 prev = {}
1468 1469 def display(fn, rev, states, prevstates):
1469 1470 counts = {'-': 0, '+': 0}
1470 1471 filerevmatches = {}
1471 1472 if incrementing or not opts['all']:
1472 1473 a, b, r = prevstates, states, rev
1473 1474 else:
1474 1475 a, b, r = states, prevstates, prev.get(fn, -1)
1475 1476 for change, l in difflinestates(a, b):
1476 1477 cols = [fn, str(r)]
1477 1478 if opts['line_number']:
1478 1479 cols.append(str(l.linenum))
1479 1480 if opts['all']:
1480 1481 cols.append(change)
1481 1482 if opts['user']:
1482 1483 cols.append(ui.shortuser(getchange(r)[1]))
1483 1484 if opts['files_with_matches']:
1484 1485 c = (fn, r)
1485 1486 if c in filerevmatches:
1486 1487 continue
1487 1488 filerevmatches[c] = 1
1488 1489 else:
1489 1490 cols.append(l.line)
1490 1491 ui.write(sep.join(cols), eol)
1491 1492 counts[change] += 1
1492 1493 return counts['+'], counts['-']
1493 1494
1494 1495 fstate = {}
1495 1496 skip = {}
1496 1497 getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
1497 1498 changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
1498 1499 count = 0
1499 1500 incrementing = False
1500 1501 follow = opts.get('follow')
1501 1502 for st, rev, fns in changeiter:
1502 1503 if st == 'window':
1503 1504 incrementing = rev
1504 1505 matches.clear()
1505 1506 elif st == 'add':
1506 1507 mf = repo.changectx(rev).manifest()
1507 1508 matches[rev] = {}
1508 1509 for fn in fns:
1509 1510 if fn in skip:
1510 1511 continue
1511 1512 fstate.setdefault(fn, {})
1512 1513 try:
1513 1514 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1514 1515 if follow:
1515 1516 copied = getfile(fn).renamed(mf[fn])
1516 1517 if copied:
1517 1518 copies.setdefault(rev, {})[fn] = copied[0]
1518 1519 except KeyError:
1519 1520 pass
1520 1521 elif st == 'iter':
1521 1522 states = matches[rev].items()
1522 1523 states.sort()
1523 1524 for fn, m in states:
1524 1525 copy = copies.get(rev, {}).get(fn)
1525 1526 if fn in skip:
1526 1527 if copy:
1527 1528 skip[copy] = True
1528 1529 continue
1529 1530 if incrementing or not opts['all'] or fstate[fn]:
1530 1531 pos, neg = display(fn, rev, m, fstate[fn])
1531 1532 count += pos + neg
1532 1533 if pos and not opts['all']:
1533 1534 skip[fn] = True
1534 1535 if copy:
1535 1536 skip[copy] = True
1536 1537 fstate[fn] = m
1537 1538 if copy:
1538 1539 fstate[copy] = m
1539 1540 prev[fn] = rev
1540 1541
1541 1542 if not incrementing:
1542 1543 fstate = fstate.items()
1543 1544 fstate.sort()
1544 1545 for fn, state in fstate:
1545 1546 if fn in skip:
1546 1547 continue
1547 1548 if fn not in copies.get(prev[fn], {}):
1548 1549 display(fn, rev, {}, state)
1549 1550 return (count == 0 and 1) or 0
1550 1551
1551 1552 def heads(ui, repo, **opts):
1552 1553 """show current repository heads
1553 1554
1554 1555 Show all repository head changesets.
1555 1556
1556 1557 Repository "heads" are changesets that don't have child
1557 1558 changesets. They are where development generally takes place and
1558 1559 are the usual targets for update and merge operations.
1559 1560 """
1560 1561 if opts['rev']:
1561 1562 heads = repo.heads(repo.lookup(opts['rev']))
1562 1563 else:
1563 1564 heads = repo.heads()
1564 1565 br = None
1565 1566 if opts['branches']:
1566 1567 ui.warn(_("the --branches option is deprecated, "
1567 1568 "please use 'hg branches' instead\n"))
1568 1569 br = repo.branchlookup(heads)
1569 1570 displayer = show_changeset(ui, repo, opts)
1570 1571 for n in heads:
1571 1572 displayer.show(changenode=n, brinfo=br)
1572 1573
1573 1574 def identify(ui, repo):
1574 1575 """print information about the working copy
1575 1576
1576 1577 Print a short summary of the current state of the repo.
1577 1578
1578 1579 This summary identifies the repository state using one or two parent
1579 1580 hash identifiers, followed by a "+" if there are uncommitted changes
1580 1581 in the working directory, followed by a list of tags for this revision.
1581 1582 """
1582 1583 parents = [p for p in repo.dirstate.parents() if p != nullid]
1583 1584 if not parents:
1584 1585 ui.write(_("unknown\n"))
1585 1586 return
1586 1587
1587 1588 hexfunc = ui.debugflag and hex or short
1588 1589 modified, added, removed, deleted = repo.status()[:4]
1589 1590 output = ["%s%s" %
1590 1591 ('+'.join([hexfunc(parent) for parent in parents]),
1591 1592 (modified or added or removed or deleted) and "+" or "")]
1592 1593
1593 1594 if not ui.quiet:
1594 1595
1595 1596 branch = repo.workingctx().branch()
1596 1597 if branch:
1597 1598 output.append("(%s)" % branch)
1598 1599
1599 1600 # multiple tags for a single parent separated by '/'
1600 1601 parenttags = ['/'.join(tags)
1601 1602 for tags in map(repo.nodetags, parents) if tags]
1602 1603 # tags for multiple parents separated by ' + '
1603 1604 if parenttags:
1604 1605 output.append(' + '.join(parenttags))
1605 1606
1606 1607 ui.write("%s\n" % ' '.join(output))
1607 1608
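# Illustrative identify output (hypothetical hashes): a clean working copy
# with a single tagged parent prints something like
#
#   b9f5f2a38ec3 tip
#
# while an uncommitted merge on a named branch would print
#
#   b9f5f2a38ec3+8e4f7c11d290+ (somebranch) tip
#
# i.e. the parent hashes joined by '+', a trailing '+' for local changes,
# the branch in parentheses, then any tags.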
1608 1609 def import_(ui, repo, patch1, *patches, **opts):
1609 1610 """import an ordered set of patches
1610 1611
1611 1612 Import a list of patches and commit them individually.
1612 1613
1613 1614 If there are outstanding changes in the working directory, import
1614 1615 will abort unless given the -f flag.
1615 1616
1616 1617 You can import a patch straight from a mail message. Even patches
1617 1618 as attachments work (the body part must be of type text/plain or
1618 1619 text/x-patch to be used). The From and Subject headers of the email
1619 1620 message are used as the default committer and commit message. All
1620 1621 text/plain body parts before the first diff are added to the commit
1621 1622 message.
1622 1623
1623 1624 If the imported patch was generated by hg export, the user and
1624 1625 description from the patch override the values from the message headers
1625 1626 and body. Values given on the command line with -m and -u override these.
1626 1627
1627 1628 To read a patch from standard input, use patch name "-".
1628 1629 """
1629 1630 patches = (patch1,) + patches
1630 1631
1631 1632 if not opts['force']:
1632 1633 bail_if_changed(repo)
1633 1634
1634 1635 d = opts["base"]
1635 1636 strip = opts["strip"]
1636 1637
1637 1638 wlock = repo.wlock()
1638 1639 lock = repo.lock()
1639 1640
1640 1641 for p in patches:
1641 1642 pf = os.path.join(d, p)
1642 1643
1643 1644 if pf == '-':
1644 1645 ui.status(_("applying patch from stdin\n"))
1645 1646 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1646 1647 else:
1647 1648 ui.status(_("applying %s\n") % p)
1648 1649 tmpname, message, user, date = patch.extract(ui, file(pf))
1649 1650
1650 1651 if tmpname is None:
1651 1652 raise util.Abort(_('no diffs found'))
1652 1653
1653 1654 try:
1654 1655 if opts['message']:
1655 1656 # pickup the cmdline msg
1656 1657 message = opts['message']
1657 1658 elif message:
1658 1659 # pickup the patch msg
1659 1660 message = message.strip()
1660 1661 else:
1661 1662 # launch the editor
1662 1663 message = None
1663 1664 ui.debug(_('message:\n%s\n') % message)
1664 1665
1665 1666 files = {}
1666 1667 try:
1667 1668 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1668 1669 files=files)
1669 1670 finally:
1670 1671 files = patch.updatedir(ui, repo, files, wlock=wlock)
1671 1672 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1672 1673 finally:
1673 1674 os.unlink(tmpname)
1674 1675
1675 1676 def incoming(ui, repo, source="default", **opts):
1676 1677 """show new changesets found in source
1677 1678
1678 1679 Show new changesets found in the specified path/URL or the default
1679 1680 pull location. These are the changesets that would be pulled if a pull
1680 1681 was requested.
1681 1682
1682 1683 For remote repositories, using --bundle avoids downloading the changesets
1683 1684 twice if the incoming command is followed by a pull.
1684 1685
1685 1686 See pull for valid source format details.
1686 1687 """
1687 1688 source = ui.expandpath(source)
1688 1689 setremoteconfig(ui, opts)
1689 1690
1690 1691 other = hg.repository(ui, source)
1691 1692 incoming = repo.findincoming(other, force=opts["force"])
1692 1693 if not incoming:
1693 1694 ui.status(_("no changes found\n"))
1694 1695 return
1695 1696
1696 1697 cleanup = None
1697 1698 try:
1698 1699 fname = opts["bundle"]
1699 1700 if fname or not other.local():
1700 1701 # create a bundle (uncompressed if other repo is not local)
1701 1702 cg = other.changegroup(incoming, "incoming")
1702 1703 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1703 1704 # keep written bundle?
1704 1705 if opts["bundle"]:
1705 1706 cleanup = None
1706 1707 if not other.local():
1707 1708 # use the created uncompressed bundlerepo
1708 1709 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1709 1710
1710 1711 revs = None
1711 1712 if opts['rev']:
1712 1713 revs = [other.lookup(rev) for rev in opts['rev']]
1713 1714 o = other.changelog.nodesbetween(incoming, revs)[0]
1714 1715 if opts['newest_first']:
1715 1716 o.reverse()
1716 1717 displayer = show_changeset(ui, other, opts)
1717 1718 for n in o:
1718 1719 parents = [p for p in other.changelog.parents(n) if p != nullid]
1719 1720 if opts['no_merges'] and len(parents) == 2:
1720 1721 continue
1721 1722 displayer.show(changenode=n)
1722 1723 if opts['patch']:
1723 1724 prev = (parents and parents[0]) or nullid
1724 1725 patch.diff(other, prev, n, fp=repo.ui)
1725 1726 ui.write("\n")
1726 1727 finally:
1727 1728 if hasattr(other, 'close'):
1728 1729 other.close()
1729 1730 if cleanup:
1730 1731 os.unlink(cleanup)
1731 1732
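# Illustrative use of the --bundle behaviour implemented in incoming()
# above (paths and URL are placeholders): the fetched changegroup is kept
# on disk so a later pull can reuse it instead of downloading it again, e.g.
#
#   hg incoming --bundle incoming.hg http://hg.example.com/repo
#   hg pull incoming.hg
#
# For non-local sources the same file is opened through bundlerepo so that
# --patch can produce diffs without a second round trip.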
1732 1733 def init(ui, dest=".", **opts):
1733 1734 """create a new repository in the given directory
1734 1735
1735 1736 Initialize a new repository in the given directory. If the given
1736 1737 directory does not exist, it is created.
1737 1738
1738 1739 If no directory is given, the current directory is used.
1739 1740
1740 1741 It is possible to specify an ssh:// URL as the destination.
1741 1742 Look at the help text for the pull command for important details
1742 1743 about ssh:// URLs.
1743 1744 """
1744 1745 setremoteconfig(ui, opts)
1745 1746 hg.repository(ui, dest, create=1)
1746 1747
1747 1748 def locate(ui, repo, *pats, **opts):
1748 1749 """locate files matching specific patterns
1749 1750
1750 1751 Print all files under Mercurial control whose names match the
1751 1752 given patterns.
1752 1753
1753 1754 This command searches the current directory and its
1754 1755 subdirectories. To search an entire repository, move to the root
1755 1756 of the repository.
1756 1757
1757 1758 If no patterns are given to match, this command prints all file
1758 1759 names.
1759 1760
1760 1761 If you want to feed the output of this command into the "xargs"
1761 1762 command, use the "-0" option to both this command and "xargs".
1762 1763 This will avoid the problem of "xargs" treating single filenames
1763 1764 that contain white space as multiple filenames.
1764 1765 """
1765 1766 end = opts['print0'] and '\0' or '\n'
1766 1767 rev = opts['rev']
1767 1768 if rev:
1768 1769 node = repo.lookup(rev)
1769 1770 else:
1770 1771 node = None
1771 1772
1772 1773 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1773 1774 head='(?:.*/|)'):
1774 1775 if not node and repo.dirstate.state(abs) == '?':
1775 1776 continue
1776 1777 if opts['fullpath']:
1777 1778 ui.write(os.path.join(repo.root, abs), end)
1778 1779 else:
1779 1780 ui.write(((pats and rel) or abs), end)
1780 1781
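# Illustrative pairing of locate -0 with xargs -0, as suggested by the
# docstring above (the command line is an example only):
#
#   hg locate -0 | xargs -0 grep -l copyright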
1781 1782 def log(ui, repo, *pats, **opts):
1782 1783 """show revision history of entire repository or files
1783 1784
1784 1785 Print the revision history of the specified files or the entire
1785 1786 project.
1786 1787
1787 1788 File history is shown without following rename or copy history of
1788 1789 files. Use -f/--follow with a file name to follow history across
1789 1790 renames and copies. --follow without a file name will only show
1790 1791 ancestors or descendants of the starting revision. --follow-first
1791 1792 only follows the first parent of merge revisions.
1792 1793
1793 1794 If no revision range is specified, the default is tip:0 unless
1794 1795 --follow is set, in which case the working directory parent is
1795 1796 used as the starting revision.
1796 1797
1797 1798 By default this command outputs: changeset id and hash, tags,
1798 1799 non-trivial parents, user, date and time, and a summary for each
1799 1800 commit. When the -v/--verbose switch is used, the list of changed
1800 1801 files and the full commit message are shown.
1801 1802 """
1802 1803 class dui(object):
1803 1804 # Implement and delegate some ui protocol. Save hunks of
1804 1805 # output for later display in the desired order.
1805 1806 def __init__(self, ui):
1806 1807 self.ui = ui
1807 1808 self.hunk = {}
1808 1809 self.header = {}
1809 1810 self.quiet = ui.quiet
1810 1811 self.verbose = ui.verbose
1811 1812 self.debugflag = ui.debugflag
1812 1813 def bump(self, rev):
1813 1814 self.rev = rev
1814 1815 self.hunk[rev] = []
1815 1816 self.header[rev] = []
1816 1817 def note(self, *args):
1817 1818 if self.verbose:
1818 1819 self.write(*args)
1819 1820 def status(self, *args):
1820 1821 if not self.quiet:
1821 1822 self.write(*args)
1822 1823 def write(self, *args):
1823 1824 self.hunk[self.rev].extend(args)
1824 1825 def write_header(self, *args):
1825 1826 self.header[self.rev].extend(args)
1826 1827 def debug(self, *args):
1827 1828 if self.debugflag:
1828 1829 self.write(*args)
1829 1830 def __getattr__(self, key):
1830 1831 return getattr(self.ui, key)
1831 1832
1832 1833 getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
1833 1834 changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
1834 1835
1835 1836 if opts['branches']:
1836 1837 ui.warn(_("the --branches option is deprecated, "
1837 1838 "please use 'hg branches' instead\n"))
1838 1839
1839 1840 if opts['limit']:
1840 1841 try:
1841 1842 limit = int(opts['limit'])
1842 1843 except ValueError:
1843 1844 raise util.Abort(_('limit must be a positive integer'))
1844 1845 if limit <= 0: raise util.Abort(_('limit must be positive'))
1845 1846 else:
1846 1847 limit = sys.maxint
1847 1848 count = 0
1848 1849
1849 1850 if opts['copies'] and opts['rev']:
1850 1851 endrev = max(cmdutil.revrange(ui, repo, opts['rev'])) + 1
1851 1852 else:
1852 1853 endrev = repo.changelog.count()
1853 1854 rcache = {}
1854 1855 ncache = {}
1855 1856 dcache = []
1856 1857 def getrenamed(fn, rev, man):
1857 1858 '''looks up all renames for a file (up to endrev) the first
1858 1859 time the file is given. It indexes on the changerev and only
1859 1860 parses the manifest if linkrev != changerev.
1860 1861 Returns rename info for fn at changerev rev.'''
1861 1862 if fn not in rcache:
1862 1863 rcache[fn] = {}
1863 1864 ncache[fn] = {}
1864 1865 fl = repo.file(fn)
1865 1866 for i in xrange(fl.count()):
1866 1867 node = fl.node(i)
1867 1868 lr = fl.linkrev(node)
1868 1869 renamed = fl.renamed(node)
1869 1870 rcache[fn][lr] = renamed
1870 1871 if renamed:
1871 1872 ncache[fn][node] = renamed
1872 1873 if lr >= endrev:
1873 1874 break
1874 1875 if rev in rcache[fn]:
1875 1876 return rcache[fn][rev]
1876 1877 mr = repo.manifest.rev(man)
1877 1878 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1878 1879 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1879 1880 if not dcache or dcache[0] != man:
1880 1881 dcache[:] = [man, repo.manifest.readdelta(man)]
1881 1882 if fn in dcache[1]:
1882 1883 return ncache[fn].get(dcache[1][fn])
1883 1884 return None
1884 1885
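# Rough shape of the rename caches built by getrenamed() above
# (hypothetical values): rcache maps each filelog entry's linkrev to its
# rename info and ncache maps the file node to the same info, e.g.
#
#   rcache['a.txt'] == {12: ('old.txt', <filenode>), 30: False}
#   ncache['a.txt'] == {<filenode>: ('old.txt', <filenode>)}
#
# A changeset that is itself a linkrev is answered from rcache; otherwise
# the manifest (or its delta against the previous manifest revision) is
# used to find the file node, and ncache answers the lookup.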
1885 1886 displayer = show_changeset(ui, repo, opts)
1886 1887 for st, rev, fns in changeiter:
1887 1888 if st == 'window':
1888 1889 du = dui(ui)
1889 1890 displayer.ui = du
1890 1891 elif st == 'add':
1891 1892 du.bump(rev)
1892 1893 changenode = repo.changelog.node(rev)
1893 1894 parents = [p for p in repo.changelog.parentrevs(rev)
1894 1895 if p != nullrev]
1895 1896 if opts['no_merges'] and len(parents) == 2:
1896 1897 continue
1897 1898 if opts['only_merges'] and len(parents) != 2:
1898 1899 continue
1899 1900
1900 1901 if opts['keyword']:
1901 1902 changes = getchange(rev)
1902 1903 miss = 0
1903 1904 for k in [kw.lower() for kw in opts['keyword']]:
1904 1905 if not (k in changes[1].lower() or
1905 1906 k in changes[4].lower() or
1906 1907 k in " ".join(changes[3][:20]).lower()):
1907 1908 miss = 1
1908 1909 break
1909 1910 if miss:
1910 1911 continue
1911 1912
1912 1913 br = None
1913 1914 if opts['branches']:
1914 1915 br = repo.branchlookup([repo.changelog.node(rev)])
1915 1916
1916 1917 copies = []
1917 1918 if opts.get('copies') and rev:
1918 1919 mf = getchange(rev)[0]
1919 1920 for fn in getchange(rev)[3]:
1920 1921 rename = getrenamed(fn, rev, mf)
1921 1922 if rename:
1922 1923 copies.append((fn, rename[0]))
1923 1924 displayer.show(rev, changenode, brinfo=br, copies=copies)
1924 1925 if opts['patch']:
1925 1926 if parents:
1926 1927 prev = parents[0]
1927 1928 else:
1928 1929 prev = nullrev
1929 1930 prev = repo.changelog.node(prev)
1930 1931 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1931 1932 du.write("\n\n")
1932 1933 elif st == 'iter':
1933 1934 if count == limit: break
1934 1935 if du.header[rev]:
1935 1936 ui.write_header(*du.header[rev])
1936 1937 if du.hunk[rev]:
1937 1938 count += 1
1938 1939 ui.write(*du.hunk[rev])
1939 1940
1940 1941 def manifest(ui, repo, rev=None):
1941 1942 """output the latest or given revision of the project manifest
1942 1943
1943 1944 Print a list of version controlled files for the given revision.
1944 1945
1945 1946 The manifest is the list of files being version controlled. If no revision
1946 1947 is given then the tip is used.
1947 1948 """
1948 1949 if rev:
1949 1950 try:
1950 1951 # assume all revision numbers are for changesets
1951 1952 n = repo.lookup(rev)
1952 1953 change = repo.changelog.read(n)
1953 1954 n = change[0]
1954 1955 except hg.RepoError:
1955 1956 n = repo.manifest.lookup(rev)
1956 1957 else:
1957 1958 n = repo.manifest.tip()
1958 1959 m = repo.manifest.read(n)
1959 1960 files = m.keys()
1960 1961 files.sort()
1961 1962
1962 1963 for f in files:
1963 1964 ui.write("%40s %3s %s\n" % (hex(m[f]),
1964 1965 m.execf(f) and "755" or "644", f))
1965 1966
1966 1967 def merge(ui, repo, node=None, force=None, branch=None):
1967 1968 """Merge working directory with another revision
1968 1969
1969 1970 Merge the contents of the current working directory and the
1970 1971 requested revision. Files that changed between either parent are
1971 1972 marked as changed for the next commit and a commit must be
1972 1973 performed before any further updates are allowed.
1973 1974
1974 1975 If no revision is specified, the working directory's parent is a
1975 1976 head revision, and the repository contains exactly one other head,
1976 1977 the other head is merged with by default. Otherwise, an explicit
1977 1978 revision to merge with must be provided.
1978 1979 """
1979 1980
1980 1981 if node or branch:
1981 1982 node = _lookup(repo, node, branch)
1982 1983 else:
1983 1984 heads = repo.heads()
1984 1985 if len(heads) > 2:
1985 1986 raise util.Abort(_('repo has %d heads - '
1986 1987 'please merge with an explicit rev') %
1987 1988 len(heads))
1988 1989 if len(heads) == 1:
1989 1990 raise util.Abort(_('there is nothing to merge - '
1990 1991 'use "hg update" instead'))
1991 1992 parent = repo.dirstate.parents()[0]
1992 1993 if parent not in heads:
1993 1994 raise util.Abort(_('working dir not at a head rev - '
1994 1995 'use "hg update" or merge with an explicit rev'))
1995 1996 node = parent == heads[0] and heads[-1] or heads[0]
1996 1997 return hg.merge(repo, node, force=force)
1997 1998
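# Sketch of the implicit head selection in merge() above (hypothetical
# heads): with exactly two heads the expression picks "the other one":
#
#   heads = [h0, h1]
#   parent == h0  ->  node = heads[-1]  (that is, h1)
#   parent == h1  ->  node = heads[0]   (that is, h0)
#
# The guards before it abort when there are more than two heads or when
# the working directory is not at a head.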
1998 1999 def outgoing(ui, repo, dest=None, **opts):
1999 2000 """show changesets not found in destination
2000 2001
2001 2002 Show changesets not found in the specified destination repository or
2002 2003 the default push location. These are the changesets that would be pushed
2003 2004 if a push was requested.
2004 2005
2005 2006 See pull for valid destination format details.
2006 2007 """
2007 2008 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2008 2009 setremoteconfig(ui, opts)
2009 2010 revs = None
2010 2011 if opts['rev']:
2011 2012 revs = [repo.lookup(rev) for rev in opts['rev']]
2012 2013
2013 2014 other = hg.repository(ui, dest)
2014 2015 o = repo.findoutgoing(other, force=opts['force'])
2015 2016 if not o:
2016 2017 ui.status(_("no changes found\n"))
2017 2018 return
2018 2019 o = repo.changelog.nodesbetween(o, revs)[0]
2019 2020 if opts['newest_first']:
2020 2021 o.reverse()
2021 2022 displayer = show_changeset(ui, repo, opts)
2022 2023 for n in o:
2023 2024 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2024 2025 if opts['no_merges'] and len(parents) == 2:
2025 2026 continue
2026 2027 displayer.show(changenode=n)
2027 2028 if opts['patch']:
2028 2029 prev = (parents and parents[0]) or nullid
2029 2030 patch.diff(repo, prev, n)
2030 2031 ui.write("\n")
2031 2032
2032 2033 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2033 2034 """show the parents of the working dir or revision
2034 2035
2035 2036 Print the working directory's parent revisions.
2036 2037 """
2037 2038 # legacy
2038 2039 if file_ and not rev:
2039 2040 try:
2040 2041 rev = repo.lookup(file_)
2041 2042 file_ = None
2042 2043 except hg.RepoError:
2043 2044 pass
2044 2045 else:
2045 2046 ui.warn(_("'hg parent REV' is deprecated, "
2046 2047 "please use 'hg parents -r REV instead\n"))
2047 2048
2048 2049 if rev:
2049 2050 if file_:
2050 2051 ctx = repo.filectx(file_, changeid=rev)
2051 2052 else:
2052 2053 ctx = repo.changectx(rev)
2053 2054 p = [cp.node() for cp in ctx.parents()]
2054 2055 else:
2055 2056 p = repo.dirstate.parents()
2056 2057
2057 2058 br = None
2058 2059 if branches is not None:
2059 2060 ui.warn(_("the --branches option is deprecated, "
2060 2061 "please use 'hg branches' instead\n"))
2061 2062 br = repo.branchlookup(p)
2062 2063 displayer = show_changeset(ui, repo, opts)
2063 2064 for n in p:
2064 2065 if n != nullid:
2065 2066 displayer.show(changenode=n, brinfo=br)
2066 2067
2067 2068 def paths(ui, repo, search=None):
2068 2069 """show definition of symbolic path names
2069 2070
2070 2071 Show definition of symbolic path name NAME. If no name is given, show
2071 2072 definition of available names.
2072 2073
2073 2074 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2074 2075 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2075 2076 """
2076 2077 if search:
2077 2078 for name, path in ui.configitems("paths"):
2078 2079 if name == search:
2079 2080 ui.write("%s\n" % path)
2080 2081 return
2081 2082 ui.warn(_("not found!\n"))
2082 2083 return 1
2083 2084 else:
2084 2085 for name, path in ui.configitems("paths"):
2085 2086 ui.write("%s = %s\n" % (name, path))
2086 2087
2087 2088 def postincoming(ui, repo, modheads, optupdate):
2088 2089 if modheads == 0:
2089 2090 return
2090 2091 if optupdate:
2091 2092 if modheads == 1:
2092 2093 return hg.update(repo, repo.changelog.tip()) # update
2093 2094 else:
2094 2095 ui.status(_("not updating, since new heads added\n"))
2095 2096 if modheads > 1:
2096 2097 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2097 2098 else:
2098 2099 ui.status(_("(run 'hg update' to get a working copy)\n"))
2099 2100
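# Rough contract of the modheads value handled by postincoming() above, as
# returned by repo.pull and addchangegroup in the callers below:
#
#   modheads == 0  ->  nothing was added, print nothing extra
#   modheads == 1  ->  linear pull; with -u/--update, update to the new tip
#   modheads  > 1  ->  new heads appeared; hint at 'hg heads' / 'hg merge'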
2100 2101 def pull(ui, repo, source="default", **opts):
2101 2102 """pull changes from the specified source
2102 2103
2103 2104 Pull changes from a remote repository to a local one.
2104 2105
2105 2106 This finds all changes from the repository at the specified path
2106 2107 or URL and adds them to the local repository. By default, this
2107 2108 does not update the copy of the project in the working directory.
2108 2109
2109 2110 Valid URLs are of the form:
2110 2111
2111 2112 local/filesystem/path (or file://local/filesystem/path)
2112 2113 http://[user@]host[:port]/[path]
2113 2114 https://[user@]host[:port]/[path]
2114 2115 ssh://[user@]host[:port]/[path]
2115 2116 static-http://host[:port]/[path]
2116 2117
2117 2118 Paths in the local filesystem can either point to Mercurial
2118 2119 repositories or to bundle files (as created by 'hg bundle' or
2119 2120 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2120 2121 allows access to a Mercurial repository where you simply use a web
2121 2122 server to publish the .hg directory as static content.
2122 2123
2123 2124 Some notes about using SSH with Mercurial:
2124 2125 - SSH requires an accessible shell account on the destination machine
2125 2126 and a copy of hg in the remote path, or specified with the --remotecmd option.
2126 2127 - path is relative to the remote user's home directory by default.
2127 2128 Use an extra slash at the start of a path to specify an absolute path:
2128 2129 ssh://example.com//tmp/repository
2129 2130 - Mercurial doesn't use its own compression via SSH; the right thing
2130 2131 to do is to configure it in your ~/.ssh/config, e.g.:
2131 2132 Host *.mylocalnetwork.example.com
2132 2133 Compression no
2133 2134 Host *
2134 2135 Compression yes
2135 2136 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2136 2137 with the --ssh command line option.
2137 2138 """
2138 2139 source = ui.expandpath(source)
2139 2140 setremoteconfig(ui, opts)
2140 2141
2141 2142 other = hg.repository(ui, source)
2142 2143 ui.status(_('pulling from %s\n') % (source))
2143 2144 revs = None
2144 2145 if opts['rev']:
2145 2146 if 'lookup' in other.capabilities:
2146 2147 revs = [other.lookup(rev) for rev in opts['rev']]
2147 2148 else:
2148 2149 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2149 2150 raise util.Abort(error)
2150 2151 modheads = repo.pull(other, heads=revs, force=opts['force'])
2151 2152 return postincoming(ui, repo, modheads, opts['update'])
2152 2153
2153 2154 def push(ui, repo, dest=None, **opts):
2154 2155 """push changes to the specified destination
2155 2156
2156 2157 Push changes from the local repository to the given destination.
2157 2158
2158 2159 This is the symmetrical operation for pull. It helps to move
2159 2160 changes from the current repository to a different one. If the
2160 2161 destination is local this is identical to a pull in that directory
2161 2162 from the current one.
2162 2163
2163 2164 By default, push will refuse to run if it detects the result would
2164 2165 increase the number of remote heads. This generally indicates that
2165 2166 the client has forgotten to sync and merge before pushing.
2166 2167
2167 2168 Valid URLs are of the form:
2168 2169
2169 2170 local/filesystem/path (or file://local/filesystem/path)
2170 2171 ssh://[user@]host[:port]/[path]
2171 2172 http://[user@]host[:port]/[path]
2172 2173 https://[user@]host[:port]/[path]
2173 2174
2174 2175 Look at the help text for the pull command for important details
2175 2176 about ssh:// URLs.
2176 2177
2177 2178 Pushing to http:// and https:// URLs is only possible if this
2178 2179 feature is explicitly enabled on the remote Mercurial server.
2179 2180 """
2180 2181 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2181 2182 setremoteconfig(ui, opts)
2182 2183
2183 2184 other = hg.repository(ui, dest)
2184 2185 ui.status('pushing to %s\n' % (dest))
2185 2186 revs = None
2186 2187 if opts['rev']:
2187 2188 revs = [repo.lookup(rev) for rev in opts['rev']]
2188 2189 r = repo.push(other, opts['force'], revs=revs)
2189 2190 return r == 0
2190 2191
2191 2192 def rawcommit(ui, repo, *flist, **rc):
2192 2193 """raw commit interface (DEPRECATED)
2193 2194
2194 2195 (DEPRECATED)
2195 2196 Low-level commit, for use in helper scripts.
2196 2197
2197 2198 This command is not intended to be used by normal users, as it is
2198 2199 primarily useful for importing from other SCMs.
2199 2200
2200 2201 This command is now deprecated and will be removed in a future
2201 2202 release, please use debugsetparents and commit instead.
2202 2203 """
2203 2204
2204 2205 ui.warn(_("(the rawcommit command is deprecated)\n"))
2205 2206
2206 2207 message = rc['message']
2207 2208 if not message and rc['logfile']:
2208 2209 try:
2209 2210 message = open(rc['logfile']).read()
2210 2211 except IOError:
2211 2212 pass
2212 2213 if not message and not rc['logfile']:
2213 2214 raise util.Abort(_("missing commit message"))
2214 2215
2215 2216 files = relpath(repo, list(flist))
2216 2217 if rc['files']:
2217 2218 files += open(rc['files']).read().splitlines()
2218 2219
2219 2220 rc['parent'] = map(repo.lookup, rc['parent'])
2220 2221
2221 2222 try:
2222 2223 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2223 2224 except ValueError, inst:
2224 2225 raise util.Abort(str(inst))
2225 2226
2226 2227 def recover(ui, repo):
2227 2228 """roll back an interrupted transaction
2228 2229
2229 2230 Recover from an interrupted commit or pull.
2230 2231
2231 2232 This command tries to fix the repository status after an interrupted
2232 2233 operation. It should only be necessary when Mercurial suggests it.
2233 2234 """
2234 2235 if repo.recover():
2235 2236 return hg.verify(repo)
2236 2237 return 1
2237 2238
2238 2239 def remove(ui, repo, *pats, **opts):
2239 2240 """remove the specified files on the next commit
2240 2241
2241 2242 Schedule the indicated files for removal from the repository.
2242 2243
2243 2244 This command schedules the files to be removed at the next commit.
2244 2245 This only removes files from the current branch, not from the
2245 2246 entire project history. If the files still exist in the working
2246 2247 directory, they will be deleted from it. If invoked with --after,
2247 2248 files that have been manually deleted are marked as removed.
2248 2249
2249 2250 Modified files and added files are not removed by default. To
2250 2251 remove them, use the -f/--force option.
2251 2252 """
2252 2253 names = []
2253 2254 if not opts['after'] and not pats:
2254 2255 raise util.Abort(_('no files specified'))
2255 2256 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2256 2257 exact = dict.fromkeys(files)
2257 2258 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2258 2259 modified, added, removed, deleted, unknown = mardu
2259 2260 remove, forget = [], []
2260 2261 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2261 2262 reason = None
2262 2263 if abs not in deleted and opts['after']:
2263 2264 reason = _('is still present')
2264 2265 elif abs in modified and not opts['force']:
2265 2266 reason = _('is modified (use -f to force removal)')
2266 2267 elif abs in added:
2267 2268 if opts['force']:
2268 2269 forget.append(abs)
2269 2270 continue
2270 2271 reason = _('has been marked for add (use -f to force removal)')
2271 2272 elif abs in unknown:
2272 2273 reason = _('is not managed')
2273 2274 elif abs in removed:
2274 2275 continue
2275 2276 if reason:
2276 2277 if exact:
2277 2278 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2278 2279 else:
2279 2280 if ui.verbose or not exact:
2280 2281 ui.status(_('removing %s\n') % rel)
2281 2282 remove.append(abs)
2282 2283 repo.forget(forget)
2283 2284 repo.remove(remove, unlink=not opts['after'])
2284 2285
2285 2286 def rename(ui, repo, *pats, **opts):
2286 2287 """rename files; equivalent of copy + remove
2287 2288
2288 2289 Mark dest as copies of sources; mark sources for deletion. If
2289 2290 dest is a directory, copies are put in that directory. If dest is
2290 2291 a file, there can only be one source.
2291 2292
2292 2293 By default, this command copies the contents of files as they
2293 2294 stand in the working directory. If invoked with --after, the
2294 2295 operation is recorded, but no copying is performed.
2295 2296
2296 2297 This command takes effect in the next commit.
2297 2298
2298 2299 NOTE: This command should be treated as experimental. While it
2299 2300 should properly record renamed files, this information is not yet
2300 2301 fully used by merge, nor fully reported by log.
2301 2302 """
2302 2303 wlock = repo.wlock(0)
2303 2304 errs, copied = docopy(ui, repo, pats, opts, wlock)
2304 2305 names = []
2305 2306 for abs, rel, exact in copied:
2306 2307 if ui.verbose or not exact:
2307 2308 ui.status(_('removing %s\n') % rel)
2308 2309 names.append(abs)
2309 2310 if not opts.get('dry_run'):
2310 2311 repo.remove(names, True, wlock)
2311 2312 return errs
2312 2313
2313 2314 def revert(ui, repo, *pats, **opts):
2314 2315 """revert files or dirs to their states as of some revision
2315 2316
2316 2317 With no revision specified, revert the named files or directories
2317 2318 to the contents they had in the parent of the working directory.
2318 2319 This restores the contents of the affected files to an unmodified
2319 2320 state. If the working directory has two parents, you must
2320 2321 explicitly specify the revision to revert to.
2321 2322
2322 2323 Modified files are saved with a .orig suffix before reverting.
2323 2324 To disable these backups, use --no-backup.
2324 2325
2325 2326 Using the -r option, revert the given files or directories to their
2326 2327 contents as of a specific revision. This can be helpful to "roll
2327 2328 back" some or all of a change that should not have been committed.
2328 2329
2329 2330 Revert modifies the working directory. It does not commit any
2330 2331 changes, or change the parent of the working directory. If you
2331 2332 revert to a revision other than the parent of the working
2332 2333 directory, the reverted files will thus appear modified
2333 2334 afterwards.
2334 2335
2335 2336 If a file has been deleted, it is recreated. If the executable
2336 2337 mode of a file was changed, it is reset.
2337 2338
2338 2339 If names are given, all files matching the names are reverted.
2339 2340
2340 2341 If no arguments are given, no files are reverted.
2341 2342 """
2342 2343
2343 2344 if not pats and not opts['all']:
2344 2345 raise util.Abort(_('no files or directories specified; '
2345 2346 'use --all to revert the whole repo'))
2346 2347
2347 2348 parent, p2 = repo.dirstate.parents()
2348 2349 if not opts['rev'] and p2 != nullid:
2349 2350 raise util.Abort(_('uncommitted merge - please provide a '
2350 2351 'specific revision'))
2351 2352 node = repo.changectx(opts['rev']).node()
2352 2353 mf = repo.manifest.read(repo.changelog.read(node)[0])
2353 2354 if node == parent:
2354 2355 pmf = mf
2355 2356 else:
2356 2357 pmf = None
2357 2358
2358 2359 wlock = repo.wlock()
2359 2360
2360 2361 # need all matching names in dirstate and manifest of target rev,
2361 2362 # so have to walk both. do not print errors if files exist in one
2362 2363 # but not other.
2363 2364
2364 2365 names = {}
2365 2366 target_only = {}
2366 2367
2367 2368 # walk dirstate.
2368 2369
2369 2370 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2370 2371 badmatch=mf.has_key):
2371 2372 names[abs] = (rel, exact)
2372 2373 if src == 'b':
2373 2374 target_only[abs] = True
2374 2375
2375 2376 # walk target manifest.
2376 2377
2377 2378 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2378 2379 badmatch=names.has_key):
2379 2380 if abs in names: continue
2380 2381 names[abs] = (rel, exact)
2381 2382 target_only[abs] = True
2382 2383
2383 2384 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2384 2385 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2385 2386
2386 2387 revert = ([], _('reverting %s\n'))
2387 2388 add = ([], _('adding %s\n'))
2388 2389 remove = ([], _('removing %s\n'))
2389 2390 forget = ([], _('forgetting %s\n'))
2390 2391 undelete = ([], _('undeleting %s\n'))
2391 2392 update = {}
2392 2393
2393 2394 disptable = (
2394 2395 # dispatch table:
2395 2396 # file state
2396 2397 # action if in target manifest
2397 2398 # action if not in target manifest
2398 2399 # make backup if in target manifest
2399 2400 # make backup if not in target manifest
2400 2401 (modified, revert, remove, True, True),
2401 2402 (added, revert, forget, True, False),
2402 2403 (removed, undelete, None, False, False),
2403 2404 (deleted, revert, remove, False, False),
2404 2405 (unknown, add, None, True, False),
2405 2406 (target_only, add, None, False, False),
2406 2407 )
2407 2408
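# Worked example of one pass through the dispatch table above (hypothetical
# file): a locally modified file that still exists in the target manifest
# hits the (modified, revert, remove, True, True) row, so it is reverted
# and a .orig backup is saved; were it missing from the target manifest it
# would instead be removed, again with a backup.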
2408 2409 entries = names.items()
2409 2410 entries.sort()
2410 2411
2411 2412 for abs, (rel, exact) in entries:
2412 2413 mfentry = mf.get(abs)
2413 2414 def handle(xlist, dobackup):
2414 2415 xlist[0].append(abs)
2415 2416 update[abs] = 1
2416 2417 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2417 2418 bakname = "%s.orig" % rel
2418 2419 ui.note(_('saving current version of %s as %s\n') %
2419 2420 (rel, bakname))
2420 2421 if not opts.get('dry_run'):
2421 2422 shutil.copyfile(rel, bakname)
2422 2423 shutil.copymode(rel, bakname)
2423 2424 if ui.verbose or not exact:
2424 2425 ui.status(xlist[1] % rel)
2425 2426 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2426 2427 if abs not in table: continue
2427 2428 # file has changed in dirstate
2428 2429 if mfentry:
2429 2430 handle(hitlist, backuphit)
2430 2431 elif misslist is not None:
2431 2432 handle(misslist, backupmiss)
2432 2433 else:
2433 2434 if exact: ui.warn(_('file not managed: %s\n') % rel)
2434 2435 break
2435 2436 else:
2436 2437 # file has not changed in dirstate
2437 2438 if node == parent:
2438 2439 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2439 2440 continue
2440 2441 if pmf is None:
2441 2442 # only need parent manifest in this unlikely case,
2442 2443 # so do not read by default
2443 2444 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2444 2445 if abs in pmf:
2445 2446 if mfentry:
2446 2447 # if version of file is same in parent and target
2447 2448 # manifests, do nothing
2448 2449 if pmf[abs] != mfentry:
2449 2450 handle(revert, False)
2450 2451 else:
2451 2452 handle(remove, False)
2452 2453
2453 2454 if not opts.get('dry_run'):
2454 2455 repo.dirstate.forget(forget[0])
2455 2456 r = hg.revert(repo, node, update.has_key, wlock)
2456 2457 repo.dirstate.update(add[0], 'a')
2457 2458 repo.dirstate.update(undelete[0], 'n')
2458 2459 repo.dirstate.update(remove[0], 'r')
2459 2460 return r
2460 2461
2461 2462 def rollback(ui, repo):
2462 2463 """roll back the last transaction in this repository
2463 2464
2464 2465 Roll back the last transaction in this repository, restoring the
2465 2466 project to its state prior to the transaction.
2466 2467
2467 2468 Transactions are used to encapsulate the effects of all commands
2468 2469 that create new changesets or propagate existing changesets into a
2469 2470 repository. For example, the following commands are transactional,
2470 2471 and their effects can be rolled back:
2471 2472
2472 2473 commit
2473 2474 import
2474 2475 pull
2475 2476 push (with this repository as destination)
2476 2477 unbundle
2477 2478
2478 2479 This command should be used with care. There is only one level of
2479 2480 rollback, and there is no way to undo a rollback.
2480 2481
2481 2482 This command is not intended for use on public repositories. Once
2482 2483 changes are visible for pull by other users, rolling a transaction
2483 2484 back locally is ineffective (someone else may already have pulled
2484 2485 the changes). Furthermore, a race is possible with readers of the
2485 2486 repository; for example an in-progress pull from the repository
2486 2487 may fail if a rollback is performed.
2487 2488 """
2488 2489 repo.rollback()
2489 2490
2490 2491 def root(ui, repo):
2491 2492 """print the root (top) of the current working dir
2492 2493
2493 2494 Print the root directory of the current repository.
2494 2495 """
2495 2496 ui.write(repo.root + "\n")
2496 2497
2497 2498 def serve(ui, repo, **opts):
2498 2499 """export the repository via HTTP
2499 2500
2500 2501 Start a local HTTP repository browser and pull server.
2501 2502
2502 2503 By default, the server logs accesses to stdout and errors to
2503 2504 stderr. Use the "-A" and "-E" options to log to files.
2504 2505 """
2505 2506
2506 2507 if opts["stdio"]:
2507 2508 if repo is None:
2508 2509 raise hg.RepoError(_("There is no Mercurial repository here"
2509 2510 " (.hg not found)"))
2510 2511 s = sshserver.sshserver(ui, repo)
2511 2512 s.serve_forever()
2512 2513
2513 2514 optlist = ("name templates style address port ipv6"
2514 2515 " accesslog errorlog webdir_conf")
2515 2516 for o in optlist.split():
2516 2517 if opts[o]:
2517 2518 ui.setconfig("web", o, str(opts[o]))
2518 2519
2519 2520 if repo is None and not ui.config("web", "webdir_conf"):
2520 2521 raise hg.RepoError(_("There is no Mercurial repository here"
2521 2522 " (.hg not found)"))
2522 2523
2523 2524 if opts['daemon'] and not opts['daemon_pipefds']:
2524 2525 rfd, wfd = os.pipe()
2525 2526 args = sys.argv[:]
2526 2527 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2527 2528 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2528 2529 args[0], args)
2529 2530 os.close(wfd)
2530 2531 os.read(rfd, 1)
2531 2532 os._exit(0)
2532 2533
2533 2534 try:
2534 2535 httpd = hgweb.server.create_server(ui, repo)
2535 2536 except socket.error, inst:
2536 2537 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2537 2538
2538 2539 if ui.verbose:
2539 2540 addr, port = httpd.socket.getsockname()
2540 2541 if addr == '0.0.0.0':
2541 2542 addr = socket.gethostname()
2542 2543 else:
2543 2544 try:
2544 2545 addr = socket.gethostbyaddr(addr)[0]
2545 2546 except socket.error:
2546 2547 pass
2547 2548 if port != 80:
2548 2549 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2549 2550 else:
2550 2551 ui.status(_('listening at http://%s/\n') % addr)
2551 2552
2552 2553 if opts['pid_file']:
2553 2554 fp = open(opts['pid_file'], 'w')
2554 2555 fp.write(str(os.getpid()) + '\n')
2555 2556 fp.close()
2556 2557
2557 2558 if opts['daemon_pipefds']:
2558 2559 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2559 2560 os.close(rfd)
2560 2561 os.write(wfd, 'y')
2561 2562 os.close(wfd)
2562 2563 sys.stdout.flush()
2563 2564 sys.stderr.flush()
2564 2565 fd = os.open(util.nulldev, os.O_RDWR)
2565 2566 if fd != 0: os.dup2(fd, 0)
2566 2567 if fd != 1: os.dup2(fd, 1)
2567 2568 if fd != 2: os.dup2(fd, 2)
2568 2569 if fd not in (0, 1, 2): os.close(fd)
2569 2570
2570 2571 httpd.serve_forever()
2571 2572
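# Trace of the --daemon handshake in serve() above (no new behaviour, just
# the existing code path spelled out):
#
#   parent: rfd, wfd = os.pipe(); re-exec argv plus --daemon-pipefds=rfd,wfd
#   parent: os.read(rfd, 1)       # block until the child is ready
#   child : bind the HTTP server, write the pid file if requested
#   child : os.write(wfd, 'y')    # release the parent, which _exit(0)s
#   child : redirect fds 0/1/2 to the null device, then serve_forever()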
2572 2573 def status(ui, repo, *pats, **opts):
2573 2574 """show changed files in the working directory
2574 2575
2575 2576 Show status of files in the repository. If names are given, only
2576 2577 files that match are shown. Files that are clean or ignored, are
2577 2578 not listed unless -c (clean), -i (ignored) or -A is given.
2578 2579
2579 2580 If one revision is given, it is used as the base revision.
2580 2581 If two revisions are given, the difference between them is shown.
2581 2582
2582 2583 The codes used to show the status of files are:
2583 2584 M = modified
2584 2585 A = added
2585 2586 R = removed
2586 2587 C = clean
2587 2588 ! = deleted, but still tracked
2588 2589 ? = not tracked
2589 2590 I = ignored (not shown by default)
2590 2591 = the previous added file was copied from here
2591 2592 """
2592 2593
2593 2594 all = opts['all']
2594 2595 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2595 2596
2596 2597 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2597 2598 cwd = (pats and repo.getcwd()) or ''
2598 2599 modified, added, removed, deleted, unknown, ignored, clean = [
2599 2600 [util.pathto(cwd, x) for x in n]
2600 2601 for n in repo.status(node1=node1, node2=node2, files=files,
2601 2602 match=matchfn,
2602 2603 list_ignored=all or opts['ignored'],
2603 2604 list_clean=all or opts['clean'])]
2604 2605
2605 2606 changetypes = (('modified', 'M', modified),
2606 2607 ('added', 'A', added),
2607 2608 ('removed', 'R', removed),
2608 2609 ('deleted', '!', deleted),
2609 2610 ('unknown', '?', unknown),
2610 2611 ('ignored', 'I', ignored))
2611 2612
2612 2613 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2613 2614
2614 2615 end = opts['print0'] and '\0' or '\n'
2615 2616
2616 2617 for opt, char, changes in ([ct for ct in explicit_changetypes
2617 2618 if all or opts[ct[0]]]
2618 2619 or changetypes):
2619 2620 if opts['no_status']:
2620 2621 format = "%%s%s" % end
2621 2622 else:
2622 2623 format = "%s %%s%s" % (char, end)
2623 2624
2624 2625 for f in changes:
2625 2626 ui.write(format % f)
2626 2627 if ((all or opts.get('copies')) and not opts.get('no_status')):
2627 2628 copied = repo.dirstate.copied(f)
2628 2629 if copied:
2629 2630 ui.write(' %s%s' % (copied, end))
2630 2631
2631 2632 def tag(ui, repo, name, rev_=None, **opts):
2632 2633 """add a tag for the current tip or a given revision
2633 2634
2634 2635 Name a particular revision using <name>.
2635 2636
2636 2637 Tags are used to name particular revisions of the repository and are
2637 2638 very useful to compare different revisions, to go back to significant
2638 2639 earlier versions or to mark branch points as releases, etc.
2639 2640
2640 2641 If no revision is given, the parent of the working directory is used.
2641 2642
2642 2643 To facilitate version control, distribution, and merging of tags,
2643 2644 they are stored as a file named ".hgtags" which is managed
2644 2645 similarly to other project files and can be hand-edited if
2645 2646 necessary. The file '.hg/localtags' is used for local tags (not
2646 2647 shared among repositories).
2647 2648 """
2648 2649 if name in ['tip', '.']:
2649 2650 raise util.Abort(_("the name '%s' is reserved") % name)
2650 2651 if rev_ is not None:
2651 2652 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2652 2653 "please use 'hg tag [-r REV] NAME' instead\n"))
2653 2654 if opts['rev']:
2654 2655 raise util.Abort(_("use only one form to specify the revision"))
2655 2656 if opts['rev']:
2656 2657 rev_ = opts['rev']
2657 2658 if not rev_ and repo.dirstate.parents()[1] != nullid:
2658 2659 raise util.Abort(_('uncommitted merge - please provide a '
2659 2660 'specific revision'))
2660 2661 r = repo.changectx(rev_).node()
2661 2662
2662 2663 message = opts['message']
2663 2664 if not message:
2664 2665 message = _('Added tag %s for changeset %s') % (name, short(r))
2665 2666
2666 2667 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2667 2668
2668 2669 def tags(ui, repo):
2669 2670 """list repository tags
2670 2671
2671 2672 List the repository tags.
2672 2673
2673 2674 This lists both regular and local tags.
2674 2675 """
2675 2676
2676 2677 l = repo.tagslist()
2677 2678 l.reverse()
2678 2679 hexfunc = ui.debugflag and hex or short
2679 2680 for t, n in l:
2680 2681 try:
2681 2682 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2682 2683 except KeyError:
2683 2684 r = " ?:?"
2684 2685 if ui.quiet:
2685 2686 ui.write("%s\n" % t)
2686 2687 else:
2687 2688 ui.write("%-30s %s\n" % (t, r))
2688 2689
2689 2690 def tip(ui, repo, **opts):
2690 2691 """show the tip revision
2691 2692
2692 2693 Show the tip revision.
2693 2694 """
2694 2695 n = repo.changelog.tip()
2695 2696 br = None
2696 2697 if opts['branches']:
2697 2698 ui.warn(_("the --branches option is deprecated, "
2698 2699 "please use 'hg branches' instead\n"))
2699 2700 br = repo.branchlookup([n])
2700 2701 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2701 2702 if opts['patch']:
2702 2703 patch.diff(repo, repo.changelog.parents(n)[0], n)
2703 2704
2704 2705 def unbundle(ui, repo, fname, **opts):
2705 2706 """apply a changegroup file
2706 2707
2707 2708 Apply a compressed changegroup file generated by the bundle
2708 2709 command.
2709 2710 """
2710 2711 f = urllib.urlopen(fname)
2711 2712
2712 2713 header = f.read(6)
2713 2714 if not header.startswith("HG"):
2714 2715 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2715 2716 elif not header.startswith("HG10"):
2716 2717 raise util.Abort(_("%s: unknown bundle version") % fname)
2717 2718 elif header == "HG10BZ":
2718 2719 def generator(f):
2719 2720 zd = bz2.BZ2Decompressor()
2720 2721 zd.decompress("BZ")
2721 2722 for chunk in f:
2722 2723 yield zd.decompress(chunk)
2723 2724 elif header == "HG10UN":
2724 2725 def generator(f):
2725 2726 for chunk in f:
2726 2727 yield chunk
2727 2728 else:
2728 2729 raise util.Abort(_("%s: unknown bundle compression type")
2729 2730 % fname)
2730 2731 gen = generator(util.filechunkiter(f, 4096))
2731 2732 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2732 2733 'bundle:' + fname)
2733 2734 return postincoming(ui, repo, modheads, opts['update'])
2734 2735
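# Note on the bundle header handling in unbundle() above (inferred from the
# "BZ" re-priming, not a separate format specification): a compressed
# bundle is "HG10" followed directly by a bz2 stream whose own magic begins
# with "BZ", so the first six bytes read back as "HG10BZ". Those six bytes
# were already consumed by the header check, hence the decompressor is fed
# "BZ" again before the remaining chunks. "HG10UN" marks an uncompressed
# changegroup that is streamed through unchanged.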
2735 2736 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2736 2737 branch=None):
2737 2738 """update or merge working directory
2738 2739
2739 2740 Update the working directory to the specified revision.
2740 2741
2741 2742 If there are no outstanding changes in the working directory and
2742 2743 there is a linear relationship between the current version and the
2743 2744 requested version, the result is the requested version.
2744 2745
2745 2746 To merge the working directory with another revision, use the
2746 2747 merge command.
2747 2748
2748 2749 By default, update will refuse to run if doing so would require
2749 2750 merging or discarding local changes.
2750 2751 """
2751 2752 node = _lookup(repo, node, branch)
2752 2753 if clean:
2753 2754 return hg.clean(repo, node)
2754 2755 else:
2755 2756 return hg.update(repo, node)
2756 2757
2757 2758 def _lookup(repo, node, branch=None):
2758 2759 if branch:
2759 2760 repo.ui.warn(_("the --branch option is deprecated, "
2760 2761 "please use 'hg branch' instead\n"))
2761 2762 br = repo.branchlookup(branch=branch)
2762 2763 found = []
2763 2764 for x in br:
2764 2765 if branch in br[x]:
2765 2766 found.append(x)
2766 2767 if len(found) > 1:
2767 2768 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2768 2769 for x in found:
2769 2770 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2770 2771 raise util.Abort("")
2771 2772 if len(found) == 1:
2772 2773 node = found[0]
2773 2774 repo.ui.warn(_("Using head %s for branch %s\n")
2774 2775 % (short(node), branch))
2775 2776 else:
2776 2777 raise util.Abort(_("branch %s not found") % branch)
2777 2778 else:
2778 2779 node = node and repo.lookup(node) or repo.changelog.tip()
2779 2780 return node
2780 2781
2781 2782 def verify(ui, repo):
2782 2783 """verify the integrity of the repository
2783 2784
2784 2785 Verify the integrity of the current repository.
2785 2786
2786 2787 This will perform an extensive check of the repository's
2787 2788 integrity, validating the hashes and checksums of each entry in
2788 2789 the changelog, manifest, and tracked files, as well as the
2789 2790 integrity of their crosslinks and indices.
2790 2791 """
2791 2792 return hg.verify(repo)
2792 2793
2793 2794 # Command options and aliases are listed here, alphabetically
2794 2795
2795 2796 globalopts = [
2796 2797 ('R', 'repository', '',
2797 2798 _('repository root directory or symbolic path name')),
2798 2799 ('', 'cwd', '', _('change working directory')),
2799 2800 ('y', 'noninteractive', None,
2800 2801 _('do not prompt, assume \'yes\' for any required answers')),
2801 2802 ('q', 'quiet', None, _('suppress output')),
2802 2803 ('v', 'verbose', None, _('enable additional output')),
2803 2804 ('', 'config', [], _('set/override config option')),
2804 2805 ('', 'debug', None, _('enable debugging output')),
2805 2806 ('', 'debugger', None, _('start debugger')),
2806 2807 ('', 'lsprof', None, _('print improved command execution profile')),
2807 2808 ('', 'traceback', None, _('print traceback on exception')),
2808 2809 ('', 'time', None, _('time how long the command takes')),
2809 2810 ('', 'profile', None, _('print command execution profile')),
2810 2811 ('', 'version', None, _('output version information and exit')),
2811 2812 ('h', 'help', None, _('display help and exit')),
2812 2813 ]
2813 2814
2814 2815 dryrunopts = [('n', 'dry-run', None,
2815 2816 _('do not perform actions, just print output'))]
2816 2817
2817 2818 remoteopts = [
2818 2819 ('e', 'ssh', '', _('specify ssh command to use')),
2819 2820 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2820 2821 ]
2821 2822
2822 2823 walkopts = [
2823 2824 ('I', 'include', [], _('include names matching the given patterns')),
2824 2825 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2825 2826 ]
2826 2827
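# Shape of a single option entry as consumed by fancyopts (the example is
# copied from the log options below):
#
#   ('r', 'rev', [], _('show the specified revision or range'))
#    |     |     |    +-- help text shown by 'hg help <command>'
#    |     |     +------- default value; a list makes the option repeatable
#    |     +------------- long option name (--rev)
#    +------------------- short option name (-r)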
2827 2828 table = {
2828 2829 "^add":
2829 2830 (add,
2830 2831 walkopts + dryrunopts,
2831 2832 _('hg add [OPTION]... [FILE]...')),
2832 2833 "addremove":
2833 2834 (addremove,
2834 2835 [('s', 'similarity', '',
2835 2836 _('guess renamed files by similarity (0<=s<=100)')),
2836 2837 ] + walkopts + dryrunopts,
2837 2838 _('hg addremove [OPTION]... [FILE]...')),
2838 2839 "^annotate":
2839 2840 (annotate,
2840 2841 [('r', 'rev', '', _('annotate the specified revision')),
2841 2842 ('f', 'follow', None, _('follow file copies and renames')),
2842 2843 ('a', 'text', None, _('treat all files as text')),
2843 2844 ('u', 'user', None, _('list the author')),
2844 2845 ('d', 'date', None, _('list the date')),
2845 2846 ('n', 'number', None, _('list the revision number (default)')),
2846 2847 ('c', 'changeset', None, _('list the changeset')),
2847 2848 ] + walkopts,
2848 2849 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2849 2850 "archive":
2850 2851 (archive,
2851 2852 [('', 'no-decode', None, _('do not pass files through decoders')),
2852 2853 ('p', 'prefix', '', _('directory prefix for files in archive')),
2853 2854 ('r', 'rev', '', _('revision to distribute')),
2854 2855 ('t', 'type', '', _('type of distribution to create')),
2855 2856 ] + walkopts,
2856 2857 _('hg archive [OPTION]... DEST')),
2857 2858 "backout":
2858 2859 (backout,
2859 2860 [('', 'merge', None,
2860 2861 _('merge with old dirstate parent after backout')),
2861 2862 ('m', 'message', '', _('use <text> as commit message')),
2862 2863 ('l', 'logfile', '', _('read commit message from <file>')),
2863 2864 ('d', 'date', '', _('record datecode as commit date')),
2864 2865 ('', 'parent', '', _('parent to choose when backing out merge')),
2865 2866 ('u', 'user', '', _('record user as committer')),
2866 2867 ] + walkopts,
2867 2868 _('hg backout [OPTION]... REV')),
2868 2869 "branch": (branch, [], _('hg branch [NAME]')),
2869 2870 "branches": (branches, [], _('hg branches')),
2870 2871 "bundle":
2871 2872 (bundle,
2872 2873 [('f', 'force', None,
2873 2874 _('run even when remote repository is unrelated')),
2874 2875 ('r', 'rev', [],
2875 2876 _('a changeset you would like to bundle')),
2876 2877 ('', 'base', [],
2877 2878 _('a base changeset to specify instead of a destination')),
2878 2879 ] + remoteopts,
2879 2880 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2880 2881 "cat":
2881 2882 (cat,
2882 2883 [('o', 'output', '', _('print output to file with formatted name')),
2883 2884 ('r', 'rev', '', _('print the given revision')),
2884 2885 ] + walkopts,
2885 2886 _('hg cat [OPTION]... FILE...')),
2886 2887 "^clone":
2887 2888 (clone,
2888 2889 [('U', 'noupdate', None, _('do not update the new working directory')),
2889 2890 ('r', 'rev', [],
2890 2891 _('a changeset you would like to have after cloning')),
2891 2892 ('', 'pull', None, _('use pull protocol to copy metadata')),
2892 2893 ('', 'uncompressed', None,
2893 2894 _('use uncompressed transfer (fast over LAN)')),
2894 2895 ] + remoteopts,
2895 2896 _('hg clone [OPTION]... SOURCE [DEST]')),
2896 2897 "^commit|ci":
2897 2898 (commit,
2898 2899 [('A', 'addremove', None,
2899 2900 _('mark new/missing files as added/removed before committing')),
2900 2901 ('m', 'message', '', _('use <text> as commit message')),
2901 2902 ('l', 'logfile', '', _('read the commit message from <file>')),
2902 2903 ('d', 'date', '', _('record datecode as commit date')),
2903 2904 ('u', 'user', '', _('record user as committer')),
2904 2905 ] + walkopts,
2905 2906 _('hg commit [OPTION]... [FILE]...')),
2906 2907 "copy|cp":
2907 2908 (copy,
2908 2909 [('A', 'after', None, _('record a copy that has already occurred')),
2909 2910 ('f', 'force', None,
2910 2911 _('forcibly copy over an existing managed file')),
2911 2912 ] + walkopts + dryrunopts,
2912 2913 _('hg copy [OPTION]... [SOURCE]... DEST')),
2913 2914 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2914 2915 "debugcomplete":
2915 2916 (debugcomplete,
2916 2917 [('o', 'options', None, _('show the command options'))],
2917 2918 _('debugcomplete [-o] CMD')),
2918 2919 "debugrebuildstate":
2919 2920 (debugrebuildstate,
2920 2921 [('r', 'rev', '', _('revision to rebuild to'))],
2921 2922 _('debugrebuildstate [-r REV] [REV]')),
2922 2923 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2923 2924 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2924 2925 "debugstate": (debugstate, [], _('debugstate')),
2925 2926 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2926 2927 "debugindex": (debugindex, [], _('debugindex FILE')),
2927 2928 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2928 2929 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2929 2930 "debugwalk":
2930 2931 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2931 2932 "^diff":
2932 2933 (diff,
2933 2934 [('r', 'rev', [], _('revision')),
2934 2935 ('a', 'text', None, _('treat all files as text')),
2935 2936 ('p', 'show-function', None,
2936 2937 _('show which function each change is in')),
2937 2938 ('g', 'git', None, _('use git extended diff format')),
2938 2939 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 2940 ('w', 'ignore-all-space', None,
2940 2941 _('ignore white space when comparing lines')),
2941 2942 ('b', 'ignore-space-change', None,
2942 2943 _('ignore changes in the amount of white space')),
2943 2944 ('B', 'ignore-blank-lines', None,
2944 2945 _('ignore changes whose lines are all blank')),
2945 2946 ] + walkopts,
2946 2947 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2947 2948 "^export":
2948 2949 (export,
2949 2950 [('o', 'output', '', _('print output to file with formatted name')),
2950 2951 ('a', 'text', None, _('treat all files as text')),
2951 2952 ('g', 'git', None, _('use git extended diff format')),
2952 2953 ('', 'nodates', None, _("don't include dates in diff headers")),
2953 2954 ('', 'switch-parent', None, _('diff against the second parent'))],
2954 2955 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2955 2956 "grep":
2956 2957 (grep,
2957 2958 [('0', 'print0', None, _('end fields with NUL')),
2958 2959 ('', 'all', None, _('print all revisions that match')),
2959 2960 ('f', 'follow', None,
2960 2961 _('follow changeset history, or file history across copies and renames')),
2961 2962 ('i', 'ignore-case', None, _('ignore case when matching')),
2962 2963 ('l', 'files-with-matches', None,
2963 2964 _('print only filenames and revs that match')),
2964 2965 ('n', 'line-number', None, _('print matching line numbers')),
2965 2966 ('r', 'rev', [], _('search in given revision range')),
2966 2967 ('u', 'user', None, _('print user who committed change')),
2967 2968 ] + walkopts,
2968 2969 _('hg grep [OPTION]... PATTERN [FILE]...')),
2969 2970 "heads":
2970 2971 (heads,
2971 2972 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2972 2973 ('', 'style', '', _('display using template map file')),
2973 2974 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2974 2975 ('', 'template', '', _('display with template'))],
2975 2976 _('hg heads [-r REV]')),
2976 2977 "help": (help_, [], _('hg help [COMMAND]')),
2977 2978 "identify|id": (identify, [], _('hg identify')),
2978 2979 "import|patch":
2979 2980 (import_,
2980 2981 [('p', 'strip', 1,
2981 2982 _('directory strip option for patch. This has the same\n'
2982 2983 'meaning as the corresponding patch option')),
2983 2984 ('m', 'message', '', _('use <text> as commit message')),
2984 2985 ('b', 'base', '', _('base path (DEPRECATED)')),
2985 2986 ('f', 'force', None,
2986 2987 _('skip check for outstanding uncommitted changes'))],
2987 2988 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2988 2989 "incoming|in": (incoming,
2989 2990 [('M', 'no-merges', None, _('do not show merges')),
2990 2991 ('f', 'force', None,
2991 2992 _('run even when remote repository is unrelated')),
2992 2993 ('', 'style', '', _('display using template map file')),
2993 2994 ('n', 'newest-first', None, _('show newest record first')),
2994 2995 ('', 'bundle', '', _('file to store the bundles into')),
2995 2996 ('p', 'patch', None, _('show patch')),
2996 2997 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2997 2998 ('', 'template', '', _('display with template')),
2998 2999 ] + remoteopts,
2999 3000 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3000 3001 ' [--bundle FILENAME] [SOURCE]')),
3001 3002 "^init":
3002 3003 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3003 3004 "locate":
3004 3005 (locate,
3005 3006 [('r', 'rev', '', _('search the repository as it stood at rev')),
3006 3007 ('0', 'print0', None,
3007 3008 _('end filenames with NUL, for use with xargs')),
3008 3009 ('f', 'fullpath', None,
3009 3010 _('print complete paths from the filesystem root')),
3010 3011 ] + walkopts,
3011 3012 _('hg locate [OPTION]... [PATTERN]...')),
3012 3013 "^log|history":
3013 3014 (log,
3014 3015 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3015 3016 ('f', 'follow', None,
3016 3017 _('follow changeset history, or file history across copies and renames')),
3017 3018 ('', 'follow-first', None,
3018 3019 _('only follow the first parent of merge changesets')),
3019 3020 ('C', 'copies', None, _('show copied files')),
3020 3021 ('k', 'keyword', [], _('search for a keyword')),
3021 3022 ('l', 'limit', '', _('limit number of changes displayed')),
3022 3023 ('r', 'rev', [], _('show the specified revision or range')),
3023 3024 ('M', 'no-merges', None, _('do not show merges')),
3024 3025 ('', 'style', '', _('display using template map file')),
3025 3026 ('m', 'only-merges', None, _('show only merges')),
3026 3027 ('p', 'patch', None, _('show patch')),
3027 3028 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3028 3029 ('', 'template', '', _('display with template')),
3029 3030 ] + walkopts,
3030 3031 _('hg log [OPTION]... [FILE]')),
3031 3032 "manifest": (manifest, [], _('hg manifest [REV]')),
3032 3033 "merge":
3033 3034 (merge,
3034 3035 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3035 3036 ('f', 'force', None, _('force a merge with outstanding changes'))],
3036 3037 _('hg merge [-f] [REV]')),
3037 3038 "outgoing|out": (outgoing,
3038 3039 [('M', 'no-merges', None, _('do not show merges')),
3039 3040 ('f', 'force', None,
3040 3041 _('run even when remote repository is unrelated')),
3041 3042 ('p', 'patch', None, _('show patch')),
3042 3043 ('', 'style', '', _('display using template map file')),
3043 3044 ('r', 'rev', [], _('a specific revision you would like to push')),
3044 3045 ('n', 'newest-first', None, _('show newest record first')),
3045 3046 ('', 'template', '', _('display with template')),
3046 3047 ] + remoteopts,
3047 3048 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3048 3049 "^parents":
3049 3050 (parents,
3050 3051 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3051 3052 ('r', 'rev', '', _('show parents from the specified rev')),
3052 3053 ('', 'style', '', _('display using template map file')),
3053 3054 ('', 'template', '', _('display with template'))],
3054 3055 _('hg parents [-r REV] [FILE]')),
3055 3056 "paths": (paths, [], _('hg paths [NAME]')),
3056 3057 "^pull":
3057 3058 (pull,
3058 3059 [('u', 'update', None,
3059 3060 _('update to new tip if changesets were pulled')),
3060 3061 ('f', 'force', None,
3061 3062 _('run even when remote repository is unrelated')),
3062 3063 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3063 3064 ] + remoteopts,
3064 3065 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3065 3066 "^push":
3066 3067 (push,
3067 3068 [('f', 'force', None, _('force push')),
3068 3069 ('r', 'rev', [], _('a specific revision you would like to push')),
3069 3070 ] + remoteopts,
3070 3071 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3071 3072 "debugrawcommit|rawcommit":
3072 3073 (rawcommit,
3073 3074 [('p', 'parent', [], _('parent')),
3074 3075 ('d', 'date', '', _('date code')),
3075 3076 ('u', 'user', '', _('user')),
3076 3077 ('F', 'files', '', _('file list')),
3077 3078 ('m', 'message', '', _('commit message')),
3078 3079 ('l', 'logfile', '', _('commit message file'))],
3079 3080 _('hg debugrawcommit [OPTION]... [FILE]...')),
3080 3081 "recover": (recover, [], _('hg recover')),
3081 3082 "^remove|rm":
3082 3083 (remove,
3083 3084 [('A', 'after', None, _('record remove that has already occurred')),
3084 3085 ('f', 'force', None, _('remove file even if modified')),
3085 3086 ] + walkopts,
3086 3087 _('hg remove [OPTION]... FILE...')),
3087 3088 "rename|mv":
3088 3089 (rename,
3089 3090 [('A', 'after', None, _('record a rename that has already occurred')),
3090 3091 ('f', 'force', None,
3091 3092 _('forcibly copy over an existing managed file')),
3092 3093 ] + walkopts + dryrunopts,
3093 3094 _('hg rename [OPTION]... SOURCE... DEST')),
3094 3095 "^revert":
3095 3096 (revert,
3096 3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3097 3098 ('r', 'rev', '', _('revision to revert to')),
3098 3099 ('', 'no-backup', None, _('do not save backup copies of files')),
3099 3100 ] + walkopts + dryrunopts,
3100 3101 _('hg revert [-r REV] [NAME]...')),
3101 3102 "rollback": (rollback, [], _('hg rollback')),
3102 3103 "root": (root, [], _('hg root')),
3103 3104 "showconfig|debugconfig":
3104 3105 (showconfig,
3105 3106 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3106 3107 _('showconfig [-u] [NAME]...')),
3107 3108 "^serve":
3108 3109 (serve,
3109 3110 [('A', 'accesslog', '', _('name of access log file to write to')),
3110 3111 ('d', 'daemon', None, _('run server in background')),
3111 3112 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3112 3113 ('E', 'errorlog', '', _('name of error log file to write to')),
3113 3114 ('p', 'port', 0, _('port to use (default: 8000)')),
3114 3115 ('a', 'address', '', _('address to use')),
3115 3116 ('n', 'name', '',
3116 3117 _('name to show in web pages (default: working dir)')),
3117 3118 ('', 'webdir-conf', '', _('name of the webdir config file'
3118 3119 ' (serve more than one repo)')),
3119 3120 ('', 'pid-file', '', _('name of file to write process ID to')),
3120 3121 ('', 'stdio', None, _('for remote clients')),
3121 3122 ('t', 'templates', '', _('web templates to use')),
3122 3123 ('', 'style', '', _('template style to use')),
3123 3124 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3124 3125 _('hg serve [OPTION]...')),
3125 3126 "^status|st":
3126 3127 (status,
3127 3128 [('A', 'all', None, _('show status of all files')),
3128 3129 ('m', 'modified', None, _('show only modified files')),
3129 3130 ('a', 'added', None, _('show only added files')),
3130 3131 ('r', 'removed', None, _('show only removed files')),
3131 3132 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3132 3133 ('c', 'clean', None, _('show only files without changes')),
3133 3134 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3134 3135 ('i', 'ignored', None, _('show ignored files')),
3135 3136 ('n', 'no-status', None, _('hide status prefix')),
3136 3137 ('C', 'copies', None, _('show source of copied files')),
3137 3138 ('0', 'print0', None,
3138 3139 _('end filenames with NUL, for use with xargs')),
3139 3140 ('', 'rev', [], _('show difference from revision')),
3140 3141 ] + walkopts,
3141 3142 _('hg status [OPTION]... [FILE]...')),
3142 3143 "tag":
3143 3144 (tag,
3144 3145 [('l', 'local', None, _('make the tag local')),
3145 3146 ('m', 'message', '', _('message for tag commit log entry')),
3146 3147 ('d', 'date', '', _('record datecode as commit date')),
3147 3148 ('u', 'user', '', _('record user as committer')),
3148 3149 ('r', 'rev', '', _('revision to tag'))],
3149 3150 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3150 3151 "tags": (tags, [], _('hg tags')),
3151 3152 "tip":
3152 3153 (tip,
3153 3154 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3154 3155 ('', 'style', '', _('display using template map file')),
3155 3156 ('p', 'patch', None, _('show patch')),
3156 3157 ('', 'template', '', _('display with template'))],
3157 3158 _('hg tip [-p]')),
3158 3159 "unbundle":
3159 3160 (unbundle,
3160 3161 [('u', 'update', None,
3161 3162 _('update to new tip if changesets were unbundled'))],
3162 3163 _('hg unbundle [-u] FILE')),
3163 3164 "^update|up|checkout|co":
3164 3165 (update,
3165 3166 [('b', 'branch', '',
3166 3167 _('checkout the head of a specific branch (DEPRECATED)')),
3167 3168 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3168 3169 ('C', 'clean', None, _('overwrite locally modified files')),
3169 3170 ('f', 'force', None, _('force a merge with outstanding changes'))],
3170 3171 _('hg update [-C] [-f] [REV]')),
3171 3172 "verify": (verify, [], _('hg verify')),
3172 3173 "version": (show_version, [], _('hg version')),
3173 3174 }
3174 3175
3175 3176 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3176 3177 " debugindex debugindexdot")
3177 3178 optionalrepo = ("paths serve showconfig")
3178 3179
3179 3180 def findpossible(ui, cmd):
3180 3181 """
3181 3182 Return cmd -> (aliases, command table entry)
3182 3183 for each matching command.
3183 3184 Return debug commands (or their aliases) only if no normal command matches.
3184 3185 """
3185 3186 choice = {}
3186 3187 debugchoice = {}
3187 3188 for e in table.keys():
3188 3189 aliases = e.lstrip("^").split("|")
3189 3190 found = None
3190 3191 if cmd in aliases:
3191 3192 found = cmd
3192 3193 elif not ui.config("ui", "strict"):
3193 3194 for a in aliases:
3194 3195 if a.startswith(cmd):
3195 3196 found = a
3196 3197 break
3197 3198 if found is not None:
3198 3199 if aliases[0].startswith("debug") or found.startswith("debug"):
3199 3200 debugchoice[found] = (aliases, table[e])
3200 3201 else:
3201 3202 choice[found] = (aliases, table[e])
3202 3203
3203 3204 if not choice and debugchoice:
3204 3205 choice = debugchoice
3205 3206
3206 3207 return choice
3207 3208
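The prefix matching above lets any unambiguous abbreviation of a command or alias resolve to it, while debug commands only win when nothing else matches. A minimal standalone sketch of that matching, using a toy table rather than Mercurial's real one:

    # toy command table; '^' and '|' follow the same conventions as the table above
    toytable = {"^commit|ci": None, "copy|cp": None, "^config|showconfig": None}

    def possible(cmd):
        choice = {}
        for e in toytable:
            aliases = e.lstrip("^").split("|")
            if cmd in aliases:
                choice[cmd] = aliases
            else:
                for a in aliases:
                    if a.startswith(cmd):
                        choice[a] = aliases
                        break
        return choice

    possible("ci")   # {'ci': ['commit', 'ci']} -- exact alias match
    possible("co")   # three prefix matches -- findcmd would raise AmbiguousCommand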
3208 3209 def findcmd(ui, cmd):
3209 3210 """Return (aliases, command table entry) for command string."""
3210 3211 choice = findpossible(ui, cmd)
3211 3212
3212 3213 if choice.has_key(cmd):
3213 3214 return choice[cmd]
3214 3215
3215 3216 if len(choice) > 1:
3216 3217 clist = choice.keys()
3217 3218 clist.sort()
3218 3219 raise AmbiguousCommand(cmd, clist)
3219 3220
3220 3221 if choice:
3221 3222 return choice.values()[0]
3222 3223
3223 3224 raise UnknownCommand(cmd)
3224 3225
3225 3226 def catchterm(*args):
3226 3227 raise util.SignalInterrupt
3227 3228
3228 3229 def run():
3229 3230 sys.exit(dispatch(sys.argv[1:]))
3230 3231
3231 3232 class ParseError(Exception):
3232 3233 """Exception raised on errors in parsing the command line."""
3233 3234
3234 3235 def parse(ui, args):
3235 3236 options = {}
3236 3237 cmdoptions = {}
3237 3238
3238 3239 try:
3239 3240 args = fancyopts.fancyopts(args, globalopts, options)
3240 3241 except fancyopts.getopt.GetoptError, inst:
3241 3242 raise ParseError(None, inst)
3242 3243
3243 3244 if args:
3244 3245 cmd, args = args[0], args[1:]
3245 3246 aliases, i = findcmd(ui, cmd)
3246 3247 cmd = aliases[0]
3247 3248 defaults = ui.config("defaults", cmd)
3248 3249 if defaults:
3249 3250 args = shlex.split(defaults) + args
3250 3251 c = list(i[1])
3251 3252 else:
3252 3253 cmd = None
3253 3254 c = []
3254 3255
3255 3256 # combine global options into local
3256 3257 for o in globalopts:
3257 3258 c.append((o[0], o[1], options[o[1]], o[3]))
3258 3259
3259 3260 try:
3260 3261 args = fancyopts.fancyopts(args, c, cmdoptions)
3261 3262 except fancyopts.getopt.GetoptError, inst:
3262 3263 raise ParseError(cmd, inst)
3263 3264
3264 3265 # separate global options back out
3265 3266 for o in globalopts:
3266 3267 n = o[1]
3267 3268 options[n] = cmdoptions[n]
3268 3269 del cmdoptions[n]
3269 3270
3270 3271 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3271 3272
3272 3273 external = {}
3273 3274
3274 3275 def findext(name):
3275 3276 '''return module with given extension name'''
3276 3277 try:
3277 3278 return sys.modules[external[name]]
3278 3279 except KeyError:
3279 3280 for k, v in external.iteritems():
3280 3281 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3281 3282 return sys.modules[v]
3282 3283 raise KeyError(name)
3283 3284
3284 3285 def load_extensions(ui):
3285 3286 added = []
3286 3287 for ext_name, load_from_name in ui.extensions():
3287 3288 if ext_name in external:
3288 3289 continue
3289 3290 try:
3290 3291 if load_from_name:
3291 3292 # the module will be loaded in sys.modules
3292 3293 # choose a unique name so that it doesn't
3293 3294 # conflict with other modules
3294 3295 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 3296 mod = imp.load_source(module_name, load_from_name)
3296 3297 else:
3297 3298 def importh(name):
3298 3299 mod = __import__(name)
3299 3300 components = name.split('.')
3300 3301 for comp in components[1:]:
3301 3302 mod = getattr(mod, comp)
3302 3303 return mod
3303 3304 try:
3304 3305 mod = importh("hgext.%s" % ext_name)
3305 3306 except ImportError:
3306 3307 mod = importh(ext_name)
3307 3308 external[ext_name] = mod.__name__
3308 3309 added.append((mod, ext_name))
3309 3310 except (util.SignalInterrupt, KeyboardInterrupt):
3310 3311 raise
3311 3312 except Exception, inst:
3312 3313 ui.warn(_("*** failed to import extension %s: %s\n") %
3313 3314 (ext_name, inst))
3314 3315 if ui.print_exc():
3315 3316 return 1
3316 3317
3317 3318 for mod, name in added:
3318 3319 uisetup = getattr(mod, 'uisetup', None)
3319 3320 if uisetup:
3320 3321 uisetup(ui)
3321 3322 cmdtable = getattr(mod, 'cmdtable', {})
3322 3323 for t in cmdtable:
3323 3324 if t in table:
3324 3325 ui.warn(_("module %s overrides %s\n") % (name, t))
3325 3326 table.update(cmdtable)
3326 3327
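load_extensions takes its (name, path) pairs from ui.extensions(), i.e. the [extensions] section of the configuration: an entry with a value is loaded from that file via imp.load_source under an hgext_* module name, while an empty value is imported as hgext.NAME (falling back to a plain import). A hypothetical hgrc showing both forms:

    [extensions]
    # empty value: imported as hgext.mq (or plain 'mq' if that fails)
    mq =
    # explicit path: loaded with imp.load_source as module hgext_myext
    myext = /home/user/hg/myext.py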
3327 3328 def parseconfig(config):
3328 3329 """parse the --config options from the command line"""
3329 3330 parsed = []
3330 3331 for cfg in config:
3331 3332 try:
3332 3333 name, value = cfg.split('=', 1)
3333 3334 section, name = name.split('.', 1)
3334 3335 if not section or not name:
3335 3336 raise IndexError
3336 3337 parsed.append((section, name, value))
3337 3338 except (IndexError, ValueError):
3338 3339 raise util.Abort(_('malformed --config option: %s') % cfg)
3339 3340 return parsed
3340 3341
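Each --config override must have the form section.name=value; anything else aborts. For example, given the function above:

    parseconfig(['ui.username=alice', 'diff.git=1'])
    # -> [('ui', 'username', 'alice'), ('diff', 'git', '1')]

    parseconfig(['noseparator'])
    # -> util.Abort: malformed --config option: noseparator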
3341 3342 def dispatch(args):
3342 3343 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3343 3344 num = getattr(signal, name, None)
3344 3345 if num: signal.signal(num, catchterm)
3345 3346
3346 3347 try:
3347 3348 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3348 3349 except util.Abort, inst:
3349 3350 sys.stderr.write(_("abort: %s\n") % inst)
3350 3351 return -1
3351 3352
3352 3353 load_extensions(u)
3353 3354 u.addreadhook(load_extensions)
3354 3355
3355 3356 try:
3356 3357 cmd, func, args, options, cmdoptions = parse(u, args)
3357 3358 if options["time"]:
3358 3359 def get_times():
3359 3360 t = os.times()
3360 3361 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3361 3362 t = (t[0], t[1], t[2], t[3], time.clock())
3362 3363 return t
3363 3364 s = get_times()
3364 3365 def print_time():
3365 3366 t = get_times()
3366 3367 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3367 3368 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3368 3369 atexit.register(print_time)
3369 3370
3370 3371 # enter the debugger before command execution
3371 3372 if options['debugger']:
3372 3373 pdb.set_trace()
3373 3374
3374 3375 try:
3375 3376 if options['cwd']:
3376 3377 try:
3377 3378 os.chdir(options['cwd'])
3378 3379 except OSError, inst:
3379 3380 raise util.Abort('%s: %s' %
3380 3381 (options['cwd'], inst.strerror))
3381 3382
3382 3383 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3383 3384 not options["noninteractive"], options["traceback"],
3384 3385 parseconfig(options["config"]))
3385 3386
3386 3387 path = u.expandpath(options["repository"]) or ""
3387 3388 repo = path and hg.repository(u, path=path) or None
3388 3389 if repo and not repo.local():
3389 3390 raise util.Abort(_("repository '%s' is not local") % path)
3390 3391
3391 3392 if options['help']:
3392 3393 return help_(u, cmd, options['version'])
3393 3394 elif options['version']:
3394 3395 return show_version(u)
3395 3396 elif not cmd:
3396 3397 return help_(u, 'shortlist')
3397 3398
3398 3399 if cmd not in norepo.split():
3399 3400 try:
3400 3401 if not repo:
3401 3402 repo = hg.repository(u, path=path)
3402 3403 u = repo.ui
3403 3404 for name in external.itervalues():
3404 3405 mod = sys.modules[name]
3405 3406 if hasattr(mod, 'reposetup'):
3406 3407 mod.reposetup(u, repo)
3407 3408 hg.repo_setup_hooks.append(mod.reposetup)
3408 3409 except hg.RepoError:
3409 3410 if cmd not in optionalrepo.split():
3410 3411 raise
3411 3412 d = lambda: func(u, repo, *args, **cmdoptions)
3412 3413 else:
3413 3414 d = lambda: func(u, *args, **cmdoptions)
3414 3415
3415 3416 try:
3416 3417 if options['profile']:
3417 3418 import hotshot, hotshot.stats
3418 3419 prof = hotshot.Profile("hg.prof")
3419 3420 try:
3420 3421 try:
3421 3422 return prof.runcall(d)
3422 3423 except:
3423 3424 try:
3424 3425 u.warn(_('exception raised - generating '
3425 3426 'profile anyway\n'))
3426 3427 except:
3427 3428 pass
3428 3429 raise
3429 3430 finally:
3430 3431 prof.close()
3431 3432 stats = hotshot.stats.load("hg.prof")
3432 3433 stats.strip_dirs()
3433 3434 stats.sort_stats('time', 'calls')
3434 3435 stats.print_stats(40)
3435 3436 elif options['lsprof']:
3436 3437 try:
3437 3438 from mercurial import lsprof
3438 3439 except ImportError:
3439 3440 raise util.Abort(_(
3440 3441 'lsprof not available - install from '
3441 3442 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3442 3443 p = lsprof.Profiler()
3443 3444 p.enable(subcalls=True)
3444 3445 try:
3445 3446 return d()
3446 3447 finally:
3447 3448 p.disable()
3448 3449 stats = lsprof.Stats(p.getstats())
3449 3450 stats.sort()
3450 3451 stats.pprint(top=10, file=sys.stderr, climit=5)
3451 3452 else:
3452 3453 return d()
3453 3454 finally:
3454 3455 u.flush()
3455 3456 except:
3456 3457 # enter the debugger when we hit an exception
3457 3458 if options['debugger']:
3458 3459 pdb.post_mortem(sys.exc_info()[2])
3459 3460 u.print_exc()
3460 3461 raise
3461 3462 except ParseError, inst:
3462 3463 if inst.args[0]:
3463 3464 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3464 3465 help_(u, inst.args[0])
3465 3466 else:
3466 3467 u.warn(_("hg: %s\n") % inst.args[1])
3467 3468 help_(u, 'shortlist')
3468 3469 except AmbiguousCommand, inst:
3469 3470 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3470 3471 (inst.args[0], " ".join(inst.args[1])))
3471 3472 except UnknownCommand, inst:
3472 3473 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3473 3474 help_(u, 'shortlist')
3474 3475 except hg.RepoError, inst:
3475 3476 u.warn(_("abort: %s!\n") % inst)
3476 3477 except lock.LockHeld, inst:
3477 3478 if inst.errno == errno.ETIMEDOUT:
3478 3479 reason = _('timed out waiting for lock held by %s') % inst.locker
3479 3480 else:
3480 3481 reason = _('lock held by %s') % inst.locker
3481 3482 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3482 3483 except lock.LockUnavailable, inst:
3483 3484 u.warn(_("abort: could not lock %s: %s\n") %
3484 3485 (inst.desc or inst.filename, inst.strerror))
3485 3486 except revlog.RevlogError, inst:
3486 3487 u.warn(_("abort: %s!\n") % inst)
3487 3488 except util.SignalInterrupt:
3488 3489 u.warn(_("killed!\n"))
3489 3490 except KeyboardInterrupt:
3490 3491 try:
3491 3492 u.warn(_("interrupted!\n"))
3492 3493 except IOError, inst:
3493 3494 if inst.errno == errno.EPIPE:
3494 3495 if u.debugflag:
3495 3496 u.warn(_("\nbroken pipe\n"))
3496 3497 else:
3497 3498 raise
3498 3499 except IOError, inst:
3499 3500 if hasattr(inst, "code"):
3500 3501 u.warn(_("abort: %s\n") % inst)
3501 3502 elif hasattr(inst, "reason"):
3502 3503 u.warn(_("abort: error: %s\n") % inst.reason[1])
3503 3504 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3504 3505 if u.debugflag:
3505 3506 u.warn(_("broken pipe\n"))
3506 3507 elif getattr(inst, "strerror", None):
3507 3508 if getattr(inst, "filename", None):
3508 3509 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3509 3510 else:
3510 3511 u.warn(_("abort: %s\n") % inst.strerror)
3511 3512 else:
3512 3513 raise
3513 3514 except OSError, inst:
3514 3515 if getattr(inst, "filename", None):
3515 3516 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3516 3517 else:
3517 3518 u.warn(_("abort: %s\n") % inst.strerror)
3518 3519 except util.UnexpectedOutput, inst:
3519 3520 u.warn(_("abort: %s") % inst[0])
3520 3521 if not isinstance(inst[1], basestring):
3521 3522 u.warn(" %r\n" % (inst[1],))
3522 3523 elif not inst[1]:
3523 3524 u.warn(_(" empty string\n"))
3524 3525 else:
3525 3526 u.warn("\n%r%s\n" %
3526 3527 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3527 3528 except util.Abort, inst:
3528 3529 u.warn(_("abort: %s\n") % inst)
3529 3530 except TypeError, inst:
3530 3531 # was this an argument error?
3531 3532 tb = traceback.extract_tb(sys.exc_info()[2])
3532 3533 if len(tb) > 2: # no
3533 3534 raise
3534 3535 u.debug(inst, "\n")
3535 3536 u.warn(_("%s: invalid arguments\n") % cmd)
3536 3537 help_(u, cmd)
3537 3538 except SystemExit, inst:
3538 3539 # Commands shouldn't sys.exit directly, but give a return code.
3539 3540 # Just in case, catch this and pass the exit code to the caller.
3540 3541 return inst.code
3541 3542 except:
3542 3543 u.warn(_("** unknown exception encountered, details follow\n"))
3543 3544 u.warn(_("** report bug details to "
3544 3545 "http://www.selenic.com/mercurial/bts\n"))
3545 3546 u.warn(_("** or mercurial@selenic.com\n"))
3546 3547 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3547 3548 % version.get_version())
3548 3549 raise
3549 3550
3550 3551 return -1
@@ -1,418 +1,418 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "errno util os tempfile")
12 12
13 13 def filemerge(repo, fw, fo, wctx, mctx):
14 14 """perform a 3-way merge in the working directory
15 15
16 16 fw = filename in the working directory
17 17 fo = filename in other parent
18 18 wctx, mctx = working and merge changecontexts
19 19 """
20 20
21 21 def temp(prefix, ctx):
22 22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
23 23 (fd, name) = tempfile.mkstemp(prefix=pre)
24 24 f = os.fdopen(fd, "wb")
25 25 repo.wwrite(ctx.path(), ctx.data(), f)
26 26 f.close()
27 27 return name
28 28
29 29 fcm = wctx.filectx(fw)
30 30 fco = mctx.filectx(fo)
31 31
32 32 if not fco.cmp(fcm.data()): # files identical?
33 33 return None
34 34
35 35 fca = fcm.ancestor(fco)
36 36 if not fca:
37 37 fca = repo.filectx(fw, fileid=nullrev)
38 38 a = repo.wjoin(fw)
39 39 b = temp("base", fca)
40 40 c = temp("other", fco)
41 41
42 42 if fw != fo:
43 43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 44 else:
45 45 repo.ui.status(_("merging %s\n") % fw)
46 46
47 47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48 48
49 49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 50 or "hgmerge")
51 51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 52 environ={'HG_FILE': fw,
53 53 'HG_MY_NODE': str(wctx.parents()[0]),
54 54 'HG_OTHER_NODE': str(mctx)})
55 55 if r:
56 56 repo.ui.warn(_("merging %s failed!\n") % fw)
57 57
58 58 os.unlink(b)
59 59 os.unlink(c)
60 60 return r
61 61
62 62 def checkunknown(wctx, mctx):
63 63 "check for collisions between unknown files and files in mctx"
64 64 man = mctx.manifest()
65 65 for f in wctx.unknown():
66 66 if f in man:
67 67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 68 raise util.Abort(_("'%s' already exists in the working"
69 69 " dir and differs from remote") % f)
70 70
71 71 def forgetremoved(wctx, mctx):
72 72 """
73 73 Forget removed files
74 74
75 75 If we're jumping between revisions (as opposed to merging), and if
76 76 neither the working directory nor the target rev has the file,
77 77 then we need to remove it from the dirstate, to prevent the
78 78 dirstate from listing the file when it is no longer in the
79 79 manifest.
80 80 """
81 81
82 82 action = []
83 83 man = mctx.manifest()
84 84 for f in wctx.deleted() + wctx.removed():
85 85 if f not in man:
86 86 action.append((f, "f"))
87 87
88 88 return action
89 89
90 90 def nonoverlap(d1, d2, d3):
91 91 "Return list of elements in d1 not in d2 or d3"
92 92
93 93 l = []
94 94 for d in d1:
95 95 if d not in d3 and d not in d2:
96 96 l.append(d)
97 97
98 98 l.sort()
99 99 return l
100 100
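nonoverlap only looks at the keys, so with toy manifests:

    nonoverlap({'a': 1, 'b': 1, 'c': 1}, {'b': 1}, {'c': 1})   # -> ['a']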
101 101 def findold(fctx, limit):
102 102 "find files that path was copied from, back to linkrev limit"
103 103
104 104 old = {}
105 105 orig = fctx.path()
106 106 visit = [fctx]
107 107 while visit:
108 108 fc = visit.pop()
109 109 if fc.rev() < limit:
110 110 continue
111 111 if fc.path() != orig and fc.path() not in old:
112 112 old[fc.path()] = 1
113 113 visit += fc.parents()
114 114
115 115 old = old.keys()
116 116 old.sort()
117 117 return old
118 118
119 119 def findcopies(repo, m1, m2, ma, limit):
120 120 """
121 121 Find moves and copies between m1 and m2 back to limit linkrev
122 122 """
123 123
124 124 if not repo.ui.configbool("merge", "followcopies", True):
125 125 return {}
126 126
127 127 # avoid silly behavior for update from empty dir
128 128 if not m1:
129 129 return {}
130 130
131 131 dcopies = repo.dirstate.copies()
132 132 copy = {}
133 133 match = {}
134 134 u1 = nonoverlap(m1, m2, ma)
135 135 u2 = nonoverlap(m2, m1, ma)
136 136 ctx = util.cachefunc(lambda f,n: repo.filectx(f, fileid=n[:20]))
137 137
138 138 def checkpair(c, f2, man):
139 139 ''' check if an apparent pair actually matches '''
140 140 c2 = ctx(f2, man[f2])
141 141 ca = c.ancestor(c2)
142 142 if ca and (ca.path() == c.path() or ca.path() == c2.path()):
143 143 copy[c.path()] = f2
144 144 copy[f2] = c.path()
145 145
146 146 for f in u1:
147 147 c = ctx(dcopies.get(f, f), m1[f])
148 148 for of in findold(c, limit):
149 149 if of in m2:
150 150 checkpair(c, of, m2)
151 151 else:
152 152 match.setdefault(of, []).append(f)
153 153
154 154 for f in u2:
155 155 c = ctx(f, m2[f])
156 156 for of in findold(c, limit):
157 157 if of in m1:
158 158 checkpair(c, of, m1)
159 159 elif of in match:
160 160 for mf in match[of]:
161 161 checkpair(c, mf, m1)
162 162
163 163 return copy
164 164
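Informally, findcopies only examines the files unique to each side (u1/u2), asks the filelog where each such file came from (findold), and lets checkpair confirm the pairing through the file's ancestor. A much-simplified toy version of that pairing, with made-up names and a plain dict standing in for the filelog history:

    m1 = {'b': 'n1'}            # local manifest: only 'b'
    m2 = {'a': 'n2'}            # remote manifest: only 'a'
    ma = {'a': 'n0'}            # ancestor manifest
    history = {'b': ['a']}      # 'b' was copied/renamed from 'a'

    copy = {}
    for f in [f for f in m1 if f not in m2 and f not in ma]:   # like u1
        for old in history.get(f, []):                         # like findold
            if old in m2:                                      # other side still has it
                copy[f] = old
                copy[old] = f
    # copy == {'b': 'a', 'a': 'b'}, so manifestmerge can merge 'a' and 'b'
    # instead of treating them as an unrelated delete/add pair.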
165 165 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
166 166 """
167 167 Merge p1 and p2 with ancestor ma and generate merge action list
168 168
169 169 overwrite = whether we clobber working files
170 170 partial = function to filter file lists
171 171 """
172 172
173 173 repo.ui.note(_("resolving manifests\n"))
174 174 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
175 175 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
176 176
177 177 m1 = p1.manifest()
178 178 m2 = p2.manifest()
179 179 ma = pa.manifest()
180 180 backwards = (pa == p2)
181 181 action = []
182 182 copy = {}
183 183
184 184 def fmerge(f, f2=None, fa=None):
185 185 """merge executable flags"""
186 186 if not f2:
187 187 f2 = f
188 188 fa = f
189 189 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
190 190 return ((a^b) | (a^c)) ^ a
191 191
192 192 def act(msg, m, f, *args):
193 193 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
194 194 action.append((f, m) + args)
195 195
196 196 if not (backwards or overwrite):
197 197 copy = findcopies(repo, m1, m2, ma, pa.rev())
198 198
199 199 # Compare manifests
200 200 for f, n in m1.iteritems():
201 201 if partial and not partial(f):
202 202 continue
203 203 if f in m2:
204 204 # are files different?
205 205 if n != m2[f]:
206 206 a = ma.get(f, nullid)
207 207 # are both different from the ancestor?
208 208 if not overwrite and n != a and m2[f] != a:
209 209 act("versions differ", "m", f, f, f, fmerge(f), False)
210 210 # are we clobbering?
211 211 # is remote's version newer?
212 212 # or are we going back in time and clean?
213 213 elif overwrite or m2[f] != a or (backwards and not n[20:]):
214 214 act("remote is newer", "g", f, m2.execf(f))
215 215 # local is newer, not overwrite, check mode bits
216 216 elif fmerge(f) != m1.execf(f):
217 217 act("update permissions", "e", f, m2.execf(f))
218 218 # contents same, check mode bits
219 219 elif m1.execf(f) != m2.execf(f):
220 220 if overwrite or fmerge(f) != m1.execf(f):
221 221 act("update permissions", "e", f, m2.execf(f))
222 222 elif f in copy:
223 223 f2 = copy[f]
224 224 if f in ma: # case 3,20 A/B/A
225 225 act("remote moved", "m", f, f2, f2, fmerge(f, f2, f), True)
226 226 else:
227 227 if f2 in m1: # case 2 A,B/B/B
228 228 act("local copied", "m",
229 229 f, f2, f, fmerge(f, f2, f2), False)
230 230 else: # case 4,21 A/B/B
231 231 act("local moved", "m",
232 232 f, f2, f, fmerge(f, f2, f2), False)
233 233 elif f in ma:
234 234 if n != ma[f] and not overwrite:
235 235 if repo.ui.prompt(
236 236 (_(" local changed %s which remote deleted\n") % f) +
237 237 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
238 238 act("prompt delete", "r", f)
239 239 else:
240 240 act("other deleted", "r", f)
241 241 else:
242 242 # file is created on branch or in working directory
243 243 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
244 244 act("remote deleted", "r", f)
245 245
246 246 for f, n in m2.iteritems():
247 247 if partial and not partial(f):
248 248 continue
249 249 if f in m1:
250 250 continue
251 251 if f in copy:
252 252 f2 = copy[f]
253 253 if f2 not in m2: # already seen
254 254 continue
255 255 # rename case 1, A/A,B/A
256 256 act("remote copied", "m", f2, f, f, fmerge(f2, f, f2), False)
257 257 elif f in ma:
258 258 if overwrite or backwards:
259 259 act("recreating", "g", f, m2.execf(f))
260 260 elif n != ma[f]:
261 261 if repo.ui.prompt(
262 262 (_("remote changed %s which local deleted\n") % f) +
263 263 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
264 264 act("prompt recreating", "g", f, m2.execf(f))
265 265 else:
266 266 act("remote created", "g", f, m2.execf(f))
267 267
268 268 return action
269 269
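The exec-flag merge in fmerge above is a tiny three-way merge on booleans: with a the ancestor's flag and b, c the two sides, ((a^b) | (a^c)) ^ a yields whichever side changed the flag, and the common value when neither (or both) did. An exhaustive check:

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                merged = ((a ^ b) | (a ^ c)) ^ a
                if c == a:
                    expected = b   # remote untouched: keep the local flag
                else:
                    expected = c   # remote changed it (both sides agree if both changed)
                assert merged == expected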
270 270 def applyupdates(repo, action, wctx, mctx):
271 271 "apply the merge action list to the working directory"
272 272
273 273 updated, merged, removed, unresolved = 0, 0, 0, 0
274 274 action.sort()
275 275 for a in action:
276 276 f, m = a[:2]
277 277 if f[0] == "/":
278 278 continue
279 279 if m == "r": # remove
280 280 repo.ui.note(_("removing %s\n") % f)
281 281 util.audit_path(f)
282 282 try:
283 283 util.unlink(repo.wjoin(f))
284 284 except OSError, inst:
285 285 if inst.errno != errno.ENOENT:
286 286 repo.ui.warn(_("update failed to remove %s: %s!\n") %
287 287 (f, inst.strerror))
288 288 removed += 1
289 289 elif m == "m": # merge
290 290 f2, fd, flag, move = a[2:]
291 291 r = filemerge(repo, f, f2, wctx, mctx)
292 292 if r > 0:
293 293 unresolved += 1
294 294 else:
295 295 if r is None:
296 296 updated += 1
297 297 else:
298 298 merged += 1
299 299 if f != fd:
300 300 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
301 301 repo.wwrite(fd, repo.wread(f))
302 302 if move:
303 303 repo.ui.debug(_("removing %s\n") % f)
304 304 os.unlink(repo.wjoin(f))
305 305 util.set_exec(repo.wjoin(fd), flag)
306 306 elif m == "g": # get
307 307 flag = a[2]
308 308 repo.ui.note(_("getting %s\n") % f)
309 309 t = mctx.filectx(f).data()
310 310 repo.wwrite(f, t)
311 311 util.set_exec(repo.wjoin(f), flag)
312 312 updated += 1
313 313 elif m == "e": # exec
314 314 flag = a[2]
315 315 util.set_exec(repo.wjoin(f), flag)
316 316
317 317 return updated, merged, removed, unresolved
318 318
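Each entry in the action list starts with (filename, kind) and carries kind-specific extras, as unpacked above: nothing for "r", the exec flag for "g" and "e", and (other name, destination, exec flag, move) for "m". An illustrative list with hypothetical file names:

    action = [
        ("obsolete.txt", "r"),                                 # remove
        ("newfile.c",    "g", False),                          # get from the other side
        ("build.sh",     "e", True),                           # fix up the exec bit only
        ("new.c",        "m", "old.c", "new.c", False, True),  # merge, recording a move
    ]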
319 319 def recordupdates(repo, action, branchmerge):
320 320 "record merge actions to the dirstate"
321 321
322 322 for a in action:
323 323 f, m = a[:2]
324 324 if m == "r": # remove
325 325 if branchmerge:
326 326 repo.dirstate.update([f], 'r')
327 327 else:
328 328 repo.dirstate.forget([f])
329 329 elif m == "f": # forget
330 330 repo.dirstate.forget([f])
331 331 elif m == "g": # get
332 332 if branchmerge:
333 333 repo.dirstate.update([f], 'n', st_mtime=-1)
334 334 else:
335 335 repo.dirstate.update([f], 'n')
336 336 elif m == "m": # merge
337 337 f2, fd, flag, move = a[2:]
338 338 if branchmerge:
339 339 # We've done a branch merge, mark this file as merged
340 340 # so that we properly record the merger later
341 341 repo.dirstate.update([fd], 'm')
342 342 if f != f2: # copy/rename
343 343 if move:
344 344 repo.dirstate.update([f], 'r')
345 345 if f != fd:
346 346 repo.dirstate.copy(f, fd)
347 347 else:
348 348 repo.dirstate.copy(f2, fd)
349 349 else:
350 350 # We've update-merged a locally modified file, so
351 351 # we set the dirstate to emulate a normal checkout
352 352 # of that file some time in the past. Thus our
353 353 # merge will appear as a normal local file
354 354 # modification.
355 355 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
356 356 if move:
357 357 repo.dirstate.forget([f])
358 358
359 359 def update(repo, node, branchmerge, force, partial, wlock):
360 360 """
361 361 Perform a merge between the working directory and the given node
362 362
363 363 branchmerge = whether to merge between branches
364 364 force = whether to force branch merging or file overwriting
365 365 partial = a function to filter file lists (dirstate not updated)
366 366 wlock = working dir lock, if already held
367 367 """
368 368
369 369 if not wlock:
370 370 wlock = repo.wlock()
371 371
372 372 overwrite = force and not branchmerge
373 373 forcemerge = force and branchmerge
374 374 wc = repo.workingctx()
375 375 pl = wc.parents()
376 376 p1, p2 = pl[0], repo.changectx(node)
377 377 pa = p1.ancestor(p2)
378 378 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
379 379
380 380 ### check phase
381 381 if not overwrite and len(pl) > 1:
382 382 raise util.Abort(_("outstanding uncommitted merges"))
383 383 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
384 384 if branchmerge:
385 385 raise util.Abort(_("there is nothing to merge, just use "
386 386 "'hg update' or look at 'hg heads'"))
387 387 elif not (overwrite or branchmerge):
388 388 raise util.Abort(_("update spans branches, use 'hg merge' "
389 389 "or 'hg update -C' to lose changes"))
390 390 if branchmerge and not forcemerge:
391 if wc.modified() or wc.added() or wc.removed():
391 if wc.files():
392 392 raise util.Abort(_("outstanding uncommitted changes"))
393 393
394 394 ### calculate phase
395 395 action = []
396 396 if not force:
397 397 checkunknown(wc, p2)
398 398 if not branchmerge:
399 399 action += forgetremoved(wc, p2)
400 400 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
401 401
402 402 ### apply phase
403 403 if not branchmerge: # just jump to the new rev
404 404 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
405 405 if not partial:
406 406 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
407 407
408 408 stats = applyupdates(repo, action, wc, p2)
409 409
410 410 if not partial:
411 411 recordupdates(repo, action, branchmerge)
412 412 repo.dirstate.setparents(fp1, fp2)
413 413 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
414 414 if not branchmerge:
415 415 repo.opener("branch", "w").write(p2.branch() + "\n")
416 416
417 417 return stats
418 418
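As a rough sketch (argument names taken from the signature above; the real callers go through wrapper functions in hg.py), the two main ways the command layer ends up in update() look like:

    # 'hg update -C REV': jump to REV, clobbering local modifications
    merge.update(repo, node, branchmerge=False, force=True, partial=None, wlock=None)

    # 'hg merge REV': merge REV into the working directory
    merge.update(repo, node, branchmerge=True, force=False, partial=None, wlock=None)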
@@ -1,1278 +1,1284 b''
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import gettext as _
15 15 from demandload import demandload
16 16 demandload(globals(), "binascii changegroup errno ancestor mdiff os")
17 17 demandload(globals(), "sha struct util zlib")
18 18
19 19 # revlog version strings
20 20 REVLOGV0 = 0
21 21 REVLOGNG = 1
22 22
23 23 # revlog flags
24 24 REVLOGNGINLINEDATA = (1 << 16)
25 25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
26 26
27 27 REVLOG_DEFAULT_FORMAT = REVLOGNG
28 28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
29 29
30 30 def flagstr(flag):
31 31 if flag == "inline":
32 32 return REVLOGNGINLINEDATA
33 33 raise RevlogError(_("unknown revlog flag %s" % flag))
34 34
35 35 def hash(text, p1, p2):
36 36 """generate a hash from the given text and its parent hashes
37 37
38 38 This hash combines both the current file contents and its history
39 39 in a manner that makes it easy to distinguish nodes with the same
40 40 content in the revision graph.
41 41 """
42 42 l = [p1, p2]
43 43 l.sort()
44 44 s = sha.new(l[0])
45 45 s.update(l[1])
46 46 s.update(text)
47 47 return s.digest()
48 48
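Because the two parents are sorted before hashing, the node id does not depend on parent order; the function above is equivalent to this small sketch:

    import sha

    def hash_sketch(text, p1, p2):
        a, b = min(p1, p2), max(p1, p2)
        return sha.new(a + b + text).digest()

    # hash(text, p1, p2) == hash(text, p2, p1) == hash_sketch(text, p1, p2)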
49 49 def compress(text):
50 50 """ generate a possibly-compressed representation of text """
51 51 if not text: return ("", text)
52 52 if len(text) < 44:
53 53 if text[0] == '\0': return ("", text)
54 54 return ('u', text)
55 55 bin = zlib.compress(text)
56 56 if len(bin) > len(text):
57 57 if text[0] == '\0': return ("", text)
58 58 return ('u', text)
59 59 return ("", bin)
60 60
61 61 def decompress(bin):
62 62 """ decompress the given input """
63 63 if not bin: return bin
64 64 t = bin[0]
65 65 if t == '\0': return bin
66 66 if t == 'x': return zlib.decompress(bin)
67 67 if t == 'u': return bin[1:]
68 68 raise RevlogError(_("unknown compression type %r") % t)
69 69
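compress() returns a (header, data) pair whose concatenation decompress() undoes: empty or NUL-led short texts stay raw, texts that zlib cannot shrink get a 'u' marker, and everything else is plain zlib output (whose first byte is 'x'). A quick round-trip check using the two functions above:

    for text in ("", "\0looks-binary", "short text", "x" * 1000):
        head, data = compress(text)
        assert decompress(head + data) == text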
70 70 indexformatv0 = ">4l20s20s20s"
71 71 v0shaoffset = 56
72 72 # index ng:
73 73 # 6 bytes offset
74 74 # 2 bytes flags
75 75 # 4 bytes compressed length
76 76 # 4 bytes uncompressed length
77 77 # 4 bytes: base rev
78 78 # 4 bytes link rev
79 79 # 4 bytes parent 1 rev
80 80 # 4 bytes parent 2 rev
81 81 # 32 bytes: nodeid
82 82 indexformatng = ">Qiiiiii20s12x"
83 83 ngshaoffset = 32
84 84 versionformat = ">i"
85 85
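Each RevlogNG index entry is therefore a fixed 64-byte record, and the leading Q field multiplexes the 48-bit data offset with 16 bits of per-revision flags (see offset_type/ngoffset/ngtype further down). A quick sanity check of the layout:

    import struct

    assert struct.calcsize(">Qiiiiii20s12x") == 64          # one index record
    packed = (12345 << 16) | 1                               # offset_type(12345, 1)
    assert (packed >> 16, packed & 0xFFFF) == (12345, 1)     # ngoffset / ngtype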
86 86 class lazyparser(object):
87 87 """
88 88 this class avoids the need to parse the entirety of large indices
89 89 """
90 90
91 91 # lazyparser is not safe to use on windows if win32 extensions are not
92 92 # available. it keeps the file handle open, which makes it impossible
93 93 # to break hardlinks on local cloned repos.
94 94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
95 95 hasattr(util, 'win32api'))
96 96
97 97 def __init__(self, dataf, size, indexformat, shaoffset):
98 98 self.dataf = dataf
99 99 self.format = indexformat
100 100 self.s = struct.calcsize(indexformat)
101 101 self.indexformat = indexformat
102 102 self.datasize = size
103 103 self.l = size/self.s
104 104 self.index = [None] * self.l
105 105 self.map = {nullid: nullrev}
106 106 self.allmap = 0
107 107 self.all = 0
108 108 self.mapfind_count = 0
109 109 self.shaoffset = shaoffset
110 110
111 111 def loadmap(self):
112 112 """
113 113 during a commit, we need to make sure the rev being added is
114 114 not a duplicate. This requires loading the entire index,
115 115 which is fairly slow. loadmap can load up just the node map,
116 116 which takes much less time.
117 117 """
118 118 if self.allmap: return
119 119 end = self.datasize
120 120 self.allmap = 1
121 121 cur = 0
122 122 count = 0
123 123 blocksize = self.s * 256
124 124 self.dataf.seek(0)
125 125 while cur < end:
126 126 data = self.dataf.read(blocksize)
127 127 off = 0
128 128 for x in xrange(256):
129 129 n = data[off + self.shaoffset:off + self.shaoffset + 20]
130 130 self.map[n] = count
131 131 count += 1
132 132 if count >= self.l:
133 133 break
134 134 off += self.s
135 135 cur += blocksize
136 136
137 137 def loadblock(self, blockstart, blocksize, data=None):
138 138 if self.all: return
139 139 if data is None:
140 140 self.dataf.seek(blockstart)
141 141 if blockstart + blocksize > self.datasize:
142 142 # the revlog may have grown since we've started running,
143 143 # but we don't have space in self.index for more entries.
144 144 # limit blocksize so that we don't get too much data.
145 145 blocksize = max(self.datasize - blockstart, 0)
146 146 data = self.dataf.read(blocksize)
147 147 lend = len(data) / self.s
148 148 i = blockstart / self.s
149 149 off = 0
150 150 for x in xrange(lend):
151 151 if self.index[i + x] == None:
152 152 b = data[off : off + self.s]
153 153 self.index[i + x] = b
154 154 n = b[self.shaoffset:self.shaoffset + 20]
155 155 self.map[n] = i + x
156 156 off += self.s
157 157
158 158 def findnode(self, node):
159 159 """search backwards through the index file for a specific node"""
160 160 if self.allmap: return None
161 161
162 162 # hg log will cause many many searches for the manifest
163 163 # nodes. After we get called a few times, just load the whole
164 164 # thing.
165 165 if self.mapfind_count > 8:
166 166 self.loadmap()
167 167 if node in self.map:
168 168 return node
169 169 return None
170 170 self.mapfind_count += 1
171 171 last = self.l - 1
172 172 while self.index[last] != None:
173 173 if last == 0:
174 174 self.all = 1
175 175 self.allmap = 1
176 176 return None
177 177 last -= 1
178 178 end = (last + 1) * self.s
179 179 blocksize = self.s * 256
180 180 while end >= 0:
181 181 start = max(end - blocksize, 0)
182 182 self.dataf.seek(start)
183 183 data = self.dataf.read(end - start)
184 184 findend = end - start
185 185 while True:
186 186 # we're searching backwards, so we have to make sure
187 187 # we don't find a changeset where this node is a parent
188 188 off = data.rfind(node, 0, findend)
189 189 findend = off
190 190 if off >= 0:
191 191 i = off / self.s
192 192 off = i * self.s
193 193 n = data[off + self.shaoffset:off + self.shaoffset + 20]
194 194 if n == node:
195 195 self.map[n] = i + start / self.s
196 196 return node
197 197 else:
198 198 break
199 199 end -= blocksize
200 200 return None
201 201
202 202 def loadindex(self, i=None, end=None):
203 203 if self.all: return
204 204 all = False
205 205 if i == None:
206 206 blockstart = 0
207 207 blocksize = (512 / self.s) * self.s
208 208 end = self.datasize
209 209 all = True
210 210 else:
211 211 if end:
212 212 blockstart = i * self.s
213 213 end = end * self.s
214 214 blocksize = end - blockstart
215 215 else:
216 216 blockstart = (i & ~(32)) * self.s
217 217 blocksize = self.s * 64
218 218 end = blockstart + blocksize
219 219 while blockstart < end:
220 220 self.loadblock(blockstart, blocksize)
221 221 blockstart += blocksize
222 222 if all: self.all = True
223 223
224 224 class lazyindex(object):
225 225 """a lazy version of the index array"""
226 226 def __init__(self, parser):
227 227 self.p = parser
228 228 def __len__(self):
229 229 return len(self.p.index)
230 230 def load(self, pos):
231 231 if pos < 0:
232 232 pos += len(self.p.index)
233 233 self.p.loadindex(pos)
234 234 return self.p.index[pos]
235 235 def __getitem__(self, pos):
236 236 ret = self.p.index[pos] or self.load(pos)
237 237 if isinstance(ret, str):
238 238 ret = struct.unpack(self.p.indexformat, ret)
239 239 return ret
240 240 def __setitem__(self, pos, item):
241 241 self.p.index[pos] = item
242 242 def __delitem__(self, pos):
243 243 del self.p.index[pos]
244 244 def append(self, e):
245 245 self.p.index.append(e)
246 246
247 247 class lazymap(object):
248 248 """a lazy version of the node map"""
249 249 def __init__(self, parser):
250 250 self.p = parser
251 251 def load(self, key):
252 252 n = self.p.findnode(key)
253 253 if n == None:
254 254 raise KeyError(key)
255 255 def __contains__(self, key):
256 256 if key in self.p.map:
257 257 return True
258 258 self.p.loadmap()
259 259 return key in self.p.map
260 260 def __iter__(self):
261 261 yield nullid
262 262 for i in xrange(self.p.l):
263 263 ret = self.p.index[i]
264 264 if not ret:
265 265 self.p.loadindex(i)
266 266 ret = self.p.index[i]
267 267 if isinstance(ret, str):
268 268 ret = struct.unpack(self.p.indexformat, ret)
269 269 yield ret[-1]
270 270 def __getitem__(self, key):
271 271 try:
272 272 return self.p.map[key]
273 273 except KeyError:
274 274 try:
275 275 self.load(key)
276 276 return self.p.map[key]
277 277 except KeyError:
278 278 raise KeyError("node " + hex(key))
279 279 def __setitem__(self, key, val):
280 280 self.p.map[key] = val
281 281 def __delitem__(self, key):
282 282 del self.p.map[key]
283 283
284 284 class RevlogError(Exception): pass
285 285
286 286 class revlog(object):
287 287 """
288 288 the underlying revision storage object
289 289
290 290 A revlog consists of two parts, an index and the revision data.
291 291
292 292 The index is a file with a fixed record size containing
293 293 information on each revision, including its nodeid (hash), the
294 294 nodeids of its parents, the position and offset of its data within
295 295 the data file, and the revision it's based on. Finally, each entry
296 296 contains a linkrev entry that can serve as a pointer to external
297 297 data.
298 298
299 299 The revision data itself is a linear collection of data chunks.
300 300 Each chunk represents a revision and is usually represented as a
301 301 delta against the previous chunk. To bound lookup time, runs of
302 302 deltas are limited to about 2 times the length of the original
303 303 version data. This makes retrieval of a version proportional to
304 304 its size, or O(1) relative to the number of revisions.
305 305
306 306 Both pieces of the revlog are written to in an append-only
307 307 fashion, which means we never need to rewrite a file to insert or
308 308 remove data, and can use some simple techniques to avoid the need
309 309 for locking while reading.
310 310 """
311 311 def __init__(self, opener, indexfile, datafile,
312 312 defversion=REVLOG_DEFAULT_VERSION):
313 313 """
314 314 create a revlog object
315 315
316 316 opener is a function that abstracts the file opening operation
317 317 and can be used to implement COW semantics or the like.
318 318 """
319 319 self.indexfile = indexfile
320 320 self.datafile = datafile
321 321 self.opener = opener
322 322
323 323 self.indexstat = None
324 324 self.cache = None
325 325 self.chunkcache = None
326 326 self.defversion = defversion
327 327 self.load()
328 328
329 329 def load(self):
330 330 v = self.defversion
331 331 try:
332 332 f = self.opener(self.indexfile)
333 333 i = f.read(4)
334 334 f.seek(0)
335 335 except IOError, inst:
336 336 if inst.errno != errno.ENOENT:
337 337 raise
338 338 i = ""
339 339 else:
340 340 try:
341 341 st = util.fstat(f)
342 342 except AttributeError, inst:
343 343 st = None
344 344 else:
345 345 oldst = self.indexstat
346 346 if (oldst and st.st_dev == oldst.st_dev
347 347 and st.st_ino == oldst.st_ino
348 348 and st.st_mtime == oldst.st_mtime
349 349 and st.st_ctime == oldst.st_ctime):
350 350 return
351 351 self.indexstat = st
352 352 if len(i) > 0:
353 353 v = struct.unpack(versionformat, i)[0]
354 354 flags = v & ~0xFFFF
355 355 fmt = v & 0xFFFF
356 356 if fmt == REVLOGV0:
357 357 if flags:
358 358 raise RevlogError(_("index %s invalid flags %x for format v0" %
359 359 (self.indexfile, flags)))
360 360 elif fmt == REVLOGNG:
361 361 if flags & ~REVLOGNGINLINEDATA:
362 362 raise RevlogError(_("index %s invalid flags %x for revlogng" %
363 363 (self.indexfile, flags)))
364 364 else:
365 365 raise RevlogError(_("index %s invalid format %d" %
366 366 (self.indexfile, fmt)))
367 367 self.version = v
368 368 if v == REVLOGV0:
369 369 self.indexformat = indexformatv0
370 370 shaoffset = v0shaoffset
371 371 else:
372 372 self.indexformat = indexformatng
373 373 shaoffset = ngshaoffset
374 374
375 375 if i:
376 376 if (lazyparser.safe_to_use and not self.inlinedata() and
377 377 st and st.st_size > 10000):
378 378 # big index, let's parse it on demand
379 379 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
380 380 self.index = lazyindex(parser)
381 381 self.nodemap = lazymap(parser)
382 382 else:
383 383 self.parseindex(f, st)
384 384 if self.version != REVLOGV0:
385 385 e = list(self.index[0])
386 386 type = self.ngtype(e[0])
387 387 e[0] = self.offset_type(0, type)
388 388 self.index[0] = e
389 389 else:
390 390 self.nodemap = {nullid: nullrev}
391 391 self.index = []
392 392
393 393
394 394 def parseindex(self, fp, st):
395 395 s = struct.calcsize(self.indexformat)
396 396 self.index = []
397 397 self.nodemap = {nullid: nullrev}
398 398 inline = self.inlinedata()
399 399 n = 0
400 400 leftover = None
401 401 while True:
402 402 if st:
403 403 data = fp.read(65536)
404 404 else:
405 405 # hack for httprangereader, it doesn't do partial reads well
406 406 data = fp.read()
407 407 if not data:
408 408 break
409 409 if n == 0 and self.inlinedata():
410 410 # cache the first chunk
411 411 self.chunkcache = (0, data)
412 412 if leftover:
413 413 data = leftover + data
414 414 leftover = None
415 415 off = 0
416 416 l = len(data)
417 417 while off < l:
418 418 if l - off < s:
419 419 leftover = data[off:]
420 420 break
421 421 cur = data[off:off + s]
422 422 off += s
423 423 e = struct.unpack(self.indexformat, cur)
424 424 self.index.append(e)
425 425 self.nodemap[e[-1]] = n
426 426 n += 1
427 427 if inline:
428 428 off += e[1]
429 429 if off > l:
430 430 # some things don't seek well, just read it
431 431 fp.read(off - l)
432 432 if not st:
433 433 break
434 434
435 435
436 436 def ngoffset(self, q):
437 437 if q & 0xFFFF:
438 438 raise RevlogError(_('%s: incompatible revision flag %x') %
439 439 (self.indexfile, q))
440 440 return long(q >> 16)
441 441
442 442 def ngtype(self, q):
443 443 return int(q & 0xFFFF)
444 444
445 445 def offset_type(self, offset, type):
446 446 return long(long(offset) << 16 | type)
447 447
448 448 def loadindex(self, start, end):
449 449 """load a block of indexes all at once from the lazy parser"""
450 450 if isinstance(self.index, lazyindex):
451 451 self.index.p.loadindex(start, end)
452 452
453 453 def loadindexmap(self):
454 454 """loads both the map and the index from the lazy parser"""
455 455 if isinstance(self.index, lazyindex):
456 456 p = self.index.p
457 457 p.loadindex()
458 458 self.nodemap = p.map
459 459
460 460 def loadmap(self):
461 461 """loads the map from the lazy parser"""
462 462 if isinstance(self.nodemap, lazymap):
463 463 self.nodemap.p.loadmap()
464 464 self.nodemap = self.nodemap.p.map
465 465
466 466 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
467 467 def tip(self): return self.node(len(self.index) - 1)
468 468 def count(self): return len(self.index)
469 469 def node(self, rev):
470 return (rev < 0) and nullid or self.index[rev][-1]
470 return rev == nullrev and nullid or self.index[rev][-1]
471 471 def rev(self, node):
472 472 try:
473 473 return self.nodemap[node]
474 474 except KeyError:
475 475 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
476 476 def linkrev(self, node):
477 477 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
478 478 def parents(self, node):
479 479 if node == nullid: return (nullid, nullid)
480 480 r = self.rev(node)
481 481 d = self.index[r][-3:-1]
482 482 if self.version == REVLOGV0:
483 483 return d
484 484 return (self.node(d[0]), self.node(d[1]))
485 485 def parentrevs(self, rev):
486 486 if rev == nullrev:
487 487 return (nullrev, nullrev)
488 488 d = self.index[rev][-3:-1]
489 489 if self.version == REVLOGV0:
490 490 return (self.rev(d[0]), self.rev(d[1]))
491 491 return d
492 492 def start(self, rev):
493 if rev < 0:
494 return nullrev
493 if rev == nullrev:
494 return 0
495 495 if self.version != REVLOGV0:
496 496 return self.ngoffset(self.index[rev][0])
497 497 return self.index[rev][0]
498 498
499 499 def end(self, rev): return self.start(rev) + self.length(rev)
500 500
501 501 def size(self, rev):
502 502 """return the length of the uncompressed text for a given revision"""
503 if rev == nullrev:
504 return 0
503 505 l = -1
504 506 if self.version != REVLOGV0:
505 507 l = self.index[rev][2]
506 508 if l >= 0:
507 509 return l
508 510
509 511 t = self.revision(self.node(rev))
510 512 return len(t)
511 513
512 514 # alternate implementation, The advantage to this code is it
513 515 # will be faster for a single revision. But, the results are not
514 516 # cached, so finding the size of every revision will be slower.
515 517 """
516 518 if self.cache and self.cache[1] == rev:
517 519 return len(self.cache[2])
518 520
519 521 base = self.base(rev)
520 522 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
521 523 base = self.cache[1]
522 524 text = self.cache[2]
523 525 else:
524 526 text = self.revision(self.node(base))
525 527
526 528 l = len(text)
527 529 for x in xrange(base + 1, rev + 1):
528 530 l = mdiff.patchedsize(l, self.chunk(x))
529 531 return l
530 532 """
531 533
532 534 def length(self, rev):
533 if rev < 0:
535 if rev == nullrev:
534 536 return 0
535 537 else:
536 538 return self.index[rev][1]
537 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
539 def base(self, rev):
540 if (rev == nullrev):
541 return nullrev
542 else:
543 return self.index[rev][-5]
538 544
539 545 def reachable(self, rev, stop=None):
540 546 reachable = {}
541 547 visit = [rev]
542 548 reachable[rev] = 1
543 549 if stop:
544 550 stopn = self.rev(stop)
545 551 else:
546 552 stopn = 0
547 553 while visit:
548 554 n = visit.pop(0)
549 555 if n == stop:
550 556 continue
551 557 if n == nullid:
552 558 continue
553 559 for p in self.parents(n):
554 560 if self.rev(p) < stopn:
555 561 continue
556 562 if p not in reachable:
557 563 reachable[p] = 1
558 564 visit.append(p)
559 565 return reachable
560 566
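reachable() is a plain breadth-first walk over parent links, optionally refusing to descend past the stop revision. A simplified standalone version over a toy parent mapping (node names are made up, and the rev-number cutoff is omitted):

    parents = {'d': ['b', 'c'], 'b': ['a'], 'c': ['a'], 'a': []}

    def reachable_sketch(node):
        seen = {node: 1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in parents[n]:
                if p not in seen:
                    seen[p] = 1
                    visit.append(p)
        return seen

    # reachable_sketch('d') -> {'d': 1, 'b': 1, 'c': 1, 'a': 1}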
561 567 def nodesbetween(self, roots=None, heads=None):
562 568 """Return a tuple containing three elements. Elements 1 and 2 contain
563 569 a final list of bases and heads after all the unreachable ones have been
564 570 pruned. Element 0 contains a topologically sorted list of all
565 571
566 572 nodes that satisfy these constraints:
567 573 1. All nodes must be descended from a node in roots (the nodes on
568 574 roots are considered descended from themselves).
569 575 2. All nodes must also be ancestors of a node in heads (the nodes in
570 576 heads are considered to be their own ancestors).
571 577
572 578 If roots is unspecified, nullid is assumed as the only root.
573 579 If heads is unspecified, it is taken to be the output of the
574 580 heads method (i.e. a list of all nodes in the repository that
575 581 have no children)."""
576 582 nonodes = ([], [], [])
577 583 if roots is not None:
578 584 roots = list(roots)
579 585 if not roots:
580 586 return nonodes
581 587 lowestrev = min([self.rev(n) for n in roots])
582 588 else:
583 589 roots = [nullid] # Everybody's a descendent of nullid
584 590 lowestrev = nullrev
585 591 if (lowestrev == nullrev) and (heads is None):
586 592 # We want _all_ the nodes!
587 593 return ([self.node(r) for r in xrange(0, self.count())],
588 594 [nullid], list(self.heads()))
589 595 if heads is None:
590 596 # All nodes are ancestors, so the latest ancestor is the last
591 597 # node.
592 598 highestrev = self.count() - 1
593 599 # Set ancestors to None to signal that every node is an ancestor.
594 600 ancestors = None
595 601 # Set heads to an empty dictionary for later discovery of heads
596 602 heads = {}
597 603 else:
598 604 heads = list(heads)
599 605 if not heads:
600 606 return nonodes
601 607 ancestors = {}
602 608 # Turn heads into a dictionary so we can remove 'fake' heads.
603 609 # Also, later we will be using it to filter out the heads we can't
604 610 # find from roots.
605 611 heads = dict.fromkeys(heads, 0)
606 612 # Start at the top and keep marking parents until we're done.
607 613 nodestotag = heads.keys()
608 614 # Remember where the top was so we can use it as a limit later.
609 615 highestrev = max([self.rev(n) for n in nodestotag])
610 616 while nodestotag:
611 617 # grab a node to tag
612 618 n = nodestotag.pop()
613 619 # Never tag nullid
614 620 if n == nullid:
615 621 continue
616 622 # A node's revision number represents its place in a
617 623 # topologically sorted list of nodes.
618 624 r = self.rev(n)
619 625 if r >= lowestrev:
620 626 if n not in ancestors:
621 627 # If we are possibly a descendent of one of the roots
622 628 # and we haven't already been marked as an ancestor
623 629 ancestors[n] = 1 # Mark as ancestor
624 630 # Add non-nullid parents to list of nodes to tag.
625 631 nodestotag.extend([p for p in self.parents(n) if
626 632 p != nullid])
627 633 elif n in heads: # We've seen it before, is it a fake head?
628 634 # So it is; real heads should not be the ancestors of
629 635 # any other heads.
630 636 heads.pop(n)
631 637 if not ancestors:
632 638 return nonodes
633 639 # Now that we have our set of ancestors, we want to remove any
634 640 # roots that are not ancestors.
635 641
636 642 # If one of the roots was nullid, everything is included anyway.
637 643 if lowestrev > nullrev:
638 644 # But since it wasn't, let's recompute the lowest rev to not
639 645 # include roots that aren't ancestors.
640 646
641 647 # Filter out roots that aren't ancestors of heads
642 648 roots = [n for n in roots if n in ancestors]
643 649 # Recompute the lowest revision
644 650 if roots:
645 651 lowestrev = min([self.rev(n) for n in roots])
646 652 else:
647 653 # No more roots? Return empty list
648 654 return nonodes
649 655 else:
650 656 # We are descending from nullid, and don't need to care about
651 657 # any other roots.
652 658 lowestrev = nullrev
653 659 roots = [nullid]
654 660 # Transform our roots list into a 'set' (i.e. a dictionary where the
655 661 # values don't matter).
656 662 descendents = dict.fromkeys(roots, 1)
657 663 # Also, keep the original roots so we can filter out roots that aren't
658 664 # 'real' roots (i.e. are descended from other roots).
659 665 roots = descendents.copy()
660 666 # Our topologically sorted list of output nodes.
661 667 orderedout = []
662 668 # Don't start at nullid since we don't want nullid in our output list,
663 669 # and if nullid shows up in descendents, empty parents will look like
664 670 # they're descendents.
665 671 for r in xrange(max(lowestrev, 0), highestrev + 1):
666 672 n = self.node(r)
667 673 isdescendent = False
668 674 if lowestrev == nullrev: # Everybody is a descendent of nullid
669 675 isdescendent = True
670 676 elif n in descendents:
671 677 # n is already a descendent
672 678 isdescendent = True
673 679 # This check only needs to be done here because all the roots
674 680 # will start being marked as descendents before the loop.
675 681 if n in roots:
676 682 # If n was a root, check if it's a 'real' root.
677 683 p = tuple(self.parents(n))
678 684 # If any of its parents are descendents, it's not a root.
679 685 if (p[0] in descendents) or (p[1] in descendents):
680 686 roots.pop(n)
681 687 else:
682 688 p = tuple(self.parents(n))
683 689 # A node is a descendent if either of its parents are
684 690 # descendents. (We seeded the descendents dict with the roots
685 691 # up there, remember?)
686 692 if (p[0] in descendents) or (p[1] in descendents):
687 693 descendents[n] = 1
688 694 isdescendent = True
689 695 if isdescendent and ((ancestors is None) or (n in ancestors)):
690 696 # Only include nodes that are both descendents and ancestors.
691 697 orderedout.append(n)
692 698 if (ancestors is not None) and (n in heads):
693 699 # We're trying to figure out which heads are reachable
694 700 # from roots.
695 701 # Mark this head as having been reached
696 702 heads[n] = 1
697 703 elif ancestors is None:
698 704 # Otherwise, we're trying to discover the heads.
699 705 # Assume this is a head because if it isn't, the next step
700 706 # will eventually remove it.
701 707 heads[n] = 1
702 708 # But, obviously its parents aren't.
703 709 for p in self.parents(n):
704 710 heads.pop(p, None)
705 711 heads = [n for n in heads.iterkeys() if heads[n] != 0]
706 712 roots = roots.keys()
707 713 assert orderedout
708 714 assert roots
709 715 assert heads
710 716 return (orderedout, roots, heads)
711 717
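# A minimal usage sketch of nodesbetween(), assuming `rl` is an open revlog
# and `base`/`tip` are binary nodeids it contains, with `base` an ancestor of
# `tip`; all names here are hypothetical.
def _sketch_nodesbetween(rl, base, tip):
    # every node descended from `base` and ancestral to `tip`, in
    # topological order, plus the surviving roots and heads
    orderedout, roots, heads = rl.nodesbetween([base], [tip])
    assert orderedout[0] in roots and orderedout[-1] in heads
    # with no arguments the walk covers the entire revlog
    allnodes, allroots, allheads = rl.nodesbetween()
    assert len(allnodes) == rl.count()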
712 718 def heads(self, start=None):
713 719 """return the list of all nodes that have no children
714 720
715 721 if start is specified, only heads that are descendants of
716 722 start will be returned
717 723
718 724 """
719 725 if start is None:
720 726 start = nullid
721 727 startrev = self.rev(start)
722 728 reachable = {startrev: 1}
723 729 heads = {startrev: 1}
724 730
725 731 parentrevs = self.parentrevs
726 732 for r in xrange(startrev + 1, self.count()):
727 733 for p in parentrevs(r):
728 734 if p in reachable:
729 735 reachable[r] = 1
730 736 heads[r] = 1
731 737 if p in heads:
732 738 del heads[p]
733 739 return [self.node(r) for r in heads]
734 740
735 741 def children(self, node):
736 742 """find the children of a given node"""
737 743 c = []
738 744 p = self.rev(node)
739 745 for r in range(p + 1, self.count()):
740 746 for pr in self.parentrevs(r):
741 747 if pr == p:
742 748 c.append(self.node(r))
743 749 return c
744 750
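# A minimal sketch relating heads() and children() above, assuming `rl` is an
# open, non-empty revlog (hypothetical name): a head is precisely a node with
# no children, and heads restricted to a start node are a subset of all heads.
def _sketch_heads_children(rl):
    for h in rl.heads():
        assert rl.children(h) == []
    assert set(rl.heads(rl.node(0))) <= set(rl.heads())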
745 751 def _match(self, id):
746 752 if isinstance(id, (long, int)):
747 753 # rev
748 754 return self.node(id)
749 755 if len(id) == 20:
750 756 # possibly a binary node
751 757 # odds of a binary node being all hex in ASCII are 1 in 10**25
752 758 try:
753 759 node = id
754 760 r = self.rev(node) # quick search the index
755 761 return node
756 762 except RevlogError:
757 763 pass # may be partial hex id
758 764 try:
759 765 # str(rev)
760 766 rev = int(id)
761 767 if str(rev) != id: raise ValueError
762 768 if rev < 0: rev = self.count() + rev
763 769 if rev < 0 or rev >= self.count(): raise ValueError
764 770 return self.node(rev)
765 771 except (ValueError, OverflowError):
766 772 pass
767 773 if len(id) == 40:
768 774 try:
769 775 # a full hex nodeid?
770 776 node = bin(id)
771 777 r = self.rev(node)
772 778 return node
773 779 except TypeError:
774 780 pass
775 781
776 782 def _partialmatch(self, id):
777 783 if len(id) < 40:
778 784 try:
779 785 # hex(node)[:...]
780 786 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
781 787 node = None
782 788 for n in self.nodemap:
783 789 if n.startswith(bin_id) and hex(n).startswith(id):
784 790 if node is not None:
785 791 raise RevlogError(_("Ambiguous identifier"))
786 792 node = n
787 793 if node is not None:
788 794 return node
789 795 except TypeError:
790 796 pass
791 797
792 798 def lookup(self, id):
793 799 """locate a node based on:
794 800 - revision number or str(revision number)
795 801 - nodeid or subset of hex nodeid
796 802 """
797 803
798 804 n = self._match(id)
799 805 if n is not None:
800 806 return n
801 807 n = self._partialmatch(id)
802 808 if n:
803 809 return n
804 810
805 811 raise RevlogError(_("No match found"))
806 812
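# A minimal sketch of the identifier forms lookup() accepts, assuming `rl` is
# an open, non-empty revlog (hypothetical name); hex() is already in scope in
# this module (it is used by _partialmatch above). lookup() raises
# RevlogError for unknown or ambiguous identifiers.
def _sketch_lookup(rl):
    n = rl.node(0)
    full = hex(n)                      # 40-character hex string
    assert rl.lookup(0) == n           # integer revision number
    assert rl.lookup("0") == n         # str(revision number)
    assert rl.lookup(n) == n           # 20-byte binary nodeid
    assert rl.lookup(full) == n        # full hex nodeid
    assert rl.lookup(full[:12]) == n   # unambiguous hex prefix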
807 813 def cmp(self, node, text):
808 814 """compare text with a given file revision"""
809 815 p1, p2 = self.parents(node)
810 816 return hash(text, p1, p2) != node
811 817
812 818 def makenode(self, node, text):
813 819 """calculate a file nodeid for text, descended or possibly
814 820 unchanged from node"""
815 821
816 822 if self.cmp(node, text):
817 823 return hash(text, node, nullid)
818 824 return node
819 825
820 826 def diff(self, a, b):
821 827 """return a delta between two revisions"""
822 828 return mdiff.textdiff(a, b)
823 829
824 830 def patches(self, t, pl):
825 831 """apply a list of patches to a string"""
826 832 return mdiff.patches(t, pl)
827 833
828 834 def chunk(self, rev, df=None, cachelen=4096):
829 835 start, length = self.start(rev), self.length(rev)
830 836 inline = self.inlinedata()
831 837 if inline:
832 838 start += (rev + 1) * struct.calcsize(self.indexformat)
833 839 end = start + length
834 840 def loadcache(df):
835 841 cache_length = max(cachelen, length) # 4k
836 842 if not df:
837 843 if inline:
838 844 df = self.opener(self.indexfile)
839 845 else:
840 846 df = self.opener(self.datafile)
841 847 df.seek(start)
842 848 self.chunkcache = (start, df.read(cache_length))
843 849
844 850 if not self.chunkcache:
845 851 loadcache(df)
846 852
847 853 cache_start = self.chunkcache[0]
848 854 cache_end = cache_start + len(self.chunkcache[1])
849 855 if start >= cache_start and end <= cache_end:
850 856 # it is cached
851 857 offset = start - cache_start
852 858 else:
853 859 loadcache(df)
854 860 offset = 0
855 861
856 862 #def checkchunk():
857 863 # df = self.opener(self.datafile)
858 864 # df.seek(start)
859 865 # return df.read(length)
860 866 #assert s == checkchunk()
861 867 return decompress(self.chunkcache[1][offset:offset + length])
862 868
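# A self-contained sketch of the window test chunk() uses to decide whether a
# requested [start, end) byte range can be served from the (offset, data)
# pair kept in self.chunkcache; `_sketch_cache_hit` is a hypothetical helper.
def _sketch_cache_hit(cache, start, end):
    cache_start = cache[0]
    cache_end = cache_start + len(cache[1])
    if start >= cache_start and end <= cache_end:
        return cache[1][start - cache_start:end - cache_start]
    return None  # miss: chunk() reloads at least cachelen bytes from disk

# e.g. _sketch_cache_hit((100, "x" * 4096), 150, 200) returns 50 bytes,
# while _sketch_cache_hit((100, "x" * 4096), 50, 80) returns None.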
863 869 def delta(self, node):
864 870 """return or calculate a delta between a node and its predecessor"""
865 871 r = self.rev(node)
866 872 return self.revdiff(r - 1, r)
867 873
868 874 def revdiff(self, rev1, rev2):
869 875 """return or calculate a delta between two revisions"""
870 876 b1 = self.base(rev1)
871 877 b2 = self.base(rev2)
872 878 if b1 == b2 and rev1 + 1 == rev2:
873 879 return self.chunk(rev2)
874 880 else:
875 881 return self.diff(self.revision(self.node(rev1)),
876 882 self.revision(self.node(rev2)))
877 883
878 884 def revision(self, node):
879 885 """return an uncompressed revision of a given node"""
880 886 if node == nullid: return ""
881 887 if self.cache and self.cache[0] == node: return self.cache[2]
882 888
883 889 # look up what we need to read
884 890 text = None
885 891 rev = self.rev(node)
886 892 base = self.base(rev)
887 893
888 894 if self.inlinedata():
889 895 # we probably have the whole chunk cached
890 896 df = None
891 897 else:
892 898 df = self.opener(self.datafile)
893 899
894 900 # do we have useful data cached?
895 901 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
896 902 base = self.cache[1]
897 903 text = self.cache[2]
898 904 self.loadindex(base, rev + 1)
899 905 else:
900 906 self.loadindex(base, rev + 1)
901 907 text = self.chunk(base, df=df)
902 908
903 909 bins = []
904 910 for r in xrange(base + 1, rev + 1):
905 911 bins.append(self.chunk(r, df=df))
906 912
907 913 text = self.patches(text, bins)
908 914
909 915 p1, p2 = self.parents(node)
910 916 if node != hash(text, p1, p2):
911 917 raise RevlogError(_("integrity check failed on %s:%d")
912 918 % (self.datafile, rev))
913 919
914 920 self.cache = (node, rev, text)
915 921 return text
916 922
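# A self-contained sketch of the nodeid scheme the integrity check above
# relies on; to the best of my recollection hash(text, p1, p2) is SHA-1 over
# the two parent nodeids in sorted order followed by the text. hashlib stands
# in here for the sha module this code used at the time; the arguments are
# Python 2 byte strings.
import hashlib

def _sketch_nodeid(text, p1, p2):
    a, b = sorted([p1, p2])   # sort so the hash ignores parent order
    s = hashlib.sha1(a)
    s.update(b)
    s.update(text)
    return s.digest()         # 20-byte binary nodeid compared against `node`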
917 923 def checkinlinesize(self, tr, fp=None):
918 924 if not self.inlinedata():
919 925 return
920 926 if not fp:
921 927 fp = self.opener(self.indexfile, 'r')
922 928 fp.seek(0, 2)
923 929 size = fp.tell()
924 930 if size < 131072:
925 931 return
926 932 trinfo = tr.find(self.indexfile)
927 933 if trinfo is None:
928 934 raise RevlogError(_("%s not found in the transaction")
929 935 % self.indexfile)
930 936
931 937 trindex = trinfo[2]
932 938 dataoff = self.start(trindex)
933 939
934 940 tr.add(self.datafile, dataoff)
935 941 df = self.opener(self.datafile, 'w')
936 942 calc = struct.calcsize(self.indexformat)
937 943 for r in xrange(self.count()):
938 944 start = self.start(r) + (r + 1) * calc
939 945 length = self.length(r)
940 946 fp.seek(start)
941 947 d = fp.read(length)
942 948 df.write(d)
943 949 fp.close()
944 950 df.close()
945 951 fp = self.opener(self.indexfile, 'w', atomictemp=True)
946 952 self.version &= ~(REVLOGNGINLINEDATA)
947 953 if self.count():
948 954 x = self.index[0]
949 955 e = struct.pack(self.indexformat, *x)[4:]
950 956 l = struct.pack(versionformat, self.version)
951 957 fp.write(l)
952 958 fp.write(e)
953 959
954 960 for i in xrange(1, self.count()):
955 961 x = self.index[i]
956 962 e = struct.pack(self.indexformat, *x)
957 963 fp.write(e)
958 964
959 965 # if we don't call rename, the temp file will never replace the
960 966 # real index
961 967 fp.rename()
962 968
963 969 tr.replace(self.indexfile, trindex * calc)
964 970 self.chunkcache = None
965 971
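# A self-contained sketch of the inline layout checkinlinesize() unpacks: an
# inline revlog interleaves each index entry with that revision's compressed
# data in the .i file, so the absolute offset of revision r's data is its
# logical data offset plus (r + 1) index-entry sizes, mirroring
# `start + (rev + 1) * struct.calcsize(self.indexformat)` above.
def _sketch_inline_offset(logical_start, rev, entrysize):
    return logical_start + (rev + 1) * entrysize

# Once the interleaved file passes 131072 bytes (128 KiB), checkinlinesize()
# copies the data chunks into a separate .d file and rewrites the index
# without the REVLOGNGINLINEDATA flag.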
966 972 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
967 973 """add a revision to the log
968 974
969 975 text - the revision data to add
970 976 transaction - the transaction object used for rollback
971 977 link - the linkrev data to add
972 978 p1, p2 - the parent nodeids of the revision
973 979 d - an optional precomputed delta
974 980 """
975 981 if not self.inlinedata():
976 982 dfh = self.opener(self.datafile, "a")
977 983 else:
978 984 dfh = None
979 985 ifh = self.opener(self.indexfile, "a+")
980 986 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
981 987
982 988 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
983 989 if text is None: text = ""
984 990 if p1 is None: p1 = self.tip()
985 991 if p2 is None: p2 = nullid
986 992
987 993 node = hash(text, p1, p2)
988 994
989 995 if node in self.nodemap:
990 996 return node
991 997
992 998 n = self.count()
993 999 t = n - 1
994 1000
995 1001 if n:
996 1002 base = self.base(t)
997 1003 start = self.start(base)
998 1004 end = self.end(t)
999 1005 if not d:
1000 1006 prev = self.revision(self.tip())
1001 1007 d = self.diff(prev, text)
1002 1008 data = compress(d)
1003 1009 l = len(data[1]) + len(data[0])
1004 1010 dist = end - start + l
1005 1011
1006 1012 # full versions are inserted when the needed deltas
1007 1013 # become comparable to the uncompressed text
1008 1014 if not n or dist > len(text) * 2:
1009 1015 data = compress(text)
1010 1016 l = len(data[1]) + len(data[0])
1011 1017 base = n
1012 1018 else:
1013 1019 base = self.base(t)
1014 1020
1015 1021 offset = 0
1016 1022 if t >= 0:
1017 1023 offset = self.end(t)
1018 1024
1019 1025 if self.version == REVLOGV0:
1020 1026 e = (offset, l, base, link, p1, p2, node)
1021 1027 else:
1022 1028 e = (self.offset_type(offset, 0), l, len(text),
1023 1029 base, link, self.rev(p1), self.rev(p2), node)
1024 1030
1025 1031 self.index.append(e)
1026 1032 self.nodemap[node] = n
1027 1033 entry = struct.pack(self.indexformat, *e)
1028 1034
1029 1035 if not self.inlinedata():
1030 1036 transaction.add(self.datafile, offset)
1031 1037 transaction.add(self.indexfile, n * len(entry))
1032 1038 if data[0]:
1033 1039 dfh.write(data[0])
1034 1040 dfh.write(data[1])
1035 1041 dfh.flush()
1036 1042 else:
1037 1043 ifh.seek(0, 2)
1038 1044 transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
1039 1045
1040 1046 if len(self.index) == 1 and self.version != REVLOGV0:
1041 1047 l = struct.pack(versionformat, self.version)
1042 1048 ifh.write(l)
1043 1049 entry = entry[4:]
1044 1050
1045 1051 ifh.write(entry)
1046 1052
1047 1053 if self.inlinedata():
1048 1054 ifh.write(data[0])
1049 1055 ifh.write(data[1])
1050 1056 self.checkinlinesize(transaction, ifh)
1051 1057
1052 1058 self.cache = (node, n, text)
1053 1059 return node
1054 1060
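# A self-contained sketch of the heuristic used above to choose between
# storing a delta and storing a full snapshot: once the span of the delta
# chain since the last full version (end - start plus the new delta) exceeds
# twice the uncompressed text size, a full version is written and becomes the
# new base. `_sketch_store_full` is a hypothetical helper.
def _sketch_store_full(chain_dist, textlen, is_first_rev):
    return is_first_rev or chain_dist > textlen * 2

# e.g. a 1 KB text whose accumulated chain already spans 2.5 KB gets a fresh
# snapshot: _sketch_store_full(2560, 1024, False) is True.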
1055 1061 def ancestor(self, a, b):
1056 1062 """calculate the least common ancestor of nodes a and b"""
1057 1063
1058 1064 def parents(rev):
1059 1065 return [p for p in self.parentrevs(rev) if p != nullrev]
1060 1066
1061 1067 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1062 1068 if c is None:
1063 1069 return nullid
1064 1070
1065 1071 return self.node(c)
1066 1072
1067 1073 def group(self, nodelist, lookup, infocollect=None):
1068 1074 """calculate a delta group
1069 1075
1070 1076 Given a list of changeset revs, return a set of deltas and
1071 1077 metadata corresponding to nodes. The first delta is
1072 1078 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1073 1079 have this parent, as it has all history before these
1074 1080 changesets. The parent is parent[0].
1075 1081 """
1076 1082 revs = [self.rev(n) for n in nodelist]
1077 1083
1078 1084 # if we don't have any revisions touched by these changesets, bail
1079 1085 if not revs:
1080 1086 yield changegroup.closechunk()
1081 1087 return
1082 1088
1083 1089 # add the parent of the first rev
1084 1090 p = self.parents(self.node(revs[0]))[0]
1085 1091 revs.insert(0, self.rev(p))
1086 1092
1087 1093 # build deltas
1088 1094 for d in xrange(0, len(revs) - 1):
1089 1095 a, b = revs[d], revs[d + 1]
1090 1096 nb = self.node(b)
1091 1097
1092 1098 if infocollect is not None:
1093 1099 infocollect(nb)
1094 1100
1095 1101 d = self.revdiff(a, b)
1096 1102 p = self.parents(nb)
1097 1103 meta = nb + p[0] + p[1] + lookup(nb)
1098 1104 yield changegroup.genchunk("%s%s" % (meta, d))
1099 1105
1100 1106 yield changegroup.closechunk()
1101 1107
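# A self-contained sketch of the per-revision payload group() emits and
# addgroup() parses below: an 80-byte header of four 20-byte nodeids
# (node, p1, p2, linknode) followed by the delta against the previous
# revision in the stream; changegroup.genchunk()/closechunk() add the outer
# framing. The `_sketch_*` names are hypothetical.
import struct

def _sketch_pack_meta(node, p1, p2, linknode, delta):
    return struct.pack("20s20s20s20s", node, p1, p2, linknode) + delta

def _sketch_unpack_meta(chunk):
    node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
    return node, p1, p2, cs, chunk[80:]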
1102 1108 def addgroup(self, revs, linkmapper, transaction, unique=0):
1103 1109 """
1104 1110 add a delta group
1105 1111
1106 1112 Given a set of deltas, add them to the revision log. The
1107 1113 first delta is against its parent, which should be in our
1108 1114 log; the rest are against the previous delta.
1109 1115 """
1110 1116
1111 1117 # track the base of the current delta log
1112 1118 r = self.count()
1113 1119 t = r - 1
1114 1120 node = None
1115 1121
1116 1122 base = prev = nullrev
1117 1123 start = end = textlen = 0
1118 1124 if r:
1119 1125 end = self.end(t)
1120 1126
1121 1127 ifh = self.opener(self.indexfile, "a+")
1122 1128 ifh.seek(0, 2)
1123 1129 transaction.add(self.indexfile, ifh.tell(), self.count())
1124 1130 if self.inlinedata():
1125 1131 dfh = None
1126 1132 else:
1127 1133 transaction.add(self.datafile, end)
1128 1134 dfh = self.opener(self.datafile, "a")
1129 1135
1130 1136 # loop through our set of deltas
1131 1137 chain = None
1132 1138 for chunk in revs:
1133 1139 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1134 1140 link = linkmapper(cs)
1135 1141 if node in self.nodemap:
1136 1142 # this can happen if two branches make the same change
1137 1143 # if unique:
1138 1144 # raise RevlogError(_("already have %s") % hex(node[:4]))
1139 1145 chain = node
1140 1146 continue
1141 1147 delta = chunk[80:]
1142 1148
1143 1149 for p in (p1, p2):
1144 1150 if p not in self.nodemap:
1145 1151 raise RevlogError(_("unknown parent %s") % short(p))
1146 1152
1147 1153 if not chain:
1148 1154 # retrieve the parent revision of the delta chain
1149 1155 chain = p1
1150 1156 if chain not in self.nodemap:
1151 1157 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1152 1158
1153 1159 # full versions are inserted when the needed deltas become
1154 1160 # comparable to the uncompressed text or when the previous
1155 1161 # version is not the one we have a delta against. We use
1156 1162 # the size of the previous full rev as a proxy for the
1157 1163 # current size.
1158 1164
1159 1165 if chain == prev:
1160 1166 tempd = compress(delta)
1161 1167 cdelta = tempd[0] + tempd[1]
1162 1168 textlen = mdiff.patchedsize(textlen, delta)
1163 1169
1164 1170 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1165 1171 # flush our writes here so we can read it in revision
1166 1172 if dfh:
1167 1173 dfh.flush()
1168 1174 ifh.flush()
1169 1175 text = self.revision(chain)
1170 1176 text = self.patches(text, [delta])
1171 1177 chk = self._addrevision(text, transaction, link, p1, p2, None,
1172 1178 ifh, dfh)
1173 1179 if not dfh and not self.inlinedata():
1174 1180 # addrevision switched from inline to conventional
1175 1181 # reopen the index
1176 1182 dfh = self.opener(self.datafile, "a")
1177 1183 ifh = self.opener(self.indexfile, "a")
1178 1184 if chk != node:
1179 1185 raise RevlogError(_("consistency error adding group"))
1180 1186 textlen = len(text)
1181 1187 else:
1182 1188 if self.version == REVLOGV0:
1183 1189 e = (end, len(cdelta), base, link, p1, p2, node)
1184 1190 else:
1185 1191 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1186 1192 link, self.rev(p1), self.rev(p2), node)
1187 1193 self.index.append(e)
1188 1194 self.nodemap[node] = r
1189 1195 if self.inlinedata():
1190 1196 ifh.write(struct.pack(self.indexformat, *e))
1191 1197 ifh.write(cdelta)
1192 1198 self.checkinlinesize(transaction, ifh)
1193 1199 if not self.inlinedata():
1194 1200 dfh = self.opener(self.datafile, "a")
1195 1201 ifh = self.opener(self.indexfile, "a")
1196 1202 else:
1197 1203 dfh.write(cdelta)
1198 1204 ifh.write(struct.pack(self.indexformat, *e))
1199 1205
1200 1206 t, r, chain, prev = r, r + 1, node, node
1201 1207 base = self.base(t)
1202 1208 start = self.start(base)
1203 1209 end = self.end(t)
1204 1210
1205 1211 return node
1206 1212
1207 1213 def strip(self, rev, minlink):
1208 1214 if self.count() == 0 or rev >= self.count():
1209 1215 return
1210 1216
1211 1217 if isinstance(self.index, lazyindex):
1212 1218 self.loadindexmap()
1213 1219
1214 1220 # When stripping away a revision, we need to make sure it
1215 1221 # does not actually belong to an older changeset.
1216 1222 # The minlink parameter defines the oldest revision
1217 1223 # we're allowed to strip away.
1218 1224 while minlink > self.index[rev][-4]:
1219 1225 rev += 1
1220 1226 if rev >= self.count():
1221 1227 return
1222 1228
1223 1229 # first truncate the files on disk
1224 1230 end = self.start(rev)
1225 1231 if not self.inlinedata():
1226 1232 df = self.opener(self.datafile, "a")
1227 1233 df.truncate(end)
1228 1234 end = rev * struct.calcsize(self.indexformat)
1229 1235 else:
1230 1236 end += rev * struct.calcsize(self.indexformat)
1231 1237
1232 1238 indexf = self.opener(self.indexfile, "a")
1233 1239 indexf.truncate(end)
1234 1240
1235 1241 # then reset internal state in memory to forget those revisions
1236 1242 self.cache = None
1237 1243 self.chunkcache = None
1238 1244 for x in xrange(rev, self.count()):
1239 1245 del self.nodemap[self.node(x)]
1240 1246
1241 1247 del self.index[rev:]
1242 1248
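# A self-contained sketch of the guard at the top of strip(): the requested
# cut point is pushed forward until every revision being removed has a
# linkrev of at least minlink, so nothing still referenced by an older
# changeset is truncated away. `linkrevs[r]` stands in for
# self.index[r][-4]; the helper name is hypothetical.
def _sketch_strip_point(linkrevs, rev, minlink):
    while rev < len(linkrevs) and minlink > linkrevs[rev]:
        rev += 1
    return rev   # first revision actually safe to strip (may be len(linkrevs))

# e.g. _sketch_strip_point([0, 1, 1, 2, 3], rev=1, minlink=2) returns 3.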
1243 1249 def checksize(self):
1244 1250 expected = 0
1245 1251 if self.count():
1246 1252 expected = self.end(self.count() - 1)
1247 1253
1248 1254 try:
1249 1255 f = self.opener(self.datafile)
1250 1256 f.seek(0, 2)
1251 1257 actual = f.tell()
1252 1258 dd = actual - expected
1253 1259 except IOError, inst:
1254 1260 if inst.errno != errno.ENOENT:
1255 1261 raise
1256 1262 dd = 0
1257 1263
1258 1264 try:
1259 1265 f = self.opener(self.indexfile)
1260 1266 f.seek(0, 2)
1261 1267 actual = f.tell()
1262 1268 s = struct.calcsize(self.indexformat)
1263 1269 i = actual / s
1264 1270 di = actual - (i * s)
1265 1271 if self.inlinedata():
1266 1272 databytes = 0
1267 1273 for r in xrange(self.count()):
1268 1274 databytes += self.length(r)
1269 1275 dd = 0
1270 1276 di = actual - self.count() * s - databytes
1271 1277 except IOError, inst:
1272 1278 if inst.errno != errno.ENOENT:
1273 1279 raise
1274 1280 di = 0
1275 1281
1276 1282 return (dd, di)
1277 1283
1278 1284