ui: replace parentui mechanism with repo.baseui
Matt Mackall
r8189:d2899a85 default
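This commit drops the old ui.parentui back-reference in favour of a repo.baseui attribute: when remoteui() below is handed a repository, it now builds the remote ui from repo.baseui (the configuration as it stood before any repo-specific .hg/hgrc was loaded) and copies only a few selected options from repo.ui. A minimal sketch of the new behaviour, condensed from the cmdutil.py hunk below; the hg.repository() caller at the end is illustrative only and not part of this diff:

    # sketch of the post-change remoteui(); bundle.mainreporoot handling omitted
    def remoteui(src, opts):
        if hasattr(src, 'baseui'):      # src looks like a repository
            dst = src.baseui            # start from the global config, not repo config
            src = src.ui                # but still read a few options from the repo ui
        else:                           # src is already a global ui object
            dst = src
        for o in ('ssh', 'remotecmd'):  # copy ssh-specific overrides onto the remote ui
            v = opts.get(o) or src.config('ui', o)
            if v:
                dst.setconfig('ui', o, v)
        return dst

    # a typical caller would then look roughly like:
    #   other = hg.repository(cmdutil.remoteui(repo, opts), source)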
@@ -1,1226 +1,1226 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat, encoding
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno, error
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
16 16 def findpossible(cmd, table, strict=False):
17 17 """
18 18 Return cmd -> (aliases, command table entry)
19 19 for each matching command.
20 20 Return debug commands (or their aliases) only if no normal command matches.
21 21 """
22 22 choice = {}
23 23 debugchoice = {}
24 24 for e in table.keys():
25 25 aliases = e.lstrip("^").split("|")
26 26 found = None
27 27 if cmd in aliases:
28 28 found = cmd
29 29 elif not strict:
30 30 for a in aliases:
31 31 if a.startswith(cmd):
32 32 found = a
33 33 break
34 34 if found is not None:
35 35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 36 debugchoice[found] = (aliases, table[e])
37 37 else:
38 38 choice[found] = (aliases, table[e])
39 39
40 40 if not choice and debugchoice:
41 41 choice = debugchoice
42 42
43 43 return choice
44 44
45 45 def findcmd(cmd, table, strict=True):
46 46 """Return (aliases, command table entry) for command string."""
47 47 choice = findpossible(cmd, table, strict)
48 48
49 49 if cmd in choice:
50 50 return choice[cmd]
51 51
52 52 if len(choice) > 1:
53 53 clist = choice.keys()
54 54 clist.sort()
55 55 raise error.AmbiguousCommand(cmd, clist)
56 56
57 57 if choice:
58 58 return choice.values()[0]
59 59
60 60 raise error.UnknownCommand(cmd)
61 61
62 62 def bail_if_changed(repo):
63 63 if repo.dirstate.parents()[1] != nullid:
64 64 raise util.Abort(_('outstanding uncommitted merge'))
65 65 modified, added, removed, deleted = repo.status()[:4]
66 66 if modified or added or removed or deleted:
67 67 raise util.Abort(_("outstanding uncommitted changes"))
68 68
69 69 def logmessage(opts):
70 70 """ get the log message according to -m and -l option """
71 71 message = opts.get('message')
72 72 logfile = opts.get('logfile')
73 73
74 74 if message and logfile:
75 75 raise util.Abort(_('options --message and --logfile are mutually '
76 76 'exclusive'))
77 77 if not message and logfile:
78 78 try:
79 79 if logfile == '-':
80 80 message = sys.stdin.read()
81 81 else:
82 82 message = open(logfile).read()
83 83 except IOError, inst:
84 84 raise util.Abort(_("can't read commit message '%s': %s") %
85 85 (logfile, inst.strerror))
86 86 return message
87 87
88 88 def loglimit(opts):
89 89 """get the log limit according to option -l/--limit"""
90 90 limit = opts.get('limit')
91 91 if limit:
92 92 try:
93 93 limit = int(limit)
94 94 except ValueError:
95 95 raise util.Abort(_('limit must be a positive integer'))
96 96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 97 else:
98 98 limit = sys.maxint
99 99 return limit
100 100
101 101 def remoteui(src, opts):
102 102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'ui'): # looks like a repository
104 dst = src.ui.parentui # drop repo-specific config
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
105 105 src = src.ui # copy target options from repo
106 else: # assume it's a ui object
106 else: # assume it's a global ui object
107 107 dst = src # keep all global options
108 108
109 109 # copy ssh-specific options
110 110 for o in 'ssh', 'remotecmd':
111 111 v = opts.get(o) or src.config('ui', o)
112 112 if v:
113 113 dst.setconfig("ui", o, v)
114 114 # copy bundle-specific options
115 115 r = src.config('bundle', 'mainreporoot')
116 116 if r:
117 117 dst.setconfig('bundle', 'mainreporoot', r)
118 118
119 119 return dst
120 120
121 121 def revpair(repo, revs):
122 122 '''return pair of nodes, given list of revisions. second item can
123 123 be None, meaning use working dir.'''
124 124
125 125 def revfix(repo, val, defval):
126 126 if not val and val != 0 and defval is not None:
127 127 val = defval
128 128 return repo.lookup(val)
129 129
130 130 if not revs:
131 131 return repo.dirstate.parents()[0], None
132 132 end = None
133 133 if len(revs) == 1:
134 134 if revrangesep in revs[0]:
135 135 start, end = revs[0].split(revrangesep, 1)
136 136 start = revfix(repo, start, 0)
137 137 end = revfix(repo, end, len(repo) - 1)
138 138 else:
139 139 start = revfix(repo, revs[0], None)
140 140 elif len(revs) == 2:
141 141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 142 raise util.Abort(_('too many revisions specified'))
143 143 start = revfix(repo, revs[0], None)
144 144 end = revfix(repo, revs[1], None)
145 145 else:
146 146 raise util.Abort(_('too many revisions specified'))
147 147 return start, end
148 148
149 149 def revrange(repo, revs):
150 150 """Yield revision as strings from a list of revision specifications."""
151 151
152 152 def revfix(repo, val, defval):
153 153 if not val and val != 0 and defval is not None:
154 154 return defval
155 155 return repo.changelog.rev(repo.lookup(val))
156 156
157 157 seen, l = {}, []
158 158 for spec in revs:
159 159 if revrangesep in spec:
160 160 start, end = spec.split(revrangesep, 1)
161 161 start = revfix(repo, start, 0)
162 162 end = revfix(repo, end, len(repo) - 1)
163 163 step = start > end and -1 or 1
164 164 for rev in xrange(start, end+step, step):
165 165 if rev in seen:
166 166 continue
167 167 seen[rev] = 1
168 168 l.append(rev)
169 169 else:
170 170 rev = revfix(repo, spec, None)
171 171 if rev in seen:
172 172 continue
173 173 seen[rev] = 1
174 174 l.append(rev)
175 175
176 176 return l
177 177
178 178 def make_filename(repo, pat, node,
179 179 total=None, seqno=None, revwidth=None, pathname=None):
180 180 node_expander = {
181 181 'H': lambda: hex(node),
182 182 'R': lambda: str(repo.changelog.rev(node)),
183 183 'h': lambda: short(node),
184 184 }
185 185 expander = {
186 186 '%': lambda: '%',
187 187 'b': lambda: os.path.basename(repo.root),
188 188 }
189 189
190 190 try:
191 191 if node:
192 192 expander.update(node_expander)
193 193 if node:
194 194 expander['r'] = (lambda:
195 195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 196 if total is not None:
197 197 expander['N'] = lambda: str(total)
198 198 if seqno is not None:
199 199 expander['n'] = lambda: str(seqno)
200 200 if total is not None and seqno is not None:
201 201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 202 if pathname is not None:
203 203 expander['s'] = lambda: os.path.basename(pathname)
204 204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 205 expander['p'] = lambda: pathname
206 206
207 207 newname = []
208 208 patlen = len(pat)
209 209 i = 0
210 210 while i < patlen:
211 211 c = pat[i]
212 212 if c == '%':
213 213 i += 1
214 214 c = pat[i]
215 215 c = expander[c]()
216 216 newname.append(c)
217 217 i += 1
218 218 return ''.join(newname)
219 219 except KeyError, inst:
220 220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 221 inst.args[0])
222 222
223 223 def make_file(repo, pat, node=None,
224 224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225 225
226 226 writable = 'w' in mode or 'a' in mode
227 227
228 228 if not pat or pat == '-':
229 229 return writable and sys.stdout or sys.stdin
230 230 if hasattr(pat, 'write') and writable:
231 231 return pat
232 232 if hasattr(pat, 'read') and 'r' in mode:
233 233 return pat
234 234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 235 pathname),
236 236 mode)
237 237
238 238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 239 if not globbed and default == 'relpath':
240 240 pats = util.expand_glob(pats or [])
241 241 m = _match.match(repo.root, repo.getcwd(), pats,
242 242 opts.get('include'), opts.get('exclude'), default)
243 243 def badfn(f, msg):
244 244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 245 return False
246 246 m.bad = badfn
247 247 return m
248 248
249 249 def matchall(repo):
250 250 return _match.always(repo.root, repo.getcwd())
251 251
252 252 def matchfiles(repo, files):
253 253 return _match.exact(repo.root, repo.getcwd(), files)
254 254
255 255 def findrenames(repo, added=None, removed=None, threshold=0.5):
256 256 '''find renamed files -- yields (before, after, score) tuples'''
257 257 if added is None or removed is None:
258 258 added, removed = repo.status()[1:3]
259 259 ctx = repo['.']
260 260 for a in added:
261 261 aa = repo.wread(a)
262 262 bestname, bestscore = None, threshold
263 263 for r in removed:
264 264 rr = ctx.filectx(r).data()
265 265
266 266 # bdiff.blocks() returns blocks of matching lines
267 267 # count the number of bytes in each
268 268 equal = 0
269 269 alines = mdiff.splitnewlines(aa)
270 270 matches = bdiff.blocks(aa, rr)
271 271 for x1,x2,y1,y2 in matches:
272 272 for line in alines[x1:x2]:
273 273 equal += len(line)
274 274
275 275 lengths = len(aa) + len(rr)
276 276 if lengths:
277 277 myscore = equal*2.0 / lengths
278 278 if myscore >= bestscore:
279 279 bestname, bestscore = r, myscore
280 280 if bestname:
281 281 yield bestname, a, bestscore
282 282
283 283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
284 284 if dry_run is None:
285 285 dry_run = opts.get('dry_run')
286 286 if similarity is None:
287 287 similarity = float(opts.get('similarity') or 0)
288 288 add, remove = [], []
289 289 mapping = {}
290 290 audit_path = util.path_auditor(repo.root)
291 291 m = match(repo, pats, opts)
292 292 for abs in repo.walk(m):
293 293 target = repo.wjoin(abs)
294 294 good = True
295 295 try:
296 296 audit_path(abs)
297 297 except:
298 298 good = False
299 299 rel = m.rel(abs)
300 300 exact = m.exact(abs)
301 301 if good and abs not in repo.dirstate:
302 302 add.append(abs)
303 303 mapping[abs] = rel, m.exact(abs)
304 304 if repo.ui.verbose or not exact:
305 305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
306 306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
307 307 or (os.path.isdir(target) and not os.path.islink(target))):
308 308 remove.append(abs)
309 309 mapping[abs] = rel, exact
310 310 if repo.ui.verbose or not exact:
311 311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
312 312 if not dry_run:
313 313 repo.remove(remove)
314 314 repo.add(add)
315 315 if similarity > 0:
316 316 for old, new, score in findrenames(repo, add, remove, similarity):
317 317 oldrel, oldexact = mapping[old]
318 318 newrel, newexact = mapping[new]
319 319 if repo.ui.verbose or not oldexact or not newexact:
320 320 repo.ui.status(_('recording removal of %s as rename to %s '
321 321 '(%d%% similar)\n') %
322 322 (oldrel, newrel, score * 100))
323 323 if not dry_run:
324 324 repo.copy(old, new)
325 325
326 326 def copy(ui, repo, pats, opts, rename=False):
327 327 # called with the repo lock held
328 328 #
329 329 # hgsep => pathname that uses "/" to separate directories
330 330 # ossep => pathname that uses os.sep to separate directories
331 331 cwd = repo.getcwd()
332 332 targets = {}
333 333 after = opts.get("after")
334 334 dryrun = opts.get("dry_run")
335 335
336 336 def walkpat(pat):
337 337 srcs = []
338 338 m = match(repo, [pat], opts, globbed=True)
339 339 for abs in repo.walk(m):
340 340 state = repo.dirstate[abs]
341 341 rel = m.rel(abs)
342 342 exact = m.exact(abs)
343 343 if state in '?r':
344 344 if exact and state == '?':
345 345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
346 346 if exact and state == 'r':
347 347 ui.warn(_('%s: not copying - file has been marked for'
348 348 ' remove\n') % rel)
349 349 continue
350 350 # abs: hgsep
351 351 # rel: ossep
352 352 srcs.append((abs, rel, exact))
353 353 return srcs
354 354
355 355 # abssrc: hgsep
356 356 # relsrc: ossep
357 357 # otarget: ossep
358 358 def copyfile(abssrc, relsrc, otarget, exact):
359 359 abstarget = util.canonpath(repo.root, cwd, otarget)
360 360 reltarget = repo.pathto(abstarget, cwd)
361 361 target = repo.wjoin(abstarget)
362 362 src = repo.wjoin(abssrc)
363 363 state = repo.dirstate[abstarget]
364 364
365 365 # check for collisions
366 366 prevsrc = targets.get(abstarget)
367 367 if prevsrc is not None:
368 368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
369 369 (reltarget, repo.pathto(abssrc, cwd),
370 370 repo.pathto(prevsrc, cwd)))
371 371 return
372 372
373 373 # check for overwrites
374 374 exists = os.path.exists(target)
375 375 if not after and exists or after and state in 'mn':
376 376 if not opts['force']:
377 377 ui.warn(_('%s: not overwriting - file exists\n') %
378 378 reltarget)
379 379 return
380 380
381 381 if after:
382 382 if not exists:
383 383 return
384 384 elif not dryrun:
385 385 try:
386 386 if exists:
387 387 os.unlink(target)
388 388 targetdir = os.path.dirname(target) or '.'
389 389 if not os.path.isdir(targetdir):
390 390 os.makedirs(targetdir)
391 391 util.copyfile(src, target)
392 392 except IOError, inst:
393 393 if inst.errno == errno.ENOENT:
394 394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
395 395 else:
396 396 ui.warn(_('%s: cannot copy - %s\n') %
397 397 (relsrc, inst.strerror))
398 398 return True # report a failure
399 399
400 400 if ui.verbose or not exact:
401 401 if rename:
402 402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
403 403 else:
404 404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
405 405
406 406 targets[abstarget] = abssrc
407 407
408 408 # fix up dirstate
409 409 origsrc = repo.dirstate.copied(abssrc) or abssrc
410 410 if abstarget == origsrc: # copying back a copy?
411 411 if state not in 'mn' and not dryrun:
412 412 repo.dirstate.normallookup(abstarget)
413 413 else:
414 414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
415 415 if not ui.quiet:
416 416 ui.warn(_("%s has not been committed yet, so no copy "
417 417 "data will be stored for %s.\n")
418 418 % (repo.pathto(origsrc, cwd), reltarget))
419 419 if repo.dirstate[abstarget] in '?r' and not dryrun:
420 420 repo.add([abstarget])
421 421 elif not dryrun:
422 422 repo.copy(origsrc, abstarget)
423 423
424 424 if rename and not dryrun:
425 425 repo.remove([abssrc], not after)
426 426
427 427 # pat: ossep
428 428 # dest ossep
429 429 # srcs: list of (hgsep, hgsep, ossep, bool)
430 430 # return: function that takes hgsep and returns ossep
431 431 def targetpathfn(pat, dest, srcs):
432 432 if os.path.isdir(pat):
433 433 abspfx = util.canonpath(repo.root, cwd, pat)
434 434 abspfx = util.localpath(abspfx)
435 435 if destdirexists:
436 436 striplen = len(os.path.split(abspfx)[0])
437 437 else:
438 438 striplen = len(abspfx)
439 439 if striplen:
440 440 striplen += len(os.sep)
441 441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
442 442 elif destdirexists:
443 443 res = lambda p: os.path.join(dest,
444 444 os.path.basename(util.localpath(p)))
445 445 else:
446 446 res = lambda p: dest
447 447 return res
448 448
449 449 # pat: ossep
450 450 # dest ossep
451 451 # srcs: list of (hgsep, hgsep, ossep, bool)
452 452 # return: function that takes hgsep and returns ossep
453 453 def targetpathafterfn(pat, dest, srcs):
454 454 if util.patkind(pat, None)[0]:
455 455 # a mercurial pattern
456 456 res = lambda p: os.path.join(dest,
457 457 os.path.basename(util.localpath(p)))
458 458 else:
459 459 abspfx = util.canonpath(repo.root, cwd, pat)
460 460 if len(abspfx) < len(srcs[0][0]):
461 461 # A directory. Either the target path contains the last
462 462 # component of the source path or it does not.
463 463 def evalpath(striplen):
464 464 score = 0
465 465 for s in srcs:
466 466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
467 467 if os.path.exists(t):
468 468 score += 1
469 469 return score
470 470
471 471 abspfx = util.localpath(abspfx)
472 472 striplen = len(abspfx)
473 473 if striplen:
474 474 striplen += len(os.sep)
475 475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
476 476 score = evalpath(striplen)
477 477 striplen1 = len(os.path.split(abspfx)[0])
478 478 if striplen1:
479 479 striplen1 += len(os.sep)
480 480 if evalpath(striplen1) > score:
481 481 striplen = striplen1
482 482 res = lambda p: os.path.join(dest,
483 483 util.localpath(p)[striplen:])
484 484 else:
485 485 # a file
486 486 if destdirexists:
487 487 res = lambda p: os.path.join(dest,
488 488 os.path.basename(util.localpath(p)))
489 489 else:
490 490 res = lambda p: dest
491 491 return res
492 492
493 493
494 494 pats = util.expand_glob(pats)
495 495 if not pats:
496 496 raise util.Abort(_('no source or destination specified'))
497 497 if len(pats) == 1:
498 498 raise util.Abort(_('no destination specified'))
499 499 dest = pats.pop()
500 500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
501 501 if not destdirexists:
502 502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
503 503 raise util.Abort(_('with multiple sources, destination must be an '
504 504 'existing directory'))
505 505 if util.endswithsep(dest):
506 506 raise util.Abort(_('destination %s is not a directory') % dest)
507 507
508 508 tfn = targetpathfn
509 509 if after:
510 510 tfn = targetpathafterfn
511 511 copylist = []
512 512 for pat in pats:
513 513 srcs = walkpat(pat)
514 514 if not srcs:
515 515 continue
516 516 copylist.append((tfn(pat, dest, srcs), srcs))
517 517 if not copylist:
518 518 raise util.Abort(_('no files to copy'))
519 519
520 520 errors = 0
521 521 for targetpath, srcs in copylist:
522 522 for abssrc, relsrc, exact in srcs:
523 523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
524 524 errors += 1
525 525
526 526 if errors:
527 527 ui.warn(_('(consider using --after)\n'))
528 528
529 529 return errors
530 530
531 531 def service(opts, parentfn=None, initfn=None, runfn=None):
532 532 '''Run a command as a service.'''
533 533
534 534 if opts['daemon'] and not opts['daemon_pipefds']:
535 535 rfd, wfd = os.pipe()
536 536 args = sys.argv[:]
537 537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
538 538 # Don't pass --cwd to the child process, because we've already
539 539 # changed directory.
540 540 for i in xrange(1,len(args)):
541 541 if args[i].startswith('--cwd='):
542 542 del args[i]
543 543 break
544 544 elif args[i].startswith('--cwd'):
545 545 del args[i:i+2]
546 546 break
547 547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
548 548 args[0], args)
549 549 os.close(wfd)
550 550 os.read(rfd, 1)
551 551 if parentfn:
552 552 return parentfn(pid)
553 553 else:
554 554 os._exit(0)
555 555
556 556 if initfn:
557 557 initfn()
558 558
559 559 if opts['pid_file']:
560 560 fp = open(opts['pid_file'], 'w')
561 561 fp.write(str(os.getpid()) + '\n')
562 562 fp.close()
563 563
564 564 if opts['daemon_pipefds']:
565 565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
566 566 os.close(rfd)
567 567 try:
568 568 os.setsid()
569 569 except AttributeError:
570 570 pass
571 571 os.write(wfd, 'y')
572 572 os.close(wfd)
573 573 sys.stdout.flush()
574 574 sys.stderr.flush()
575 575 fd = os.open(util.nulldev, os.O_RDWR)
576 576 if fd != 0: os.dup2(fd, 0)
577 577 if fd != 1: os.dup2(fd, 1)
578 578 if fd != 2: os.dup2(fd, 2)
579 579 if fd not in (0, 1, 2): os.close(fd)
580 580
581 581 if runfn:
582 582 return runfn()
583 583
584 584 class changeset_printer(object):
585 585 '''show changeset information when templating not requested.'''
586 586
587 587 def __init__(self, ui, repo, patch, diffopts, buffered):
588 588 self.ui = ui
589 589 self.repo = repo
590 590 self.buffered = buffered
591 591 self.patch = patch
592 592 self.diffopts = diffopts
593 593 self.header = {}
594 594 self.hunk = {}
595 595 self.lastheader = None
596 596
597 597 def flush(self, rev):
598 598 if rev in self.header:
599 599 h = self.header[rev]
600 600 if h != self.lastheader:
601 601 self.lastheader = h
602 602 self.ui.write(h)
603 603 del self.header[rev]
604 604 if rev in self.hunk:
605 605 self.ui.write(self.hunk[rev])
606 606 del self.hunk[rev]
607 607 return 1
608 608 return 0
609 609
610 610 def show(self, ctx, copies=(), **props):
611 611 if self.buffered:
612 612 self.ui.pushbuffer()
613 613 self._show(ctx, copies, props)
614 614 self.hunk[ctx.rev()] = self.ui.popbuffer()
615 615 else:
616 616 self._show(ctx, copies, props)
617 617
618 618 def _show(self, ctx, copies, props):
619 619 '''show a single changeset or file revision'''
620 620 changenode = ctx.node()
621 621 rev = ctx.rev()
622 622
623 623 if self.ui.quiet:
624 624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
625 625 return
626 626
627 627 log = self.repo.changelog
628 628 changes = log.read(changenode)
629 629 date = util.datestr(changes[2])
630 630 extra = changes[5]
631 631 branch = extra.get("branch")
632 632
633 633 hexfunc = self.ui.debugflag and hex or short
634 634
635 635 parents = [(p, hexfunc(log.node(p)))
636 636 for p in self._meaningful_parentrevs(log, rev)]
637 637
638 638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
639 639
640 640 # don't show the default branch name
641 641 if branch != 'default':
642 642 branch = encoding.tolocal(branch)
643 643 self.ui.write(_("branch: %s\n") % branch)
644 644 for tag in self.repo.nodetags(changenode):
645 645 self.ui.write(_("tag: %s\n") % tag)
646 646 for parent in parents:
647 647 self.ui.write(_("parent: %d:%s\n") % parent)
648 648
649 649 if self.ui.debugflag:
650 650 self.ui.write(_("manifest: %d:%s\n") %
651 651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
652 652 self.ui.write(_("user: %s\n") % changes[1])
653 653 self.ui.write(_("date: %s\n") % date)
654 654
655 655 if self.ui.debugflag:
656 656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
657 657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
658 658 files):
659 659 if value:
660 660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
661 661 elif changes[3] and self.ui.verbose:
662 662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
663 663 if copies and self.ui.verbose:
664 664 copies = ['%s (%s)' % c for c in copies]
665 665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
666 666
667 667 if extra and self.ui.debugflag:
668 668 for key, value in util.sort(extra.items()):
669 669 self.ui.write(_("extra: %s=%s\n")
670 670 % (key, value.encode('string_escape')))
671 671
672 672 description = changes[4].strip()
673 673 if description:
674 674 if self.ui.verbose:
675 675 self.ui.write(_("description:\n"))
676 676 self.ui.write(description)
677 677 self.ui.write("\n\n")
678 678 else:
679 679 self.ui.write(_("summary: %s\n") %
680 680 description.splitlines()[0])
681 681 self.ui.write("\n")
682 682
683 683 self.showpatch(changenode)
684 684
685 685 def showpatch(self, node):
686 686 if self.patch:
687 687 prev = self.repo.changelog.parents(node)[0]
688 688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
689 689 opts=patch.diffopts(self.ui, self.diffopts))
690 690 for chunk in chunks:
691 691 self.ui.write(chunk)
692 692 self.ui.write("\n")
693 693
694 694 def _meaningful_parentrevs(self, log, rev):
695 695 """Return list of meaningful (or all if debug) parentrevs for rev.
696 696
697 697 For merges (two non-nullrev revisions) both parents are meaningful.
698 698 Otherwise the first parent revision is considered meaningful if it
699 699 is not the preceding revision.
700 700 """
701 701 parents = log.parentrevs(rev)
702 702 if not self.ui.debugflag and parents[1] == nullrev:
703 703 if parents[0] >= rev - 1:
704 704 parents = []
705 705 else:
706 706 parents = [parents[0]]
707 707 return parents
708 708
709 709
710 710 class changeset_templater(changeset_printer):
711 711 '''format changeset information.'''
712 712
713 713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
714 714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
715 715 filters = templatefilters.filters.copy()
716 716 filters['formatnode'] = (ui.debugflag and (lambda x: x)
717 717 or (lambda x: x[:12]))
718 718 self.t = templater.templater(mapfile, filters,
719 719 cache={
720 720 'parent': '{rev}:{node|formatnode} ',
721 721 'manifest': '{rev}:{node|formatnode}',
722 722 'filecopy': '{name} ({source})'})
723 723
724 724 def use_template(self, t):
725 725 '''set template string to use'''
726 726 self.t.cache['changeset'] = t
727 727
728 728 def _meaningful_parentrevs(self, ctx):
729 729 """Return list of meaningful (or all if debug) parentrevs for rev.
730 730 """
731 731 parents = ctx.parents()
732 732 if len(parents) > 1:
733 733 return parents
734 734 if self.ui.debugflag:
735 735 return [parents[0], self.repo['null']]
736 736 if parents[0].rev() >= ctx.rev() - 1:
737 737 return []
738 738 return parents
739 739
740 740 def _show(self, ctx, copies, props):
741 741 '''show a single changeset or file revision'''
742 742
743 743 def showlist(name, values, plural=None, **args):
744 744 '''expand set of values.
745 745 name is name of key in template map.
746 746 values is list of strings or dicts.
747 747 plural is plural of name, if not simply name + 's'.
748 748
749 749 expansion works like this, given name 'foo'.
750 750
751 751 if values is empty, expand 'no_foos'.
752 752
753 753 if 'foo' not in template map, return values as a string,
754 754 joined by space.
755 755
756 756 expand 'start_foos'.
757 757
758 758 for each value, expand 'foo'. if 'last_foo' in template
759 759 map, expand it instead of 'foo' for last key.
760 760
761 761 expand 'end_foos'.
762 762 '''
763 763 if plural: names = plural
764 764 else: names = name + 's'
765 765 if not values:
766 766 noname = 'no_' + names
767 767 if noname in self.t:
768 768 yield self.t(noname, **args)
769 769 return
770 770 if name not in self.t:
771 771 if isinstance(values[0], str):
772 772 yield ' '.join(values)
773 773 else:
774 774 for v in values:
775 775 yield dict(v, **args)
776 776 return
777 777 startname = 'start_' + names
778 778 if startname in self.t:
779 779 yield self.t(startname, **args)
780 780 vargs = args.copy()
781 781 def one(v, tag=name):
782 782 try:
783 783 vargs.update(v)
784 784 except (AttributeError, ValueError):
785 785 try:
786 786 for a, b in v:
787 787 vargs[a] = b
788 788 except ValueError:
789 789 vargs[name] = v
790 790 return self.t(tag, **vargs)
791 791 lastname = 'last_' + name
792 792 if lastname in self.t:
793 793 last = values.pop()
794 794 else:
795 795 last = None
796 796 for v in values:
797 797 yield one(v)
798 798 if last is not None:
799 799 yield one(last, tag=lastname)
800 800 endname = 'end_' + names
801 801 if endname in self.t:
802 802 yield self.t(endname, **args)
803 803
804 804 def showbranches(**args):
805 805 branch = ctx.branch()
806 806 if branch != 'default':
807 807 branch = encoding.tolocal(branch)
808 808 return showlist('branch', [branch], plural='branches', **args)
809 809
810 810 def showparents(**args):
811 811 parents = [[('rev', p.rev()), ('node', p.hex())]
812 812 for p in self._meaningful_parentrevs(ctx)]
813 813 return showlist('parent', parents, **args)
814 814
815 815 def showtags(**args):
816 816 return showlist('tag', ctx.tags(), **args)
817 817
818 818 def showextras(**args):
819 819 for key, value in util.sort(ctx.extra().items()):
820 820 args = args.copy()
821 821 args.update(dict(key=key, value=value))
822 822 yield self.t('extra', **args)
823 823
824 824 def showcopies(**args):
825 825 c = [{'name': x[0], 'source': x[1]} for x in copies]
826 826 return showlist('file_copy', c, plural='file_copies', **args)
827 827
828 828 files = []
829 829 def getfiles():
830 830 if not files:
831 831 files[:] = self.repo.status(ctx.parents()[0].node(),
832 832 ctx.node())[:3]
833 833 return files
834 834 def showfiles(**args):
835 835 return showlist('file', ctx.files(), **args)
836 836 def showmods(**args):
837 837 return showlist('file_mod', getfiles()[0], **args)
838 838 def showadds(**args):
839 839 return showlist('file_add', getfiles()[1], **args)
840 840 def showdels(**args):
841 841 return showlist('file_del', getfiles()[2], **args)
842 842 def showmanifest(**args):
843 843 args = args.copy()
844 844 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
845 845 node=hex(ctx.changeset()[0])))
846 846 return self.t('manifest', **args)
847 847
848 848 def showdiffstat(**args):
849 849 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
850 850 files, adds, removes = 0, 0, 0
851 851 for i in patch.diffstatdata(util.iterlines(diff)):
852 852 files += 1
853 853 adds += i[1]
854 854 removes += i[2]
855 855 return '%s: +%s/-%s' % (files, adds, removes)
856 856
857 857 defprops = {
858 858 'author': ctx.user(),
859 859 'branches': showbranches,
860 860 'date': ctx.date(),
861 861 'desc': ctx.description().strip(),
862 862 'file_adds': showadds,
863 863 'file_dels': showdels,
864 864 'file_mods': showmods,
865 865 'files': showfiles,
866 866 'file_copies': showcopies,
867 867 'manifest': showmanifest,
868 868 'node': ctx.hex(),
869 869 'parents': showparents,
870 870 'rev': ctx.rev(),
871 871 'tags': showtags,
872 872 'extras': showextras,
873 873 'diffstat': showdiffstat,
874 874 }
875 875 props = props.copy()
876 876 props.update(defprops)
877 877
878 878 # find correct templates for current mode
879 879
880 880 tmplmodes = [
881 881 (True, None),
882 882 (self.ui.verbose, 'verbose'),
883 883 (self.ui.quiet, 'quiet'),
884 884 (self.ui.debugflag, 'debug'),
885 885 ]
886 886
887 887 types = {'header': '', 'changeset': 'changeset'}
888 888 for mode, postfix in tmplmodes:
889 889 for type in types:
890 890 cur = postfix and ('%s_%s' % (type, postfix)) or type
891 891 if mode and cur in self.t:
892 892 types[type] = cur
893 893
894 894 try:
895 895
896 896 # write header
897 897 if types['header']:
898 898 h = templater.stringify(self.t(types['header'], **props))
899 899 if self.buffered:
900 900 self.header[ctx.rev()] = h
901 901 else:
902 902 self.ui.write(h)
903 903
904 904 # write changeset metadata, then patch if requested
905 905 key = types['changeset']
906 906 self.ui.write(templater.stringify(self.t(key, **props)))
907 907 self.showpatch(ctx.node())
908 908
909 909 except KeyError, inst:
910 910 msg = _("%s: no key named '%s'")
911 911 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
912 912 except SyntaxError, inst:
913 913 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
914 914
915 915 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
916 916 """show one changeset using template or regular display.
917 917
918 918 Display format will be the first non-empty hit of:
919 919 1. option 'template'
920 920 2. option 'style'
921 921 3. [ui] setting 'logtemplate'
922 922 4. [ui] setting 'style'
923 923 If all of these values are either the unset or the empty string,
924 924 regular display via changeset_printer() is done.
925 925 """
926 926 # options
927 927 patch = False
928 928 if opts.get('patch'):
929 929 patch = matchfn or matchall(repo)
930 930
931 931 tmpl = opts.get('template')
932 932 style = None
933 933 if tmpl:
934 934 tmpl = templater.parsestring(tmpl, quoted=False)
935 935 else:
936 936 style = opts.get('style')
937 937
938 938 # ui settings
939 939 if not (tmpl or style):
940 940 tmpl = ui.config('ui', 'logtemplate')
941 941 if tmpl:
942 942 tmpl = templater.parsestring(tmpl)
943 943 else:
944 944 style = ui.config('ui', 'style')
945 945
946 946 if not (tmpl or style):
947 947 return changeset_printer(ui, repo, patch, opts, buffered)
948 948
949 949 mapfile = None
950 950 if style and not tmpl:
951 951 mapfile = style
952 952 if not os.path.split(mapfile)[0]:
953 953 mapname = (templater.templatepath('map-cmdline.' + mapfile)
954 954 or templater.templatepath(mapfile))
955 955 if mapname: mapfile = mapname
956 956
957 957 try:
958 958 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
959 959 except SyntaxError, inst:
960 960 raise util.Abort(inst.args[0])
961 961 if tmpl: t.use_template(tmpl)
962 962 return t
963 963
964 964 def finddate(ui, repo, date):
965 965 """Find the tipmost changeset that matches the given date spec"""
966 966 df = util.matchdate(date)
967 967 get = util.cachefunc(lambda r: repo[r].changeset())
968 968 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
969 969 results = {}
970 970 for st, rev, fns in changeiter:
971 971 if st == 'add':
972 972 d = get(rev)[2]
973 973 if df(d[0]):
974 974 results[rev] = d
975 975 elif st == 'iter':
976 976 if rev in results:
977 977 ui.status(_("Found revision %s from %s\n") %
978 978 (rev, util.datestr(results[rev])))
979 979 return str(rev)
980 980
981 981 raise util.Abort(_("revision matching date not found"))
982 982
983 983 def walkchangerevs(ui, repo, pats, change, opts):
984 984 '''Iterate over files and the revs in which they changed.
985 985
986 986 Callers most commonly need to iterate backwards over the history
987 987 in which they are interested. Doing so has awful (quadratic-looking)
988 988 performance, so we use iterators in a "windowed" way.
989 989
990 990 We walk a window of revisions in the desired order. Within the
991 991 window, we first walk forwards to gather data, then in the desired
992 992 order (usually backwards) to display it.
993 993
994 994 This function returns an (iterator, matchfn) tuple. The iterator
995 995 yields 3-tuples. They will be of one of the following forms:
996 996
997 997 "window", incrementing, lastrev: stepping through a window,
998 998 positive if walking forwards through revs, last rev in the
999 999 sequence iterated over - use to reset state for the current window
1000 1000
1001 1001 "add", rev, fns: out-of-order traversal of the given file names
1002 1002 fns, which changed during revision rev - use to gather data for
1003 1003 possible display
1004 1004
1005 1005 "iter", rev, None: in-order traversal of the revs earlier iterated
1006 1006 over with "add" - use to display data'''
1007 1007
1008 1008 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1009 1009 if start < end:
1010 1010 while start < end:
1011 1011 yield start, min(windowsize, end-start)
1012 1012 start += windowsize
1013 1013 if windowsize < sizelimit:
1014 1014 windowsize *= 2
1015 1015 else:
1016 1016 while start > end:
1017 1017 yield start, min(windowsize, start-end-1)
1018 1018 start -= windowsize
1019 1019 if windowsize < sizelimit:
1020 1020 windowsize *= 2
1021 1021
1022 1022 m = match(repo, pats, opts)
1023 1023 follow = opts.get('follow') or opts.get('follow_first')
1024 1024
1025 1025 if not len(repo):
1026 1026 return [], m
1027 1027
1028 1028 if follow:
1029 1029 defrange = '%s:0' % repo['.'].rev()
1030 1030 else:
1031 1031 defrange = '-1:0'
1032 1032 revs = revrange(repo, opts['rev'] or [defrange])
1033 1033 wanted = set()
1034 1034 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1035 1035 fncache = {}
1036 1036
1037 1037 if not slowpath and not m.files():
1038 1038 # No files, no patterns. Display all revs.
1039 1039 wanted = set(revs)
1040 1040 copies = []
1041 1041 if not slowpath:
1042 1042 # Only files, no patterns. Check the history of each file.
1043 1043 def filerevgen(filelog, node):
1044 1044 cl_count = len(repo)
1045 1045 if node is None:
1046 1046 last = len(filelog) - 1
1047 1047 else:
1048 1048 last = filelog.rev(node)
1049 1049 for i, window in increasing_windows(last, nullrev):
1050 1050 revs = []
1051 1051 for j in xrange(i - window, i + 1):
1052 1052 n = filelog.node(j)
1053 1053 revs.append((filelog.linkrev(j),
1054 1054 follow and filelog.renamed(n)))
1055 1055 revs.reverse()
1056 1056 for rev in revs:
1057 1057 # only yield rev for which we have the changelog, it can
1058 1058 # happen while doing "hg log" during a pull or commit
1059 1059 if rev[0] < cl_count:
1060 1060 yield rev
1061 1061 def iterfiles():
1062 1062 for filename in m.files():
1063 1063 yield filename, None
1064 1064 for filename_node in copies:
1065 1065 yield filename_node
1066 1066 minrev, maxrev = min(revs), max(revs)
1067 1067 for file_, node in iterfiles():
1068 1068 filelog = repo.file(file_)
1069 1069 if not len(filelog):
1070 1070 if node is None:
1071 1071 # A zero count may be a directory or deleted file, so
1072 1072 # try to find matching entries on the slow path.
1073 1073 if follow:
1074 1074 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1075 1075 slowpath = True
1076 1076 break
1077 1077 else:
1078 1078 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1079 1079 % (file_, short(node)))
1080 1080 continue
1081 1081 for rev, copied in filerevgen(filelog, node):
1082 1082 if rev <= maxrev:
1083 1083 if rev < minrev:
1084 1084 break
1085 1085 fncache.setdefault(rev, [])
1086 1086 fncache[rev].append(file_)
1087 1087 wanted.add(rev)
1088 1088 if follow and copied:
1089 1089 copies.append(copied)
1090 1090 if slowpath:
1091 1091 if follow:
1092 1092 raise util.Abort(_('can only follow copies/renames for explicit '
1093 1093 'file names'))
1094 1094
1095 1095 # The slow path checks files modified in every changeset.
1096 1096 def changerevgen():
1097 1097 for i, window in increasing_windows(len(repo) - 1, nullrev):
1098 1098 for j in xrange(i - window, i + 1):
1099 1099 yield j, change(j)[3]
1100 1100
1101 1101 for rev, changefiles in changerevgen():
1102 1102 matches = filter(m, changefiles)
1103 1103 if matches:
1104 1104 fncache[rev] = matches
1105 1105 wanted.add(rev)
1106 1106
1107 1107 class followfilter:
1108 1108 def __init__(self, onlyfirst=False):
1109 1109 self.startrev = nullrev
1110 1110 self.roots = []
1111 1111 self.onlyfirst = onlyfirst
1112 1112
1113 1113 def match(self, rev):
1114 1114 def realparents(rev):
1115 1115 if self.onlyfirst:
1116 1116 return repo.changelog.parentrevs(rev)[0:1]
1117 1117 else:
1118 1118 return filter(lambda x: x != nullrev,
1119 1119 repo.changelog.parentrevs(rev))
1120 1120
1121 1121 if self.startrev == nullrev:
1122 1122 self.startrev = rev
1123 1123 return True
1124 1124
1125 1125 if rev > self.startrev:
1126 1126 # forward: all descendants
1127 1127 if not self.roots:
1128 1128 self.roots.append(self.startrev)
1129 1129 for parent in realparents(rev):
1130 1130 if parent in self.roots:
1131 1131 self.roots.append(rev)
1132 1132 return True
1133 1133 else:
1134 1134 # backwards: all parents
1135 1135 if not self.roots:
1136 1136 self.roots.extend(realparents(self.startrev))
1137 1137 if rev in self.roots:
1138 1138 self.roots.remove(rev)
1139 1139 self.roots.extend(realparents(rev))
1140 1140 return True
1141 1141
1142 1142 return False
1143 1143
1144 1144 # it might be worthwhile to do this in the iterator if the rev range
1145 1145 # is descending and the prune args are all within that range
1146 1146 for rev in opts.get('prune', ()):
1147 1147 rev = repo.changelog.rev(repo.lookup(rev))
1148 1148 ff = followfilter()
1149 1149 stop = min(revs[0], revs[-1])
1150 1150 for x in xrange(rev, stop-1, -1):
1151 1151 if ff.match(x):
1152 1152 wanted.discard(x)
1153 1153
1154 1154 def iterate():
1155 1155 if follow and not m.files():
1156 1156 ff = followfilter(onlyfirst=opts.get('follow_first'))
1157 1157 def want(rev):
1158 1158 return ff.match(rev) and rev in wanted
1159 1159 else:
1160 1160 def want(rev):
1161 1161 return rev in wanted
1162 1162
1163 1163 for i, window in increasing_windows(0, len(revs)):
1164 1164 yield 'window', revs[0] < revs[-1], revs[-1]
1165 1165 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1166 1166 for rev in util.sort(list(nrevs)):
1167 1167 fns = fncache.get(rev)
1168 1168 if not fns:
1169 1169 def fns_generator():
1170 1170 for f in change(rev)[3]:
1171 1171 if m(f):
1172 1172 yield f
1173 1173 fns = fns_generator()
1174 1174 yield 'add', rev, fns
1175 1175 for rev in nrevs:
1176 1176 yield 'iter', rev, None
1177 1177 return iterate(), m
1178 1178
1179 1179 def commit(ui, repo, commitfunc, pats, opts):
1180 1180 '''commit the specified files or all outstanding changes'''
1181 1181 date = opts.get('date')
1182 1182 if date:
1183 1183 opts['date'] = util.parsedate(date)
1184 1184 message = logmessage(opts)
1185 1185
1186 1186 # extract addremove carefully -- this function can be called from a command
1187 1187 # that doesn't support addremove
1188 1188 if opts.get('addremove'):
1189 1189 addremove(repo, pats, opts)
1190 1190
1191 1191 m = match(repo, pats, opts)
1192 1192 if pats:
1193 1193 modified, added, removed = repo.status(match=m)[:3]
1194 1194 files = util.sort(modified + added + removed)
1195 1195
1196 1196 def is_dir(f):
1197 1197 name = f + '/'
1198 1198 i = bisect.bisect(files, name)
1199 1199 return i < len(files) and files[i].startswith(name)
1200 1200
1201 1201 for f in m.files():
1202 1202 if f == '.':
1203 1203 continue
1204 1204 if f not in files:
1205 1205 rf = repo.wjoin(f)
1206 1206 rel = repo.pathto(f)
1207 1207 try:
1208 1208 mode = os.lstat(rf)[stat.ST_MODE]
1209 1209 except OSError:
1210 1210 if is_dir(f): # deleted directory ?
1211 1211 continue
1212 1212 raise util.Abort(_("file %s not found!") % rel)
1213 1213 if stat.S_ISDIR(mode):
1214 1214 if not is_dir(f):
1215 1215 raise util.Abort(_("no match under directory %s!")
1216 1216 % rel)
1217 1217 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1218 1218 raise util.Abort(_("can't commit %s: "
1219 1219 "unsupported file type!") % rel)
1220 1220 elif f not in repo.dirstate:
1221 1221 raise util.Abort(_("file %s not tracked!") % rel)
1222 1222 m = matchfiles(repo, files)
1223 1223 try:
1224 1224 return commitfunc(ui, repo, message, m, opts)
1225 1225 except ValueError, inst:
1226 1226 raise util.Abort(str(inst))
@@ -1,3465 +1,3465 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, textwrap
12 12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 13 import difflib, patch, time, help, mdiff, tempfile, url, encoding
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 15 import merge as merge_
16 16
17 17 # Commands start here, listed alphabetically
18 18
19 19 def add(ui, repo, *pats, **opts):
20 20 """add the specified files on the next commit
21 21
22 22 Schedule files to be version controlled and added to the
23 23 repository.
24 24
25 25 The files will be added to the repository at the next commit. To
26 26 undo an add before that, see hg revert.
27 27
28 28 If no names are given, add all files to the repository.
29 29 """
30 30
31 31 rejected = None
32 32 exacts = {}
33 33 names = []
34 34 m = cmdutil.match(repo, pats, opts)
35 35 m.bad = lambda x,y: True
36 36 for abs in repo.walk(m):
37 37 if m.exact(abs):
38 38 if ui.verbose:
39 39 ui.status(_('adding %s\n') % m.rel(abs))
40 40 names.append(abs)
41 41 exacts[abs] = 1
42 42 elif abs not in repo.dirstate:
43 43 ui.status(_('adding %s\n') % m.rel(abs))
44 44 names.append(abs)
45 45 if not opts.get('dry_run'):
46 46 rejected = repo.add(names)
47 47 rejected = [p for p in rejected if p in exacts]
48 48 return rejected and 1 or 0
49 49
50 50 def addremove(ui, repo, *pats, **opts):
51 51 """add all new files, delete all missing files
52 52
53 53 Add all new files and remove all missing files from the
54 54 repository.
55 55
56 56 New files are ignored if they match any of the patterns in
57 57 .hgignore. As with add, these changes take effect at the next
58 58 commit.
59 59
60 60 Use the -s/--similarity option to detect renamed files. With a
61 61 parameter > 0, this compares every removed file with every added
62 62 file and records those similar enough as renames. This option
63 63 takes a percentage between 0 (disabled) and 100 (files must be
64 64 identical) as its parameter. Detecting renamed files this way can
65 65 be expensive.
66 66 """
67 67 try:
68 68 sim = float(opts.get('similarity') or 0)
69 69 except ValueError:
70 70 raise util.Abort(_('similarity must be a number'))
71 71 if sim < 0 or sim > 100:
72 72 raise util.Abort(_('similarity must be between 0 and 100'))
73 73 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74 74
75 75 def annotate(ui, repo, *pats, **opts):
76 76 """show changeset information per file line
77 77
78 78 List changes in files, showing the revision id responsible for
79 79 each line
80 80
81 81 This command is useful to discover who did a change or when a
82 82 change took place.
83 83
84 84 Without the -a/--text option, annotate will avoid processing files
85 85 it detects as binary. With -a, annotate will generate an
86 86 annotation anyway, probably with undesirable results.
87 87 """
88 88 datefunc = ui.quiet and util.shortdate or util.datestr
89 89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90 90
91 91 if not pats:
92 92 raise util.Abort(_('at least one file name or pattern required'))
93 93
94 94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 95 ('number', lambda x: str(x[0].rev())),
96 96 ('changeset', lambda x: short(x[0].node())),
97 97 ('date', getdate),
98 98 ('follow', lambda x: x[0].path()),
99 99 ]
100 100
101 101 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 102 and not opts.get('follow')):
103 103 opts['number'] = 1
104 104
105 105 linenumber = opts.get('line_number') is not None
106 106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 107 raise util.Abort(_('at least one of -n/-c is required for -l'))
108 108
109 109 funcmap = [func for op, func in opmap if opts.get(op)]
110 110 if linenumber:
111 111 lastfunc = funcmap[-1]
112 112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113 113
114 114 ctx = repo[opts.get('rev')]
115 115
116 116 m = cmdutil.match(repo, pats, opts)
117 117 for abs in ctx.walk(m):
118 118 fctx = ctx[abs]
119 119 if not opts.get('text') and util.binary(fctx.data()):
120 120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 121 continue
122 122
123 123 lines = fctx.annotate(follow=opts.get('follow'),
124 124 linenumber=linenumber)
125 125 pieces = []
126 126
127 127 for f in funcmap:
128 128 l = [f(n) for n, dummy in lines]
129 129 if l:
130 130 ml = max(map(len, l))
131 131 pieces.append(["%*s" % (ml, x) for x in l])
132 132
133 133 if pieces:
134 134 for p, l in zip(zip(*pieces), lines):
135 135 ui.write("%s: %s" % (" ".join(p), l[1]))
136 136
137 137 def archive(ui, repo, dest, **opts):
138 138 '''create unversioned archive of a repository revision
139 139
140 140 By default, the revision used is the parent of the working
141 141 directory; use -r/--rev to specify a different revision.
142 142
143 143 To specify the type of archive to create, use -t/--type. Valid
144 144 types are:
145 145
146 146 "files" (default): a directory full of files
147 147 "tar": tar archive, uncompressed
148 148 "tbz2": tar archive, compressed using bzip2
149 149 "tgz": tar archive, compressed using gzip
150 150 "uzip": zip archive, uncompressed
151 151 "zip": zip archive, compressed using deflate
152 152
153 153 The exact name of the destination archive or directory is given
154 154 using a format string; see 'hg help export' for details.
155 155
156 156 Each member added to an archive file has a directory prefix
157 157 prepended. Use -p/--prefix to specify a format string for the
158 158 prefix. The default is the basename of the archive, with suffixes
159 159 removed.
160 160 '''
161 161
162 162 ctx = repo[opts.get('rev')]
163 163 if not ctx:
164 164 raise util.Abort(_('no working directory: please specify a revision'))
165 165 node = ctx.node()
166 166 dest = cmdutil.make_filename(repo, dest, node)
167 167 if os.path.realpath(dest) == repo.root:
168 168 raise util.Abort(_('repository root cannot be destination'))
169 169 matchfn = cmdutil.match(repo, [], opts)
170 170 kind = opts.get('type') or 'files'
171 171 prefix = opts.get('prefix')
172 172 if dest == '-':
173 173 if kind == 'files':
174 174 raise util.Abort(_('cannot archive plain files to stdout'))
175 175 dest = sys.stdout
176 176 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 177 prefix = cmdutil.make_filename(repo, prefix, node)
178 178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 179 matchfn, prefix)
180 180
181 181 def backout(ui, repo, node=None, rev=None, **opts):
182 182 '''reverse effect of earlier changeset
183 183
184 184 Commit the backed out changes as a new changeset. The new
185 185 changeset is a child of the backed out changeset.
186 186
187 187 If you back out a changeset other than the tip, a new head is
188 188 created. This head will be the new tip and you should merge this
189 189 backout changeset with another head (current one by default).
190 190
191 191 The --merge option remembers the parent of the working directory
192 192 before starting the backout, then merges the new head with that
193 193 changeset afterwards. This saves you from doing the merge by hand.
194 194 The result of this merge is not committed, as with a normal merge.
195 195
196 196 See 'hg help dates' for a list of formats valid for -d/--date.
197 197 '''
198 198 if rev and node:
199 199 raise util.Abort(_("please specify just one revision"))
200 200
201 201 if not rev:
202 202 rev = node
203 203
204 204 if not rev:
205 205 raise util.Abort(_("please specify a revision to backout"))
206 206
207 207 date = opts.get('date')
208 208 if date:
209 209 opts['date'] = util.parsedate(date)
210 210
211 211 cmdutil.bail_if_changed(repo)
212 212 node = repo.lookup(rev)
213 213
214 214 op1, op2 = repo.dirstate.parents()
215 215 a = repo.changelog.ancestor(op1, node)
216 216 if a != node:
217 217 raise util.Abort(_('cannot back out change on a different branch'))
218 218
219 219 p1, p2 = repo.changelog.parents(node)
220 220 if p1 == nullid:
221 221 raise util.Abort(_('cannot back out a change with no parents'))
222 222 if p2 != nullid:
223 223 if not opts.get('parent'):
224 224 raise util.Abort(_('cannot back out a merge changeset without '
225 225 '--parent'))
226 226 p = repo.lookup(opts['parent'])
227 227 if p not in (p1, p2):
228 228 raise util.Abort(_('%s is not a parent of %s') %
229 229 (short(p), short(node)))
230 230 parent = p
231 231 else:
232 232 if opts.get('parent'):
233 233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 234 parent = p1
235 235
236 236 # the backout should appear on the same branch
237 237 branch = repo.dirstate.branch()
238 238 hg.clean(repo, node, show_stats=False)
239 239 repo.dirstate.setbranch(branch)
240 240 revert_opts = opts.copy()
241 241 revert_opts['date'] = None
242 242 revert_opts['all'] = True
243 243 revert_opts['rev'] = hex(parent)
244 244 revert_opts['no_backup'] = None
245 245 revert(ui, repo, **revert_opts)
246 246 commit_opts = opts.copy()
247 247 commit_opts['addremove'] = False
248 248 if not commit_opts['message'] and not commit_opts['logfile']:
249 249 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 250 commit_opts['force_editor'] = True
251 251 commit(ui, repo, **commit_opts)
252 252 def nice(node):
253 253 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 254 ui.status(_('changeset %s backs out changeset %s\n') %
255 255 (nice(repo.changelog.tip()), nice(node)))
256 256 if op1 != node:
257 257 hg.clean(repo, op1, show_stats=False)
258 258 if opts.get('merge'):
259 259 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 260 hg.merge(repo, hex(repo.changelog.tip()))
261 261 else:
262 262 ui.status(_('the backout changeset is a new head - '
263 263 'do not forget to merge\n'))
264 264 ui.status(_('(use "backout --merge" '
265 265 'if you want to auto-merge)\n'))
266 266
267 267 def bisect(ui, repo, rev=None, extra=None, command=None,
268 268 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 269 """subdivision search of changesets
270 270
271 271 This command helps to find changesets which introduce problems. To
272 272 use, mark the earliest changeset you know exhibits the problem as
273 273 bad, then mark the latest changeset which is free from the problem
274 274 as good. Bisect will update your working directory to a revision
275 275 for testing (unless the -U/--noupdate option is specified). Once
276 276 you have performed tests, mark the working directory as bad or
277 277 good and bisect will either update to another candidate changeset
278 278 or announce that it has found the bad revision.
279 279
280 280 As a shortcut, you can also use the revision argument to mark a
281 281 revision as good or bad without checking it out first.
282 282
283 283 If you supply a command it will be used for automatic bisection.
284 284 Its exit status will be used as flag to mark revision as bad or
285 285 good. In case exit status is 0 the revision is marked as good, 125
286 286 - skipped, 127 (command not found) - bisection will be aborted;
287 287 any other status bigger than 0 will mark revision as bad.
288 288 """
289 289 def print_result(nodes, good):
290 290 displayer = cmdutil.show_changeset(ui, repo, {})
291 291 if len(nodes) == 1:
292 292 # narrowed it down to a single revision
293 293 if good:
294 294 ui.write(_("The first good revision is:\n"))
295 295 else:
296 296 ui.write(_("The first bad revision is:\n"))
297 297 displayer.show(repo[nodes[0]])
298 298 else:
299 299 # multiple possible revisions
300 300 if good:
301 301 ui.write(_("Due to skipped revisions, the first "
302 302 "good revision could be any of:\n"))
303 303 else:
304 304 ui.write(_("Due to skipped revisions, the first "
305 305 "bad revision could be any of:\n"))
306 306 for n in nodes:
307 307 displayer.show(repo[n])
308 308
309 309 def check_state(state, interactive=True):
310 310 if not state['good'] or not state['bad']:
311 311 if (good or bad or skip or reset) and interactive:
312 312 return
313 313 if not state['good']:
314 314 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 315 else:
316 316 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 317 return True
318 318
319 319 # backward compatibility
320 320 if rev in "good bad reset init".split():
321 321 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 322 cmd, rev, extra = rev, extra, None
323 323 if cmd == "good":
324 324 good = True
325 325 elif cmd == "bad":
326 326 bad = True
327 327 else:
328 328 reset = True
329 329 elif extra or good + bad + skip + reset + bool(command) > 1:
330 330 raise util.Abort(_('incompatible arguments'))
331 331
332 332 if reset:
333 333 p = repo.join("bisect.state")
334 334 if os.path.exists(p):
335 335 os.unlink(p)
336 336 return
337 337
338 338 state = hbisect.load_state(repo)
339 339
340 340 if command:
341 341 commandpath = util.find_exe(command)
342 342 changesets = 1
343 343 try:
344 344 while changesets:
345 345 # update state
346 346 status = os.spawnl(os.P_WAIT, commandpath, commandpath)
347 347 if status == 125:
348 348 transition = "skip"
349 349 elif status == 0:
350 350 transition = "good"
351 351 # status < 0 means process was killed
352 352 elif status == 127:
353 353 raise util.Abort(_("failed to execute %s") % command)
354 354 elif status < 0:
355 355 raise util.Abort(_("%s killed") % command)
356 356 else:
357 357 transition = "bad"
358 358 node = repo.lookup(rev or '.')
359 359 state[transition].append(node)
360 360 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
361 361 check_state(state, interactive=False)
362 362 # bisect
363 363 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
364 364 # update to next check
365 365 cmdutil.bail_if_changed(repo)
366 366 hg.clean(repo, nodes[0], show_stats=False)
367 367 finally:
368 368 hbisect.save_state(repo, state)
369 369 return print_result(nodes, not status)
370 370
371 371 # update state
372 372 node = repo.lookup(rev or '.')
373 373 if good:
374 374 state['good'].append(node)
375 375 elif bad:
376 376 state['bad'].append(node)
377 377 elif skip:
378 378 state['skip'].append(node)
379 379
380 380 hbisect.save_state(repo, state)
381 381
382 382 if not check_state(state):
383 383 return
384 384
385 385 # actually bisect
386 386 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
387 387 if changesets == 0:
388 388 print_result(nodes, good)
389 389 else:
390 390 assert len(nodes) == 1 # only a single node can be tested next
391 391 node = nodes[0]
392 392 # compute the approximate number of remaining tests
393 393 tests, size = 0, 2
394 394 while size <= changesets:
395 395 tests, size = tests + 1, size * 2
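# after this loop, tests == floor(log2(changesets)), an estimate of the
# number of bisection steps still needed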
396 396 rev = repo.changelog.rev(node)
397 397 ui.write(_("Testing changeset %s:%s "
398 398 "(%s changesets remaining, ~%s tests)\n")
399 399 % (rev, short(node), changesets, tests))
400 400 if not noupdate:
401 401 cmdutil.bail_if_changed(repo)
402 402 return hg.clean(repo, node)
403 403
404 404 def branch(ui, repo, label=None, **opts):
405 405 """set or show the current branch name
406 406
407 407 With no argument, show the current branch name. With one argument,
408 408 set the working directory branch name (the branch does not exist
409 409 in the repository until the next commit). It is recommended to use
410 410 the 'default' branch as your primary development branch.
411 411
412 412 Unless -f/--force is specified, branch will not let you set a
413 413 branch name that shadows an existing branch.
414 414
415 415 Use -C/--clean to reset the working directory branch to that of
416 416 the parent of the working directory, negating a previous branch
417 417 change.
418 418
419 419 Use the command 'hg update' to switch to an existing branch.
420 420 """
421 421
422 422 if opts.get('clean'):
423 423 label = repo[None].parents()[0].branch()
424 424 repo.dirstate.setbranch(label)
425 425 ui.status(_('reset working directory to branch %s\n') % label)
426 426 elif label:
427 427 if not opts.get('force') and label in repo.branchtags():
428 428 if label not in [p.branch() for p in repo.parents()]:
429 429 raise util.Abort(_('a branch of the same name already exists'
430 430 ' (use --force to override)'))
431 431 repo.dirstate.setbranch(encoding.fromlocal(label))
432 432 ui.status(_('marked working directory as branch %s\n') % label)
433 433 else:
434 434 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
435 435
436 436 def branches(ui, repo, active=False):
437 437 """list repository named branches
438 438
439 439 List the repository's named branches, indicating which ones are
440 440 inactive. If active is specified, only show active branches.
441 441
442 442 A branch is considered active if it contains repository heads.
443 443
444 444 Use the command 'hg update' to switch to an existing branch.
445 445 """
446 446 hexfunc = ui.debugflag and hex or short
447 447 activebranches = [encoding.tolocal(repo[n].branch())
448 448 for n in repo.heads(closed=False)]
449 449 branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
450 450 for tag, node in repo.branchtags().items()])
451 451 branches.reverse()
452 452
453 453 for isactive, node, tag in branches:
454 454 if (not active) or isactive:
455 455 if ui.quiet:
456 456 ui.write("%s\n" % tag)
457 457 else:
458 458 hn = repo.lookup(node)
459 459 if isactive:
460 460 notice = ''
461 461 elif hn not in repo.branchheads(tag, closed=False):
462 462 notice = ' (closed)'
463 463 else:
464 464 notice = ' (inactive)'
465 465 rev = str(node).rjust(31 - encoding.colwidth(tag))
466 466 data = tag, rev, hexfunc(hn), notice
467 467 ui.write("%s %s:%s%s\n" % data)
468 468
469 469 def bundle(ui, repo, fname, dest=None, **opts):
470 470 """create a changegroup file
471 471
472 472 Generate a compressed changegroup file collecting changesets not
473 473 known to be in another repository.
474 474
475 475 If no destination repository is specified the destination is
476 476 assumed to have all the nodes specified by one or more --base
477 477 parameters. To create a bundle containing all changesets, use
478 478 -a/--all (or --base null). To change the compression method
479 479 applied, use the -t/--type option (by default, bundles are
480 480 compressed using bz2).
481 481
482 482 The bundle file can then be transferred using conventional means
483 483 and applied to another repository with the unbundle or pull
484 484 command. This is useful when direct push and pull are not
485 485 available or when exporting an entire repository is undesirable.
486 486
487 487 Applying bundles preserves all changeset contents including
488 488 permissions, copy/rename information, and revision history.
489 489 """
490 490 revs = opts.get('rev') or None
491 491 if revs:
492 492 revs = [repo.lookup(rev) for rev in revs]
493 493 if opts.get('all'):
494 494 base = ['null']
495 495 else:
496 496 base = opts.get('base')
497 497 if base:
498 498 if dest:
499 499 raise util.Abort(_("--base is incompatible with specifying "
500 500 "a destination"))
501 501 base = [repo.lookup(rev) for rev in base]
502 502 # create the right base
503 503 # XXX: nodesbetween / changegroup* should be "fixed" instead
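# 'has' will hold every node reachable from a --base node; the walk
# below collects in 'o' the nodes all of whose parents are already in
# 'has' -- these become the roots of the changesets to bundle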
504 504 o = []
505 505 has = {nullid: None}
506 506 for n in base:
507 507 has.update(repo.changelog.reachable(n))
508 508 if revs:
509 509 visit = list(revs)
510 510 else:
511 511 visit = repo.changelog.heads()
512 512 seen = {}
513 513 while visit:
514 514 n = visit.pop(0)
515 515 parents = [p for p in repo.changelog.parents(n) if p not in has]
516 516 if len(parents) == 0:
517 517 o.insert(0, n)
518 518 else:
519 519 for p in parents:
520 520 if p not in seen:
521 521 seen[p] = 1
522 522 visit.append(p)
523 523 else:
524 524 dest, revs, checkout = hg.parseurl(
525 525 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
526 526 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
527 527 o = repo.findoutgoing(other, force=opts.get('force'))
528 528
529 529 if revs:
530 530 cg = repo.changegroupsubset(o, revs, 'bundle')
531 531 else:
532 532 cg = repo.changegroup(o, 'bundle')
533 533
534 534 bundletype = opts.get('type', 'bzip2').lower()
535 535 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
536 536 bundletype = btypes.get(bundletype)
537 537 if bundletype not in changegroup.bundletypes:
538 538 raise util.Abort(_('unknown bundle type specified with --type'))
539 539
540 540 changegroup.writebundle(cg, fname, bundletype)
541 541
542 542 def cat(ui, repo, file1, *pats, **opts):
543 543 """output the current or given revision of files
544 544
545 545 Print the specified files as they were at the given revision. If
546 546 no revision is given, the parent of the working directory is used,
547 547 or tip if no revision is checked out.
548 548
549 549 Output may be to a file, in which case the name of the file is
550 550 given using a format string. The formatting rules are the same as
551 551 for the export command, with the following additions:
552 552
553 553 %s basename of file being printed
554 554 %d dirname of file being printed, or '.' if in repository root
555 555 %p root-relative path name of file being printed
556 556 """
557 557 ctx = repo[opts.get('rev')]
558 558 err = 1
559 559 m = cmdutil.match(repo, (file1,) + pats, opts)
560 560 for abs in ctx.walk(m):
561 561 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
562 562 data = ctx[abs].data()
563 563 if opts.get('decode'):
564 564 data = repo.wwritedata(abs, data)
565 565 fp.write(data)
566 566 err = 0
567 567 return err
568 568
569 569 def clone(ui, source, dest=None, **opts):
570 570 """make a copy of an existing repository
571 571
572 572 Create a copy of an existing repository in a new directory.
573 573
574 574 If no destination directory name is specified, it defaults to the
575 575 basename of the source.
576 576
577 577 The location of the source is added to the new repository's
578 578 .hg/hgrc file, as the default to be used for future pulls.
579 579
580 580 If you use the -r/--rev option to clone up to a specific revision,
581 581 no subsequent revisions (including subsequent tags) will be
582 582 present in the cloned repository. This option implies --pull, even
583 583 on local repositories.
584 584
585 585 By default, clone will check out the head of the 'default' branch.
586 586 If the -U/--noupdate option is used, the new clone will contain
587 587 only a repository (.hg) and no working copy (the working copy
588 588 parent is the null revision).
589 589
590 590 See 'hg help urls' for valid source format details.
591 591
592 592 It is possible to specify an ssh:// URL as the destination, but no
593 593 .hg/hgrc and working directory will be created on the remote side.
594 594 Look at the help text for URLs for important details about ssh://
595 595 URLs.
596 596
597 597 For efficiency, hardlinks are used for cloning whenever the source
598 598 and destination are on the same filesystem (note this applies only
599 599 to the repository data, not to the checked out files). Some
600 600 filesystems, such as AFS, implement hardlinking incorrectly, but
601 601 do not report errors. In these cases, use the --pull option to
602 602 avoid hardlinking.
603 603
604 604 In some cases, you can clone repositories and checked out files
605 605 using full hardlinks with
606 606
607 607 $ cp -al REPO REPOCLONE
608 608
609 609 This is the fastest way to clone, but it is not always safe. The
610 610 operation is not atomic (making sure REPO is not modified during
611 611 the operation is up to you) and you have to make sure your editor
612 612 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
613 613 this is not compatible with certain extensions that place their
614 614 metadata under the .hg directory, such as mq.
615 615
616 616 """
617 617 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
618 618 pull=opts.get('pull'),
619 619 stream=opts.get('uncompressed'),
620 620 rev=opts.get('rev'),
621 621 update=not opts.get('noupdate'))
622 622
623 623 def commit(ui, repo, *pats, **opts):
624 624 """commit the specified files or all outstanding changes
625 625
626 626 Commit changes to the given files into the repository. Unlike a
627 627 centralized RCS, this operation is a local operation. See hg push
628 628 for means to actively distribute your changes.
629 629
630 630 If a list of files is omitted, all changes reported by "hg status"
631 631 will be committed.
632 632
633 633 If you are committing the result of a merge, do not provide any
634 634 file names or -I/-X filters.
635 635
636 636 If no commit message is specified, the configured editor is
637 637 started to prompt you for a message.
638 638
639 639 See 'hg help dates' for a list of formats valid for -d/--date.
640 640 """
641 641 extra = {}
642 642 if opts.get('close_branch'):
643 643 extra['close'] = 1
644 644 def commitfunc(ui, repo, message, match, opts):
645 645 return repo.commit(match.files(), message, opts.get('user'),
646 646 opts.get('date'), match, force_editor=opts.get('force_editor'),
647 647 extra=extra)
648 648
649 649 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
650 650 if not node:
651 651 return
652 652 cl = repo.changelog
653 653 rev = cl.rev(node)
654 654 parents = cl.parentrevs(rev)
655 655 if rev - 1 in parents:
656 656 # one of the parents was the old tip
657 657 pass
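# a new head was created if the commit has no parents at all, or if
# each of its parents still has another head descended from it besides
# this commit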
658 658 elif (parents == (nullrev, nullrev) or
659 659 len(cl.heads(cl.node(parents[0]))) > 1 and
660 660 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
661 661 ui.status(_('created new head\n'))
662 662
663 663 if ui.debugflag:
664 664 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
665 665 elif ui.verbose:
666 666 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
667 667
668 668 def copy(ui, repo, *pats, **opts):
669 669 """mark files as copied for the next commit
670 670
671 671 Mark dest as having copies of source files. If dest is a
672 672 directory, copies are put in that directory. If dest is a file,
673 673 the source must be a single file.
674 674
675 675 By default, this command copies the contents of files as they
676 676 stand in the working directory. If invoked with -A/--after, the
677 677 operation is recorded, but no copying is performed.
678 678
679 679 This command takes effect with the next commit. To undo a copy
680 680 before that, see hg revert.
681 681 """
682 682 wlock = repo.wlock(False)
683 683 try:
684 684 return cmdutil.copy(ui, repo, pats, opts)
685 685 finally:
686 686 wlock.release()
687 687
688 688 def debugancestor(ui, repo, *args):
689 689 """find the ancestor revision of two revisions in a given index"""
690 690 if len(args) == 3:
691 691 index, rev1, rev2 = args
692 692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
693 693 lookup = r.lookup
694 694 elif len(args) == 2:
695 695 if not repo:
696 696 raise util.Abort(_("There is no Mercurial repository here "
697 697 "(.hg not found)"))
698 698 rev1, rev2 = args
699 699 r = repo.changelog
700 700 lookup = repo.lookup
701 701 else:
702 702 raise util.Abort(_('either two or three arguments required'))
703 703 a = r.ancestor(lookup(rev1), lookup(rev2))
704 704 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
705 705
706 706 def debugcommands(ui, cmd='', *args):
707 707 for cmd, vals in util.sort(table.iteritems()):
708 708 cmd = cmd.split('|')[0].strip('^')
709 709 opts = ', '.join([i[1] for i in vals[1]])
710 710 ui.write('%s: %s\n' % (cmd, opts))
711 711
712 712 def debugcomplete(ui, cmd='', **opts):
713 713 """returns the completion list associated with the given command"""
714 714
715 715 if opts.get('options'):
716 716 options = []
717 717 otables = [globalopts]
718 718 if cmd:
719 719 aliases, entry = cmdutil.findcmd(cmd, table, False)
720 720 otables.append(entry[1])
721 721 for t in otables:
722 722 for o in t:
723 723 if o[0]:
724 724 options.append('-%s' % o[0])
725 725 options.append('--%s' % o[1])
726 726 ui.write("%s\n" % "\n".join(options))
727 727 return
728 728
729 729 cmdlist = cmdutil.findpossible(cmd, table)
730 730 if ui.verbose:
731 731 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
732 732 ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
733 733
734 734 def debugfsinfo(ui, path = "."):
735 735 file('.debugfsinfo', 'w').write('')
736 736 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
737 737 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
738 738 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
739 739 and 'yes' or 'no'))
740 740 os.unlink('.debugfsinfo')
741 741
742 742 def debugrebuildstate(ui, repo, rev="tip"):
743 743 """rebuild the dirstate as it would look like for the given revision"""
744 744 ctx = repo[rev]
745 745 wlock = repo.wlock()
746 746 try:
747 747 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
748 748 finally:
749 749 wlock.release()
750 750
751 751 def debugcheckstate(ui, repo):
752 752 """validate the correctness of the current dirstate"""
753 753 parent1, parent2 = repo.dirstate.parents()
754 754 m1 = repo[parent1].manifest()
755 755 m2 = repo[parent2].manifest()
756 756 errors = 0
757 757 for f in repo.dirstate:
758 758 state = repo.dirstate[f]
759 759 if state in "nr" and f not in m1:
760 760 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
761 761 errors += 1
762 762 if state in "a" and f in m1:
763 763 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
764 764 errors += 1
765 765 if state in "m" and f not in m1 and f not in m2:
766 766 ui.warn(_("%s in state %s, but not in either manifest\n") %
767 767 (f, state))
768 768 errors += 1
769 769 for f in m1:
770 770 state = repo.dirstate[f]
771 771 if state not in "nrm":
772 772 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
773 773 errors += 1
774 774 if errors:
775 775 error = _(".hg/dirstate inconsistent with current parent's manifest")
776 776 raise util.Abort(error)
777 777
778 778 def showconfig(ui, repo, *values, **opts):
779 779 """show combined config settings from all hgrc files
780 780
781 781 With no args, print names and values of all config items.
782 782
783 783 With one arg of the form section.name, print just the value of
784 784 that config item.
785 785
786 786 With multiple args, print names and values of all config items
787 787 with matching section names."""
788 788
789 789 untrusted = bool(opts.get('untrusted'))
790 790 if values:
791 791 if len([v for v in values if '.' in v]) > 1:
792 792 raise util.Abort(_('only one config item permitted'))
793 793 for section, name, value in ui.walkconfig(untrusted=untrusted):
794 794 sectname = section + '.' + name
795 795 if values:
796 796 for v in values:
797 797 if v == section:
798 798 ui.debug('%s: ' %
799 799 ui.configsource(section, name, untrusted))
800 800 ui.write('%s=%s\n' % (sectname, value))
801 801 elif v == sectname:
802 802 ui.debug('%s: ' %
803 803 ui.configsource(section, name, untrusted))
804 804 ui.write(value, '\n')
805 805 else:
806 806 ui.debug('%s: ' %
807 807 ui.configsource(section, name, untrusted))
808 808 ui.write('%s=%s\n' % (sectname, value))
809 809
810 810 def debugsetparents(ui, repo, rev1, rev2=None):
811 811 """manually set the parents of the current working directory
812 812
813 813 This is useful for writing repository conversion tools, but should
814 814 be used with care.
815 815 """
816 816
817 817 if not rev2:
818 818 rev2 = hex(nullid)
819 819
820 820 wlock = repo.wlock()
821 821 try:
822 822 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
823 823 finally:
824 824 wlock.release()
825 825
826 826 def debugstate(ui, repo, nodates=None):
827 827 """show the contents of the current dirstate"""
828 828 timestr = ""
829 829 showdate = not nodates
830 830 for file_, ent in util.sort(repo.dirstate._map.iteritems()):
831 831 if showdate:
832 832 if ent[3] == -1:
833 833 # Pad or slice to locale representation
834 834 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
835 835 timestr = 'unset'
836 836 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
837 837 else:
838 838 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
839 839 if ent[1] & 020000:
840 840 mode = 'lnk'
841 841 else:
842 842 mode = '%3o' % (ent[1] & 0777)
843 843 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
844 844 for f in repo.dirstate.copies():
845 845 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
846 846
847 847 def debugdata(ui, file_, rev):
848 848 """dump the contents of a data file revision"""
849 849 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
850 850 try:
851 851 ui.write(r.revision(r.lookup(rev)))
852 852 except KeyError:
853 853 raise util.Abort(_('invalid revision identifier %s') % rev)
854 854
855 855 def debugdate(ui, date, range=None, **opts):
856 856 """parse and display a date"""
857 857 if opts["extended"]:
858 858 d = util.parsedate(date, util.extendeddateformats)
859 859 else:
860 860 d = util.parsedate(date)
861 861 ui.write("internal: %s %s\n" % d)
862 862 ui.write("standard: %s\n" % util.datestr(d))
863 863 if range:
864 864 m = util.matchdate(range)
865 865 ui.write("match: %s\n" % m(d[0]))
866 866
867 867 def debugindex(ui, file_):
868 868 """dump the contents of an index file"""
869 869 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
870 870 ui.write(" rev offset length base linkrev"
871 871 " nodeid p1 p2\n")
872 872 for i in r:
873 873 node = r.node(i)
874 874 try:
875 875 pp = r.parents(node)
876 876 except:
877 877 pp = [nullid, nullid]
878 878 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
879 879 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
880 880 short(node), short(pp[0]), short(pp[1])))
881 881
882 882 def debugindexdot(ui, file_):
883 883 """dump an index DAG as a .dot file"""
884 884 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
885 885 ui.write("digraph G {\n")
886 886 for i in r:
887 887 node = r.node(i)
888 888 pp = r.parents(node)
889 889 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
890 890 if pp[1] != nullid:
891 891 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
892 892 ui.write("}\n")
893 893
894 894 def debuginstall(ui):
895 895 '''test Mercurial installation'''
896 896
897 897 def writetemp(contents):
898 898 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
899 899 f = os.fdopen(fd, "wb")
900 900 f.write(contents)
901 901 f.close()
902 902 return name
903 903
904 904 problems = 0
905 905
906 906 # encoding
907 907 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
908 908 try:
909 909 encoding.fromlocal("test")
910 910 except util.Abort, inst:
911 911 ui.write(" %s\n" % inst)
912 912 ui.write(_(" (check that your locale is properly set)\n"))
913 913 problems += 1
914 914
915 915 # compiled modules
916 916 ui.status(_("Checking extensions...\n"))
917 917 try:
918 918 import bdiff, mpatch, base85
919 919 except Exception, inst:
920 920 ui.write(" %s\n" % inst)
921 921 ui.write(_(" One or more extensions could not be found"))
922 922 ui.write(_(" (check that you compiled the extensions)\n"))
923 923 problems += 1
924 924
925 925 # templates
926 926 ui.status(_("Checking templates...\n"))
927 927 try:
928 928 import templater
929 929 templater.templater(templater.templatepath("map-cmdline.default"))
930 930 except Exception, inst:
931 931 ui.write(" %s\n" % inst)
932 932 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
933 933 problems += 1
934 934
935 935 # patch
936 936 ui.status(_("Checking patch...\n"))
937 937 patchproblems = 0
938 938 a = "1\n2\n3\n4\n"
939 939 b = "1\n2\n3\ninsert\n4\n"
940 940 fa = writetemp(a)
941 941 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
942 942 os.path.basename(fa))
943 943 fd = writetemp(d)
944 944
945 945 files = {}
946 946 try:
947 947 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
948 948 except util.Abort, e:
949 949 ui.write(_(" patch call failed:\n"))
950 950 ui.write(" " + str(e) + "\n")
951 951 patchproblems += 1
952 952 else:
953 953 if list(files) != [os.path.basename(fa)]:
954 954 ui.write(_(" unexpected patch output!\n"))
955 955 patchproblems += 1
956 956 a = file(fa).read()
957 957 if a != b:
958 958 ui.write(_(" patch test failed!\n"))
959 959 patchproblems += 1
960 960
961 961 if patchproblems:
962 962 if ui.config('ui', 'patch'):
963 963 ui.write(_(" (Current patch tool may be incompatible with patch,"
964 964 " or misconfigured. Please check your .hgrc file)\n"))
965 965 else:
966 966 ui.write(_(" Internal patcher failure, please report this error"
967 967 " to http://www.selenic.com/mercurial/bts\n"))
968 968 problems += patchproblems
969 969
970 970 os.unlink(fa)
971 971 os.unlink(fd)
972 972
973 973 # editor
974 974 ui.status(_("Checking commit editor...\n"))
975 975 editor = ui.geteditor()
976 976 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
977 977 if not cmdpath:
978 978 if editor == 'vi':
979 979 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
980 980 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
981 981 else:
982 982 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
983 983 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
984 984 problems += 1
985 985
986 986 # check username
987 987 ui.status(_("Checking username...\n"))
988 988 user = os.environ.get("HGUSER")
989 989 if user is None:
990 990 user = ui.config("ui", "username")
991 991 if user is None:
992 992 user = os.environ.get("EMAIL")
993 993 if not user:
994 994 ui.warn(" ")
995 995 ui.username()
996 996 ui.write(_(" (specify a username in your .hgrc file)\n"))
997 997
998 998 if not problems:
999 999 ui.status(_("No problems detected\n"))
1000 1000 else:
1001 1001 ui.write(_("%s problems detected,"
1002 1002 " please check your install!\n") % problems)
1003 1003
1004 1004 return problems
1005 1005
1006 1006 def debugrename(ui, repo, file1, *pats, **opts):
1007 1007 """dump rename information"""
1008 1008
1009 1009 ctx = repo[opts.get('rev')]
1010 1010 m = cmdutil.match(repo, (file1,) + pats, opts)
1011 1011 for abs in ctx.walk(m):
1012 1012 fctx = ctx[abs]
1013 1013 o = fctx.filelog().renamed(fctx.filenode())
1014 1014 rel = m.rel(abs)
1015 1015 if o:
1016 1016 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1017 1017 else:
1018 1018 ui.write(_("%s not renamed\n") % rel)
1019 1019
1020 1020 def debugwalk(ui, repo, *pats, **opts):
1021 1021 """show how files match on given patterns"""
1022 1022 m = cmdutil.match(repo, pats, opts)
1023 1023 items = list(repo.walk(m))
1024 1024 if not items:
1025 1025 return
1026 1026 fmt = 'f %%-%ds %%-%ds %%s' % (
1027 1027 max([len(abs) for abs in items]),
1028 1028 max([len(m.rel(abs)) for abs in items]))
1029 1029 for abs in items:
1030 1030 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1031 1031 ui.write("%s\n" % line.rstrip())
1032 1032
1033 1033 def diff(ui, repo, *pats, **opts):
1034 1034 """diff repository (or selected files)
1035 1035
1036 1036 Show differences between revisions for the specified files.
1037 1037
1038 1038 Differences between files are shown using the unified diff format.
1039 1039
1040 1040 NOTE: diff may generate unexpected results for merges, as it will
1041 1041 default to comparing against the working directory's first parent
1042 1042 changeset if no revisions are specified.
1043 1043
1044 1044 When two revision arguments are given, then changes are shown
1045 1045 between those revisions. If only one revision is specified then
1046 1046 that revision is compared to the working directory, and, when no
1047 1047 revisions are specified, the working directory files are compared
1048 1048 to its parent.
1049 1049
1050 1050 Without the -a/--text option, diff will avoid generating diffs of
1051 1051 files it detects as binary. With -a, diff will generate a diff
1052 1052 anyway, probably with undesirable results.
1053 1053
1054 1054 Use the -g/--git option to generate diffs in the git extended diff
1055 1055 format. For more information, read 'hg help diffs'.
1056 1056 """
1057 1057
1058 1058 revs = opts.get('rev')
1059 1059 change = opts.get('change')
1060 1060
1061 1061 if revs and change:
1062 1062 msg = _('cannot specify --rev and --change at the same time')
1063 1063 raise util.Abort(msg)
1064 1064 elif change:
1065 1065 node2 = repo.lookup(change)
1066 1066 node1 = repo[node2].parents()[0].node()
1067 1067 else:
1068 1068 node1, node2 = cmdutil.revpair(repo, revs)
1069 1069
1070 1070 m = cmdutil.match(repo, pats, opts)
1071 1071 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1072 1072 for chunk in it:
1073 1073 repo.ui.write(chunk)
1074 1074
1075 1075 def export(ui, repo, *changesets, **opts):
1076 1076 """dump the header and diffs for one or more changesets
1077 1077
1078 1078 Print the changeset header and diffs for one or more revisions.
1079 1079
1080 1080 The information shown in the changeset header is: author,
1081 1081 changeset hash, parent(s) and commit comment.
1082 1082
1083 1083 NOTE: export may generate unexpected diff output for merge
1084 1084 changesets, as it will compare the merge changeset against its
1085 1085 first parent only.
1086 1086
1087 1087 Output may be to a file, in which case the name of the file is
1088 1088 given using a format string. The formatting rules are as follows:
1089 1089
1090 1090 %% literal "%" character
1091 1091 %H changeset hash (40 bytes of hexadecimal)
1092 1092 %N number of patches being generated
1093 1093 %R changeset revision number
1094 1094 %b basename of the exporting repository
1095 1095 %h short-form changeset hash (12 bytes of hexadecimal)
1096 1096 %n zero-padded sequence number, starting at 1
1097 1097 %r zero-padded changeset revision number
1098 1098
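For example, 'hg export -o "%b-r%R.patch" REV' writes the patch for
revision REV to a file named after the repository and the revision
number.
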
1099 1099 Without the -a/--text option, export will avoid generating diffs
1100 1100 of files it detects as binary. With -a, export will generate a
1101 1101 diff anyway, probably with undesirable results.
1102 1102
1103 1103 Use the -g/--git option to generate diffs in the git extended diff
1104 1104 format. Read the diffs help topic for more information.
1105 1105
1106 1106 With the --switch-parent option, the diff will be against the
1107 1107 second parent. It can be useful to review a merge.
1108 1108 """
1109 1109 if not changesets:
1110 1110 raise util.Abort(_("export requires at least one changeset"))
1111 1111 revs = cmdutil.revrange(repo, changesets)
1112 1112 if len(revs) > 1:
1113 1113 ui.note(_('exporting patches:\n'))
1114 1114 else:
1115 1115 ui.note(_('exporting patch:\n'))
1116 1116 patch.export(repo, revs, template=opts.get('output'),
1117 1117 switch_parent=opts.get('switch_parent'),
1118 1118 opts=patch.diffopts(ui, opts))
1119 1119
1120 1120 def grep(ui, repo, pattern, *pats, **opts):
1121 1121 """search for a pattern in specified files and revisions
1122 1122
1123 1123 Search revisions of files for a regular expression.
1124 1124
1125 1125 This command behaves differently than Unix grep. It only accepts
1126 1126 Python/Perl regexps. It searches repository history, not the
1127 1127 working directory. It always prints the revision number in which a
1128 1128 match appears.
1129 1129
1130 1130 By default, grep only prints output for the first revision of a
1131 1131 file in which it finds a match. To get it to print every revision
1132 1132 that contains a change in match status ("-" for a match that
1133 1133 becomes a non-match, or "+" for a non-match that becomes a match),
1134 1134 use the --all flag.
1135 1135 """
1136 1136 reflags = 0
1137 1137 if opts.get('ignore_case'):
1138 1138 reflags |= re.I
1139 1139 try:
1140 1140 regexp = re.compile(pattern, reflags)
1141 1141 except Exception, inst:
1142 1142 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1143 1143 return None
1144 1144 sep, eol = ':', '\n'
1145 1145 if opts.get('print0'):
1146 1146 sep = eol = '\0'
1147 1147
1148 1148 fcache = {}
1149 1149 def getfile(fn):
1150 1150 if fn not in fcache:
1151 1151 fcache[fn] = repo.file(fn)
1152 1152 return fcache[fn]
1153 1153
1154 1154 def matchlines(body):
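# generator: for every regexp match in body, yield the line number, the
# match's start and end columns within that line, and the line text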
1155 1155 begin = 0
1156 1156 linenum = 0
1157 1157 while True:
1158 1158 match = regexp.search(body, begin)
1159 1159 if not match:
1160 1160 break
1161 1161 mstart, mend = match.span()
1162 1162 linenum += body.count('\n', begin, mstart) + 1
1163 1163 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1164 1164 begin = body.find('\n', mend) + 1 or len(body)
1165 1165 lend = begin - 1
1166 1166 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1167 1167
1168 1168 class linestate(object):
1169 1169 def __init__(self, line, linenum, colstart, colend):
1170 1170 self.line = line
1171 1171 self.linenum = linenum
1172 1172 self.colstart = colstart
1173 1173 self.colend = colend
1174 1174
1175 1175 def __hash__(self):
1176 1176 return hash((self.linenum, self.line))
1177 1177
1178 1178 def __eq__(self, other):
1179 1179 return self.line == other.line
1180 1180
1181 1181 matches = {}
1182 1182 copies = {}
1183 1183 def grepbody(fn, rev, body):
1184 1184 matches[rev].setdefault(fn, [])
1185 1185 m = matches[rev][fn]
1186 1186 for lnum, cstart, cend, line in matchlines(body):
1187 1187 s = linestate(line, lnum, cstart, cend)
1188 1188 m.append(s)
1189 1189
1190 1190 def difflinestates(a, b):
1191 1191 sm = difflib.SequenceMatcher(None, a, b)
1192 1192 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1193 1193 if tag == 'insert':
1194 1194 for i in xrange(blo, bhi):
1195 1195 yield ('+', b[i])
1196 1196 elif tag == 'delete':
1197 1197 for i in xrange(alo, ahi):
1198 1198 yield ('-', a[i])
1199 1199 elif tag == 'replace':
1200 1200 for i in xrange(alo, ahi):
1201 1201 yield ('-', a[i])
1202 1202 for i in xrange(blo, bhi):
1203 1203 yield ('+', b[i])
1204 1204
1205 1205 prev = {}
1206 1206 def display(fn, rev, states, prevstates):
1207 1207 datefunc = ui.quiet and util.shortdate or util.datestr
1208 1208 found = False
1209 1209 filerevmatches = {}
1210 1210 r = prev.get(fn, -1)
1211 1211 if opts.get('all'):
1212 1212 iter = difflinestates(states, prevstates)
1213 1213 else:
1214 1214 iter = [('', l) for l in prevstates]
1215 1215 for change, l in iter:
1216 1216 cols = [fn, str(r)]
1217 1217 if opts.get('line_number'):
1218 1218 cols.append(str(l.linenum))
1219 1219 if opts.get('all'):
1220 1220 cols.append(change)
1221 1221 if opts.get('user'):
1222 1222 cols.append(ui.shortuser(get(r)[1]))
1223 1223 if opts.get('date'):
1224 1224 cols.append(datefunc(get(r)[2]))
1225 1225 if opts.get('files_with_matches'):
1226 1226 c = (fn, r)
1227 1227 if c in filerevmatches:
1228 1228 continue
1229 1229 filerevmatches[c] = 1
1230 1230 else:
1231 1231 cols.append(l.line)
1232 1232 ui.write(sep.join(cols), eol)
1233 1233 found = True
1234 1234 return found
1235 1235
1236 1236 fstate = {}
1237 1237 skip = {}
1238 1238 get = util.cachefunc(lambda r: repo[r].changeset())
1239 1239 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1240 1240 found = False
1241 1241 follow = opts.get('follow')
1242 1242 for st, rev, fns in changeiter:
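# changeiter protocol: a 'window' event starts a new batch (the matches
# cache is cleared), 'add' collects matches for rev, and 'iter' reports
# the revision's matches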
1243 1243 if st == 'window':
1244 1244 matches.clear()
1245 1245 elif st == 'add':
1246 1246 ctx = repo[rev]
1247 1247 matches[rev] = {}
1248 1248 for fn in fns:
1249 1249 if fn in skip:
1250 1250 continue
1251 1251 try:
1252 1252 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1253 1253 fstate.setdefault(fn, [])
1254 1254 if follow:
1255 1255 copied = getfile(fn).renamed(ctx.filenode(fn))
1256 1256 if copied:
1257 1257 copies.setdefault(rev, {})[fn] = copied[0]
1258 1258 except error.LookupError:
1259 1259 pass
1260 1260 elif st == 'iter':
1261 1261 for fn, m in util.sort(matches[rev].items()):
1262 1262 copy = copies.get(rev, {}).get(fn)
1263 1263 if fn in skip:
1264 1264 if copy:
1265 1265 skip[copy] = True
1266 1266 continue
1267 1267 if fn in prev or fstate[fn]:
1268 1268 r = display(fn, rev, m, fstate[fn])
1269 1269 found = found or r
1270 1270 if r and not opts.get('all'):
1271 1271 skip[fn] = True
1272 1272 if copy:
1273 1273 skip[copy] = True
1274 1274 fstate[fn] = m
1275 1275 if copy:
1276 1276 fstate[copy] = m
1277 1277 prev[fn] = rev
1278 1278
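# final pass: report any matches still pending once the revision walk
# has finished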
1279 1279 for fn, state in util.sort(fstate.items()):
1280 1280 if fn in skip:
1281 1281 continue
1282 1282 if fn not in copies.get(prev[fn], {}):
1283 1283 found = display(fn, rev, {}, state) or found
1284 1284 return (not found and 1) or 0
1285 1285
1286 1286 def heads(ui, repo, *branchrevs, **opts):
1287 1287 """show current repository heads or show branch heads
1288 1288
1289 1289 With no arguments, show all repository head changesets.
1290 1290
1291 1291 If branch or revision names are given, this will show the heads of
1292 1292 the specified branches or the branches those revisions are tagged
1293 1293 with.
1294 1294
1295 1295 Repository "heads" are changesets that don't have child
1296 1296 changesets. They are where development generally takes place and
1297 1297 are the usual targets for update and merge operations.
1298 1298
1299 1299 Branch heads are changesets that have a given branch tag, but have
1300 1300 no child changesets with that tag. They are usually where
1301 1301 development on the given branch takes place.
1302 1302 """
1303 1303 if opts.get('rev'):
1304 1304 start = repo.lookup(opts['rev'])
1305 1305 else:
1306 1306 start = None
1307 1307 closed = not opts.get('active')
1308 1308 if not branchrevs:
1309 1309 # Assume we're looking for repo-wide heads if no revs were specified.
1310 1310 heads = repo.heads(start, closed=closed)
1311 1311 else:
1312 1312 heads = []
1313 1313 visitedset = set()
1314 1314 for branchrev in branchrevs:
1315 1315 branch = repo[branchrev].branch()
1316 1316 if branch in visitedset:
1317 1317 continue
1318 1318 visitedset.add(branch)
1319 1319 bheads = repo.branchheads(branch, start, closed=closed)
1320 1320 if not bheads:
1321 1321 if branch != branchrev:
1322 1322 ui.warn(_("no changes on branch %s containing %s are "
1323 1323 "reachable from %s\n")
1324 1324 % (branch, branchrev, opts.get('rev')))
1325 1325 else:
1326 1326 ui.warn(_("no changes on branch %s are reachable from %s\n")
1327 1327 % (branch, opts.get('rev')))
1328 1328 heads.extend(bheads)
1329 1329 if not heads:
1330 1330 return 1
1331 1331 displayer = cmdutil.show_changeset(ui, repo, opts)
1332 1332 for n in heads:
1333 1333 displayer.show(repo[n])
1334 1334
1335 1335 def help_(ui, name=None, with_version=False):
1336 1336 """show help for a given topic or a help overview
1337 1337
1338 1338 With no arguments, print a list of commands and short help.
1339 1339
1340 1340 Given a topic, extension, or command name, print help for that
1341 1341 topic."""
1342 1342 option_lists = []
1343 1343
1344 1344 def addglobalopts(aliases):
1345 1345 if ui.verbose:
1346 1346 option_lists.append((_("global options:"), globalopts))
1347 1347 if name == 'shortlist':
1348 1348 option_lists.append((_('use "hg help" for the full list '
1349 1349 'of commands'), ()))
1350 1350 else:
1351 1351 if name == 'shortlist':
1352 1352 msg = _('use "hg help" for the full list of commands '
1353 1353 'or "hg -v" for details')
1354 1354 elif aliases:
1355 1355 msg = _('use "hg -v help%s" to show aliases and '
1356 1356 'global options') % (name and " " + name or "")
1357 1357 else:
1358 1358 msg = _('use "hg -v help %s" to show global options') % name
1359 1359 option_lists.append((msg, ()))
1360 1360
1361 1361 def helpcmd(name):
1362 1362 if with_version:
1363 1363 version_(ui)
1364 1364 ui.write('\n')
1365 1365
1366 1366 try:
1367 1367 aliases, i = cmdutil.findcmd(name, table, False)
1368 1368 except error.AmbiguousCommand, inst:
1369 1369 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1370 1370 helplist(_('list of commands:\n\n'), select)
1371 1371 return
1372 1372
1373 1373 # synopsis
1374 1374 if len(i) > 2:
1375 1375 if i[2].startswith('hg'):
1376 1376 ui.write("%s\n" % i[2])
1377 1377 else:
1378 1378 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1379 1379 else:
1380 1380 ui.write('hg %s\n' % aliases[0])
1381 1381
1382 1382 # aliases
1383 1383 if not ui.quiet and len(aliases) > 1:
1384 1384 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1385 1385
1386 1386 # description
1387 1387 doc = gettext(i[0].__doc__)
1388 1388 if not doc:
1389 1389 doc = _("(no help text available)")
1390 1390 if ui.quiet:
1391 1391 doc = doc.splitlines(0)[0]
1392 1392 ui.write("\n%s\n" % doc.rstrip())
1393 1393
1394 1394 if not ui.quiet:
1395 1395 # options
1396 1396 if i[1]:
1397 1397 option_lists.append((_("options:\n"), i[1]))
1398 1398
1399 1399 addglobalopts(False)
1400 1400
1401 1401 def helplist(header, select=None):
1402 1402 h = {}
1403 1403 cmds = {}
1404 1404 for c, e in table.iteritems():
1405 1405 f = c.split("|", 1)[0]
1406 1406 if select and not select(f):
1407 1407 continue
1408 1408 if (not select and name != 'shortlist' and
1409 1409 e[0].__module__ != __name__):
1410 1410 continue
1411 1411 if name == "shortlist" and not f.startswith("^"):
1412 1412 continue
1413 1413 f = f.lstrip("^")
1414 1414 if not ui.debugflag and f.startswith("debug"):
1415 1415 continue
1416 1416 doc = gettext(e[0].__doc__)
1417 1417 if not doc:
1418 1418 doc = _("(no help text available)")
1419 1419 h[f] = doc.splitlines(0)[0].rstrip()
1420 1420 cmds[f] = c.lstrip("^")
1421 1421
1422 1422 if not h:
1423 1423 ui.status(_('no commands defined\n'))
1424 1424 return
1425 1425
1426 1426 ui.status(header)
1427 1427 fns = util.sort(h)
1428 1428 m = max(map(len, fns))
1429 1429 for f in fns:
1430 1430 if ui.verbose:
1431 1431 commands = cmds[f].replace("|",", ")
1432 1432 ui.write(" %s:\n %s\n"%(commands, h[f]))
1433 1433 else:
1434 1434 ui.write(' %-*s %s\n' % (m, f, h[f]))
1435 1435
1436 1436 exts = list(extensions.extensions())
1437 1437 if exts and name != 'shortlist':
1438 1438 ui.write(_('\nenabled extensions:\n\n'))
1439 1439 maxlength = 0
1440 1440 exthelps = []
1441 1441 for ename, ext in exts:
1442 1442 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1443 1443 ename = ename.split('.')[-1]
1444 1444 maxlength = max(len(ename), maxlength)
1445 1445 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1446 1446 for ename, text in exthelps:
1447 1447 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1448 1448
1449 1449 if not ui.quiet:
1450 1450 addglobalopts(True)
1451 1451
1452 1452 def helptopic(name):
1453 1453 for names, header, doc in help.helptable:
1454 1454 if name in names:
1455 1455 break
1456 1456 else:
1457 1457 raise error.UnknownCommand(name)
1458 1458
1459 1459 # description
1460 1460 if not doc:
1461 1461 doc = _("(no help text available)")
1462 1462 if callable(doc):
1463 1463 doc = doc()
1464 1464
1465 1465 ui.write("%s\n" % header)
1466 1466 ui.write("%s\n" % doc.rstrip())
1467 1467
1468 1468 def helpext(name):
1469 1469 try:
1470 1470 mod = extensions.find(name)
1471 1471 except KeyError:
1472 1472 raise error.UnknownCommand(name)
1473 1473
1474 1474 doc = gettext(mod.__doc__) or _('no help text available')
1475 1475 doc = doc.splitlines(0)
1476 1476 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1477 1477 for d in doc[1:]:
1478 1478 ui.write(d, '\n')
1479 1479
1480 1480 ui.status('\n')
1481 1481
1482 1482 try:
1483 1483 ct = mod.cmdtable
1484 1484 except AttributeError:
1485 1485 ct = {}
1486 1486
1487 1487 modcmds = set([c.split('|', 1)[0] for c in ct])
1488 1488 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1489 1489
1490 1490 if name and name != 'shortlist':
1491 1491 i = None
1492 1492 for f in (helptopic, helpcmd, helpext):
1493 1493 try:
1494 1494 f(name)
1495 1495 i = None
1496 1496 break
1497 1497 except error.UnknownCommand, inst:
1498 1498 i = inst
1499 1499 if i:
1500 1500 raise i
1501 1501
1502 1502 else:
1503 1503 # program name
1504 1504 if ui.verbose or with_version:
1505 1505 version_(ui)
1506 1506 else:
1507 1507 ui.status(_("Mercurial Distributed SCM\n"))
1508 1508 ui.status('\n')
1509 1509
1510 1510 # list of commands
1511 1511 if name == "shortlist":
1512 1512 header = _('basic commands:\n\n')
1513 1513 else:
1514 1514 header = _('list of commands:\n\n')
1515 1515
1516 1516 helplist(header)
1517 1517
1518 1518 # list all option lists
1519 1519 opt_output = []
1520 1520 for title, options in option_lists:
1521 1521 opt_output.append(("\n%s" % title, None))
1522 1522 for shortopt, longopt, default, desc in options:
1523 1523 if "DEPRECATED" in desc and not ui.verbose: continue
1524 1524 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1525 1525 longopt and " --%s" % longopt),
1526 1526 "%s%s" % (desc,
1527 1527 default
1528 1528 and _(" (default: %s)") % default
1529 1529 or "")))
1530 1530
1531 1531 if not name:
1532 1532 ui.write(_("\nadditional help topics:\n\n"))
1533 1533 topics = []
1534 1534 for names, header, doc in help.helptable:
1535 1535 names = [(-len(name), name) for name in names]
1536 1536 names.sort()
1537 1537 topics.append((names[0][1], header))
1538 1538 topics_len = max([len(s[0]) for s in topics])
1539 1539 for t, desc in topics:
1540 1540 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1541 1541
1542 1542 if opt_output:
1543 1543 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1544 1544 for first, second in opt_output:
1545 1545 if second:
1546 1546 # wrap descriptions at 70 characters, just like the
1547 1547 # main help texts
1548 1548 second = textwrap.wrap(second, width=70 - opts_len - 3)
1549 1549 pad = '\n' + ' ' * (opts_len + 3)
1550 1550 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1551 1551 else:
1552 1552 ui.write("%s\n" % first)
1553 1553
1554 1554 def identify(ui, repo, source=None,
1555 1555 rev=None, num=None, id=None, branch=None, tags=None):
1556 1556 """identify the working copy or specified revision
1557 1557
1558 1558 With no revision, print a summary of the current state of the
1559 1559 repository.
1560 1560
1561 1561 With a path, do a lookup in another repository.
1562 1562
1563 1563 This summary identifies the repository state using one or two
1564 1564 parent hash identifiers, followed by a "+" if there are
1565 1565 uncommitted changes in the working directory, a list of tags for
1566 1566 this revision and a branch name for non-default branches.
1567 1567 """
1568 1568
1569 1569 if not repo and not source:
1570 1570 raise util.Abort(_("There is no Mercurial repository here "
1571 1571 "(.hg not found)"))
1572 1572
1573 1573 hexfunc = ui.debugflag and hex or short
1574 1574 default = not (num or id or branch or tags)
1575 1575 output = []
1576 1576
1577 1577 revs = []
1578 1578 if source:
1579 1579 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1580 1580 repo = hg.repository(ui, source)
1581 1581
1582 1582 if not repo.local():
1583 1583 if not rev and revs:
1584 1584 rev = revs[0]
1585 1585 if not rev:
1586 1586 rev = "tip"
1587 1587 if num or branch or tags:
1588 1588 raise util.Abort(
1589 1589 "can't query remote revision number, branch, or tags")
1590 1590 output = [hexfunc(repo.lookup(rev))]
1591 1591 elif not rev:
1592 1592 ctx = repo[None]
1593 1593 parents = ctx.parents()
1594 1594 changed = False
1595 1595 if default or id or num:
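# the working directory is reported dirty ('+') if any file has been
# modified, added, removed or deleted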
1596 1596 changed = ctx.files() + ctx.deleted()
1597 1597 if default or id:
1598 1598 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1599 1599 (changed) and "+" or "")]
1600 1600 if num:
1601 1601 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1602 1602 (changed) and "+" or ""))
1603 1603 else:
1604 1604 ctx = repo[rev]
1605 1605 if default or id:
1606 1606 output = [hexfunc(ctx.node())]
1607 1607 if num:
1608 1608 output.append(str(ctx.rev()))
1609 1609
1610 1610 if repo.local() and default and not ui.quiet:
1611 1611 b = encoding.tolocal(ctx.branch())
1612 1612 if b != 'default':
1613 1613 output.append("(%s)" % b)
1614 1614
1615 1615 # multiple tags for a single parent separated by '/'
1616 1616 t = "/".join(ctx.tags())
1617 1617 if t:
1618 1618 output.append(t)
1619 1619
1620 1620 if branch:
1621 1621 output.append(encoding.tolocal(ctx.branch()))
1622 1622
1623 1623 if tags:
1624 1624 output.extend(ctx.tags())
1625 1625
1626 1626 ui.write("%s\n" % ' '.join(output))
1627 1627
1628 1628 def import_(ui, repo, patch1, *patches, **opts):
1629 1629 """import an ordered set of patches
1630 1630
1631 1631 Import a list of patches and commit them individually.
1632 1632
1633 1633 If there are outstanding changes in the working directory, import
1634 1634 will abort unless given the -f/--force flag.
1635 1635
1636 1636 You can import a patch straight from a mail message. Even patches
1637 1637 as attachments work (body part must be type text/plain or
1638 1638 text/x-patch to be used). The From and Subject headers of the email
1639 1639 message are used as the default committer and commit message. All
1640 1640 text/plain body parts before the first diff are added to the commit
1641 1641 message.
1642 1642
1643 1643 If the imported patch was generated by hg export, user and
1644 1644 description from patch override values from message headers and
1645 1645 body. Values given on command line with -m/--message and -u/--user
1646 1646 override these.
1647 1647
1648 1648 If --exact is specified, import will set the working directory to
1649 1649 the parent of each patch before applying it, and will abort if the
1650 1650 resulting changeset has a different ID than the one recorded in
1651 1651 the patch. This may happen due to character set problems or other
1652 1652 deficiencies in the text patch format.
1653 1653
1654 1654 With -s/--similarity, hg will attempt to discover renames and
1655 1655 copies in the patch in the same way as 'addremove'.
1656 1656
1657 1657 To read a patch from standard input, use patch name "-". See 'hg
1658 1658 help dates' for a list of formats valid for -d/--date.
1659 1659 """
1660 1660 patches = (patch1,) + patches
1661 1661
1662 1662 date = opts.get('date')
1663 1663 if date:
1664 1664 opts['date'] = util.parsedate(date)
1665 1665
1666 1666 try:
1667 1667 sim = float(opts.get('similarity') or 0)
1668 1668 except ValueError:
1669 1669 raise util.Abort(_('similarity must be a number'))
1670 1670 if sim < 0 or sim > 100:
1671 1671 raise util.Abort(_('similarity must be between 0 and 100'))
1672 1672
1673 1673 if opts.get('exact') or not opts.get('force'):
1674 1674 cmdutil.bail_if_changed(repo)
1675 1675
1676 1676 d = opts["base"]
1677 1677 strip = opts["strip"]
1678 1678 wlock = lock = None
1679 1679 try:
1680 1680 wlock = repo.wlock()
1681 1681 lock = repo.lock()
1682 1682 for p in patches:
1683 1683 pf = os.path.join(d, p)
1684 1684
1685 1685 if pf == '-':
1686 1686 ui.status(_("applying patch from stdin\n"))
1687 1687 pf = sys.stdin
1688 1688 else:
1689 1689 ui.status(_("applying %s\n") % p)
1690 1690 pf = url.open(ui, pf)
1691 1691 data = patch.extract(ui, pf)
1692 1692 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1693 1693
1694 1694 if tmpname is None:
1695 1695 raise util.Abort(_('no diffs found'))
1696 1696
1697 1697 try:
1698 1698 cmdline_message = cmdutil.logmessage(opts)
1699 1699 if cmdline_message:
1700 1700 # pickup the cmdline msg
1701 1701 message = cmdline_message
1702 1702 elif message:
1703 1703 # pickup the patch msg
1704 1704 message = message.strip()
1705 1705 else:
1706 1706 # launch the editor
1707 1707 message = None
1708 1708 ui.debug(_('message:\n%s\n') % message)
1709 1709
1710 1710 wp = repo.parents()
1711 1711 if opts.get('exact'):
1712 1712 if not nodeid or not p1:
1713 1713 raise util.Abort(_('not a mercurial patch'))
1714 1714 p1 = repo.lookup(p1)
1715 1715 p2 = repo.lookup(p2 or hex(nullid))
1716 1716
1717 1717 if p1 != wp[0].node():
1718 1718 hg.clean(repo, p1)
1719 1719 repo.dirstate.setparents(p1, p2)
1720 1720 elif p2:
1721 1721 try:
1722 1722 p1 = repo.lookup(p1)
1723 1723 p2 = repo.lookup(p2)
1724 1724 if p1 == wp[0].node():
1725 1725 repo.dirstate.setparents(p1, p2)
1726 1726 except error.RepoError:
1727 1727 pass
1728 1728 if opts.get('exact') or opts.get('import_branch'):
1729 1729 repo.dirstate.setbranch(branch or 'default')
1730 1730
1731 1731 files = {}
1732 1732 try:
1733 1733 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1734 1734 files=files)
1735 1735 finally:
1736 1736 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1737 1737 if not opts.get('no_commit'):
1738 1738 n = repo.commit(files, message, opts.get('user') or user,
1739 1739 opts.get('date') or date)
1740 1740 if opts.get('exact'):
1741 1741 if hex(n) != nodeid:
1742 1742 repo.rollback()
1743 1743 raise util.Abort(_('patch is damaged'
1744 1744 ' or loses information'))
1745 1745 # Force a dirstate write so that the next transaction
746 746 # backs up an up-to-date file.
1747 1747 repo.dirstate.write()
1748 1748 finally:
1749 1749 os.unlink(tmpname)
1750 1750 finally:
1751 1751 release(lock, wlock)
1752 1752
1753 1753 def incoming(ui, repo, source="default", **opts):
1754 1754 """show new changesets found in source
1755 1755
1756 1756 Show new changesets found in the specified path/URL or the default
1757 1757 pull location. These are the changesets that would be pulled if a
1758 1758 pull was requested.
1759 1759
1760 1760 For remote repositories, using --bundle avoids downloading the
1761 1761 changesets twice if the incoming is followed by a pull.
1762 1762
1763 1763 See pull for valid source format details.
1764 1764 """
1765 1765 limit = cmdutil.loglimit(opts)
1766 1766 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1767 1767 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1768 1768 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1769 1769 if revs:
1770 1770 revs = [other.lookup(rev) for rev in revs]
1771 1771 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1772 1772 force=opts["force"])
1773 1773 if not incoming:
1774 1774 try:
1775 1775 os.unlink(opts["bundle"])
1776 1776 except:
1777 1777 pass
1778 1778 ui.status(_("no changes found\n"))
1779 1779 return 1
1780 1780
1781 1781 cleanup = None
1782 1782 try:
1783 1783 fname = opts["bundle"]
1784 1784 if fname or not other.local():
1785 1785 # create a bundle (uncompressed if other repo is not local)
1786 1786
1787 1787 if revs is None and other.capable('changegroupsubset'):
1788 1788 revs = rheads
1789 1789
1790 1790 if revs is None:
1791 1791 cg = other.changegroup(incoming, "incoming")
1792 1792 else:
1793 1793 cg = other.changegroupsubset(incoming, revs, 'incoming')
1794 1794 bundletype = other.local() and "HG10BZ" or "HG10UN"
1795 1795 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1796 1796 # keep written bundle?
1797 1797 if opts["bundle"]:
1798 1798 cleanup = None
1799 1799 if not other.local():
1800 1800 # use the created uncompressed bundlerepo
1801 1801 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1802 1802
1803 1803 o = other.changelog.nodesbetween(incoming, revs)[0]
1804 1804 if opts.get('newest_first'):
1805 1805 o.reverse()
1806 1806 displayer = cmdutil.show_changeset(ui, other, opts)
1807 1807 count = 0
1808 1808 for n in o:
1809 1809 if count >= limit:
1810 1810 break
1811 1811 parents = [p for p in other.changelog.parents(n) if p != nullid]
1812 1812 if opts.get('no_merges') and len(parents) == 2:
1813 1813 continue
1814 1814 count += 1
1815 1815 displayer.show(other[n])
1816 1816 finally:
1817 1817 if hasattr(other, 'close'):
1818 1818 other.close()
1819 1819 if cleanup:
1820 1820 os.unlink(cleanup)
1821 1821
1822 1822 def init(ui, dest=".", **opts):
1823 1823 """create a new repository in the given directory
1824 1824
1825 1825 Initialize a new repository in the given directory. If the given
1826 1826 directory does not exist, it is created.
1827 1827
1828 1828 If no directory is given, the current directory is used.
1829 1829
1830 1830 It is possible to specify an ssh:// URL as the destination.
1831 1831 See 'hg help urls' for more information.
1832 1832 """
1833 1833 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1834 1834
1835 1835 def locate(ui, repo, *pats, **opts):
1836 1836 """locate files matching specific patterns
1837 1837
1838 1838 Print all files under Mercurial control whose names match the
1839 1839 given patterns.
1840 1840
1841 1841 This command searches the entire repository by default. To search
1842 1842 just the current directory and its subdirectories, use
1843 1843 "--include .".
1844 1844
1845 1845 If no patterns are given to match, this command prints all file
1846 1846 names.
1847 1847
1848 1848 If you want to feed the output of this command into the "xargs"
1849 1849 command, use the -0 option to both this command and "xargs". This
1850 1850 will avoid the problem of "xargs" treating single filenames that
1851 1851 contain white space as multiple filenames.
1852 1852 """
1853 1853 end = opts.get('print0') and '\0' or '\n'
1854 1854 rev = opts.get('rev') or None
1855 1855
1856 1856 ret = 1
1857 1857 m = cmdutil.match(repo, pats, opts, default='relglob')
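# override the match object's 'bad file' callback so that patterns
# matching nothing are ignored silently instead of warning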
1858 1858 m.bad = lambda x,y: False
1859 1859 for abs in repo[rev].walk(m):
1860 1860 if not rev and abs not in repo.dirstate:
1861 1861 continue
1862 1862 if opts.get('fullpath'):
1863 1863 ui.write(repo.wjoin(abs), end)
1864 1864 else:
1865 1865 ui.write(((pats and m.rel(abs)) or abs), end)
1866 1866 ret = 0
1867 1867
1868 1868 return ret
1869 1869
1870 1870 def log(ui, repo, *pats, **opts):
1871 1871 """show revision history of entire repository or files
1872 1872
1873 1873 Print the revision history of the specified files or the entire
1874 1874 project.
1875 1875
1876 1876 File history is shown without following rename or copy history of
1877 1877 files. Use -f/--follow with a file name to follow history across
1878 1878 renames and copies. --follow without a file name will only show
1879 1879 ancestors or descendants of the starting revision. --follow-first
1880 1880 only follows the first parent of merge revisions.
1881 1881
1882 1882 If no revision range is specified, the default is tip:0 unless
1883 1883 --follow is set, in which case the working directory parent is
1884 1884 used as the starting revision.
1885 1885
1886 1886 See 'hg help dates' for a list of formats valid for -d/--date.
1887 1887
1888 1888 By default this command outputs: changeset id and hash, tags,
1889 1889 non-trivial parents, user, date and time, and a summary for each
1890 1890 commit. When the -v/--verbose switch is used, the list of changed
1891 1891 files and full commit message is shown.
1892 1892
1893 1893 NOTE: log -p/--patch may generate unexpected diff output for merge
1894 1894 changesets, as it will only compare the merge changeset against
1895 1895 its first parent. Also, the files: list will only reflect files
1896 1896 that are different from BOTH parents.
1897 1897
1898 1898 """
1899 1899
1900 1900 get = util.cachefunc(lambda r: repo[r].changeset())
1901 1901 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1902 1902
1903 1903 limit = cmdutil.loglimit(opts)
1904 1904 count = 0
1905 1905
1906 1906 if opts.get('copies') and opts.get('rev'):
1907 1907 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1908 1908 else:
1909 1909 endrev = len(repo)
1910 1910 rcache = {}
1911 1911 ncache = {}
1912 1912 def getrenamed(fn, rev):
1913 1913 '''looks up all renames for a file (up to endrev) the first
1914 1914 time the file is given. It indexes on the changerev and only
1915 1915 parses the manifest if linkrev != changerev.
1916 1916 Returns rename info for fn at changerev rev.'''
1917 1917 if fn not in rcache:
1918 1918 rcache[fn] = {}
1919 1919 ncache[fn] = {}
1920 1920 fl = repo.file(fn)
1921 1921 for i in fl:
1922 1922 node = fl.node(i)
1923 1923 lr = fl.linkrev(i)
1924 1924 renamed = fl.renamed(node)
1925 1925 rcache[fn][lr] = renamed
1926 1926 if renamed:
1927 1927 ncache[fn][node] = renamed
1928 1928 if lr >= endrev:
1929 1929 break
1930 1930 if rev in rcache[fn]:
1931 1931 return rcache[fn][rev]
1932 1932
1933 1933 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1934 1934 # filectx logic.
1935 1935
1936 1936 try:
1937 1937 return repo[rev][fn].renamed()
1938 1938 except error.LookupError:
1939 1939 pass
1940 1940 return None
1941 1941
1942 1942 df = False
1943 1943 if opts["date"]:
1944 1944 df = util.matchdate(opts["date"])
1945 1945
1946 1946 only_branches = opts.get('only_branch')
1947 1947
1948 1948 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1949 1949 for st, rev, fns in changeiter:
1950 1950 if st == 'add':
1951 1951 parents = [p for p in repo.changelog.parentrevs(rev)
1952 1952 if p != nullrev]
1953 1953 if opts.get('no_merges') and len(parents) == 2:
1954 1954 continue
1955 1955 if opts.get('only_merges') and len(parents) != 2:
1956 1956 continue
1957 1957
1958 1958 if only_branches:
1959 1959 revbranch = get(rev)[5]['branch']
1960 1960 if revbranch not in only_branches:
1961 1961 continue
1962 1962
1963 1963 if df:
1964 1964 changes = get(rev)
1965 1965 if not df(changes[2][0]):
1966 1966 continue
1967 1967
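                      # a revision is kept only if every keyword (case-insensitive) matches its user, description, or file list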
1968 1968 if opts.get('keyword'):
1969 1969 changes = get(rev)
1970 1970 miss = 0
1971 1971 for k in [kw.lower() for kw in opts['keyword']]:
1972 1972 if not (k in changes[1].lower() or
1973 1973 k in changes[4].lower() or
1974 1974 k in " ".join(changes[3]).lower()):
1975 1975 miss = 1
1976 1976 break
1977 1977 if miss:
1978 1978 continue
1979 1979
1980 1980 if opts['user']:
1981 1981 changes = get(rev)
1982 1982 if not [k for k in opts['user'] if k in changes[1]]:
1983 1983 continue
1984 1984
1985 1985 copies = []
1986 1986 if opts.get('copies') and rev:
1987 1987 for fn in get(rev)[3]:
1988 1988 rename = getrenamed(fn, rev)
1989 1989 if rename:
1990 1990 copies.append((fn, rename[0]))
1991 1991 displayer.show(context.changectx(repo, rev), copies=copies)
1992 1992 elif st == 'iter':
1993 1993 if count == limit: break
1994 1994 if displayer.flush(rev):
1995 1995 count += 1
1996 1996
1997 1997 def manifest(ui, repo, node=None, rev=None):
1998 1998 """output the current or given revision of the project manifest
1999 1999
2000 2000 Print a list of version controlled files for the given revision.
2001 2001 If no revision is given, the first parent of the working directory
2002 2002 is used, or the null revision if none is checked out.
2003 2003
2004 2004     With the -v flag, print file permissions, symlink and executable bits.
2005 2005     With the --debug flag, print file revision hashes.
2006 2006 """
2007 2007
2008 2008 if rev and node:
2009 2009 raise util.Abort(_("please specify just one revision"))
2010 2010
2011 2011 if not node:
2012 2012 node = rev
2013 2013
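              # flag -> verbose prefix: 'l' (symlink) -> '644 @ ', 'x' (executable) -> '755 * ', plain file -> '644 '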
2014 2014 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2015 2015 ctx = repo[node]
2016 2016 for f in ctx:
2017 2017 if ui.debugflag:
2018 2018 ui.write("%40s " % hex(ctx.manifest()[f]))
2019 2019 if ui.verbose:
2020 2020 ui.write(decor[ctx.flags(f)])
2021 2021 ui.write("%s\n" % f)
2022 2022
2023 2023 def merge(ui, repo, node=None, force=None, rev=None):
2024 2024 """merge working directory with another revision
2025 2025
2026 2026     The contents of the current working directory are updated with all
2027 2027 changes made in the requested revision since the last common
2028 2028 predecessor revision.
2029 2029
2030 2030 Files that changed between either parent are marked as changed for
2031 2031 the next commit and a commit must be performed before any further
2032 2032 updates are allowed. The next commit has two parents.
2033 2033
2034 2034 If no revision is specified, the working directory's parent is a
2035 2035 head revision, and the current branch contains exactly one other
2036 2036 head, the other head is merged with by default. Otherwise, an
2037 2037 explicit revision to merge with must be provided.
2038 2038 """
2039 2039
2040 2040 if rev and node:
2041 2041 raise util.Abort(_("please specify just one revision"))
2042 2042 if not node:
2043 2043 node = rev
2044 2044
2045 2045 if not node:
2046 2046 branch = repo.changectx(None).branch()
2047 2047 bheads = repo.branchheads(branch)
2048 2048 if len(bheads) > 2:
2049 2049 raise util.Abort(_("branch '%s' has %d heads - "
2050 2050 "please merge with an explicit rev") %
2051 2051 (branch, len(bheads)))
2052 2052
2053 2053 parent = repo.dirstate.parents()[0]
2054 2054 if len(bheads) == 1:
2055 2055 if len(repo.heads()) > 1:
2056 2056 raise util.Abort(_("branch '%s' has one head - "
2057 2057 "please merge with an explicit rev") %
2058 2058 branch)
2059 2059 msg = _('there is nothing to merge')
2060 2060 if parent != repo.lookup(repo[None].branch()):
2061 2061 msg = _('%s - use "hg update" instead') % msg
2062 2062 raise util.Abort(msg)
2063 2063
2064 2064 if parent not in bheads:
2065 2065 raise util.Abort(_('working dir not at a head rev - '
2066 2066 'use "hg update" or merge with an explicit rev'))
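                  # pick whichever branch head is not the working directory parent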
2067 2067 node = parent == bheads[0] and bheads[-1] or bheads[0]
2068 2068 return hg.merge(repo, node, force=force)
2069 2069
2070 2070 def outgoing(ui, repo, dest=None, **opts):
2071 2071 """show changesets not found in destination
2072 2072
2073 2073 Show changesets not found in the specified destination repository
2074 2074 or the default push location. These are the changesets that would
2075 2075 be pushed if a push was requested.
2076 2076
2077 2077 See pull for valid destination format details.
2078 2078 """
2079 2079 limit = cmdutil.loglimit(opts)
2080 2080 dest, revs, checkout = hg.parseurl(
2081 2081 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2082 2082 if revs:
2083 2083 revs = [repo.lookup(rev) for rev in revs]
2084 2084
2085 2085 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2086 2086 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2087 2087 o = repo.findoutgoing(other, force=opts.get('force'))
2088 2088 if not o:
2089 2089 ui.status(_("no changes found\n"))
2090 2090 return 1
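              # expand the roots reported by findoutgoing into the full list of outgoing changesets, bounded by revs if given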
2091 2091 o = repo.changelog.nodesbetween(o, revs)[0]
2092 2092 if opts.get('newest_first'):
2093 2093 o.reverse()
2094 2094 displayer = cmdutil.show_changeset(ui, repo, opts)
2095 2095 count = 0
2096 2096 for n in o:
2097 2097 if count >= limit:
2098 2098 break
2099 2099 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2100 2100 if opts.get('no_merges') and len(parents) == 2:
2101 2101 continue
2102 2102 count += 1
2103 2103 displayer.show(repo[n])
2104 2104
2105 2105 def parents(ui, repo, file_=None, **opts):
2106 2106 """show the parents of the working directory or revision
2107 2107
2108 2108 Print the working directory's parent revisions. If a revision is
2109 2109 given via -r/--rev, the parent of that revision will be printed.
2110 2110     If a file argument is given, the revision in which the file was last
2111 2111 changed (before the working directory revision or the argument to
2112 2112 --rev if given) is printed.
2113 2113 """
2114 2114 rev = opts.get('rev')
2115 2115 if rev:
2116 2116 ctx = repo[rev]
2117 2117 else:
2118 2118 ctx = repo[None]
2119 2119
2120 2120 if file_:
2121 2121 m = cmdutil.match(repo, (file_,), opts)
2122 2122 if m.anypats() or len(m.files()) != 1:
2123 2123 raise util.Abort(_('can only specify an explicit file name'))
2124 2124 file_ = m.files()[0]
2125 2125 filenodes = []
2126 2126 for cp in ctx.parents():
2127 2127 if not cp:
2128 2128 continue
2129 2129 try:
2130 2130 filenodes.append(cp.filenode(file_))
2131 2131 except error.LookupError:
2132 2132 pass
2133 2133 if not filenodes:
2134 2134 raise util.Abort(_("'%s' not found in manifest!") % file_)
2135 2135 fl = repo.file(file_)
2136 2136 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2137 2137 else:
2138 2138 p = [cp.node() for cp in ctx.parents()]
2139 2139
2140 2140 displayer = cmdutil.show_changeset(ui, repo, opts)
2141 2141 for n in p:
2142 2142 if n != nullid:
2143 2143 displayer.show(repo[n])
2144 2144
2145 2145 def paths(ui, repo, search=None):
2146 2146 """show aliases for remote repositories
2147 2147
2148 2148 Show definition of symbolic path name NAME. If no name is given,
2149 2149     show the definitions of all available names.
2150 2150
2151 2151 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2152 2152 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2153 2153
2154 2154 See 'hg help urls' for more information.
2155 2155 """
2156 2156 if search:
2157 2157 for name, path in ui.configitems("paths"):
2158 2158 if name == search:
2159 2159 ui.write("%s\n" % url.hidepassword(path))
2160 2160 return
2161 2161 ui.warn(_("not found!\n"))
2162 2162 return 1
2163 2163 else:
2164 2164 for name, path in ui.configitems("paths"):
2165 2165 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2166 2166
2167 2167 def postincoming(ui, repo, modheads, optupdate, checkout):
2168 2168 if modheads == 0:
2169 2169 return
2170 2170 if optupdate:
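                  # update only when no new heads were added, the branch still has a single head, or an explicit checkout rev was requested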
2171 2171 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2172 2172 return hg.update(repo, checkout)
2173 2173 else:
2174 2174 ui.status(_("not updating, since new heads added\n"))
2175 2175 if modheads > 1:
2176 2176 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2177 2177 else:
2178 2178 ui.status(_("(run 'hg update' to get a working copy)\n"))
2179 2179
2180 2180 def pull(ui, repo, source="default", **opts):
2181 2181 """pull changes from the specified source
2182 2182
2183 2183 Pull changes from a remote repository to the local one.
2184 2184
2185 2185 This finds all changes from the repository at the specified path
2186 2186 or URL and adds them to the local repository. By default, this
2187 2187 does not update the copy of the project in the working directory.
2188 2188
2189 2189 Use hg incoming if you want to see what will be added by the next
2190 2190 pull without actually adding the changes to the repository.
2191 2191
2192 2192 If SOURCE is omitted, the 'default' path will be used.
2193 2193 See 'hg help urls' for more information.
2194 2194 """
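              # illustrative usage only (hypothetical URL): hg pull -u https://hg.example.com/project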
2195 2195 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2196 2196 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2197 2197 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2198 2198 if revs:
2199 2199 try:
2200 2200 revs = [other.lookup(rev) for rev in revs]
2201 2201 except error.CapabilityError:
2202 2202 err = _("Other repository doesn't support revision lookup, "
2203 2203 "so a rev cannot be specified.")
2204 2204 raise util.Abort(err)
2205 2205
2206 2206 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2207 2207 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2208 2208
2209 2209 def push(ui, repo, dest=None, **opts):
2210 2210 """push changes to the specified destination
2211 2211
2212 2212 Push changes from the local repository to the given destination.
2213 2213
2214 2214 This is the symmetrical operation for pull. It moves changes from
2215 2215 the current repository to a different one. If the destination is
2216 2216     local, this is identical to a pull in that directory from the
2217 2217 current one.
2218 2218
2219 2219 By default, push will refuse to run if it detects the result would
2220 2220 increase the number of remote heads. This generally indicates the
2221 2221     increase the number of remote heads. This generally indicates that
2222 2222
2223 2223 If -r/--rev is used, the named revision and all its ancestors will
2224 2224 be pushed to the remote repository.
2225 2225
2226 2226 Look at the help text for URLs for important details about ssh://
2227 2227 URLs. If DESTINATION is omitted, a default path will be used.
2228 2228 See 'hg help urls' for more information.
2229 2229 """
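              # illustrative usage only (hypothetical URL): hg push -r stable ssh://hg@example.com/project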
2230 2230 dest, revs, checkout = hg.parseurl(
2231 2231 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2232 2232 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2233 2233 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2234 2234 if revs:
2235 2235 revs = [repo.lookup(rev) for rev in revs]
2236 2236 r = repo.push(other, opts.get('force'), revs=revs)
2237 2237 return r == 0
2238 2238
2239 2239 def rawcommit(ui, repo, *pats, **opts):
2240 2240 """raw commit interface (DEPRECATED)
2241 2241
2242 2242 (DEPRECATED)
2243 2243     Low-level commit, for use in helper scripts.
2244 2244
2245 2245 This command is not intended to be used by normal users, as it is
2246 2246 primarily useful for importing from other SCMs.
2247 2247
2248 2248 This command is now deprecated and will be removed in a future
2249 2249     release; please use debugsetparents and commit instead.
2250 2250 """
2251 2251
2252 2252 ui.warn(_("(the rawcommit command is deprecated)\n"))
2253 2253
2254 2254 message = cmdutil.logmessage(opts)
2255 2255
2256 2256 files = cmdutil.match(repo, pats, opts).files()
2257 2257 if opts.get('files'):
2258 2258 files += open(opts['files']).read().splitlines()
2259 2259
2260 2260 parents = [repo.lookup(p) for p in opts['parent']]
2261 2261
2262 2262 try:
2263 2263 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2264 2264 except ValueError, inst:
2265 2265 raise util.Abort(str(inst))
2266 2266
2267 2267 def recover(ui, repo):
2268 2268 """roll back an interrupted transaction
2269 2269
2270 2270 Recover from an interrupted commit or pull.
2271 2271
2272 2272 This command tries to fix the repository status after an
2273 2273 interrupted operation. It should only be necessary when Mercurial
2274 2274 suggests it.
2275 2275 """
2276 2276 if repo.recover():
2277 2277 return hg.verify(repo)
2278 2278 return 1
2279 2279
2280 2280 def remove(ui, repo, *pats, **opts):
2281 2281 """remove the specified files on the next commit
2282 2282
2283 2283 Schedule the indicated files for removal from the repository.
2284 2284
2285 2285 This only removes files from the current branch, not from the
2286 2286 entire project history. -A/--after can be used to remove only
2287 2287 files that have already been deleted, -f/--force can be used to
2288 2288 force deletion, and -Af can be used to remove files from the next
2289 2289 revision without deleting them.
2290 2290
2291 2291 The following table details the behavior of remove for different
2292 2292 file states (columns) and option combinations (rows). The file
2293 2293 states are Added, Clean, Modified and Missing (as reported by hg
2294 2294 status). The actions are Warn, Remove (from branch) and Delete
2295 2295 (from disk).
2296 2296
2297 2297 A C M !
2298 2298 none W RD W R
2299 2299 -f R RD RD R
2300 2300 -A W W W R
2301 2301 -Af R R R R
2302 2302
2303 2303 This command schedules the files to be removed at the next commit.
2304 2304 To undo a remove before that, see hg revert.
2305 2305 """
2306 2306
2307 2307 after, force = opts.get('after'), opts.get('force')
2308 2308 if not pats and not after:
2309 2309 raise util.Abort(_('no files specified'))
2310 2310
2311 2311 m = cmdutil.match(repo, pats, opts)
2312 2312 s = repo.status(match=m, clean=True)
2313 2313 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2314 2314
2315 2315 def warn(files, reason):
2316 2316 for f in files:
2317 2317 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2318 2318 % (m.rel(f), reason))
2319 2319
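              # translate the option/state table from the docstring above into (remove, forget) work lists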
2320 2320 if force:
2321 2321 remove, forget = modified + deleted + clean, added
2322 2322 elif after:
2323 2323 remove, forget = deleted, []
2324 2324 warn(modified + added + clean, _('still exists'))
2325 2325 else:
2326 2326 remove, forget = deleted + clean, []
2327 2327 warn(modified, _('is modified'))
2328 2328 warn(added, _('has been marked for add'))
2329 2329
2330 2330 for f in util.sort(remove + forget):
2331 2331 if ui.verbose or not m.exact(f):
2332 2332 ui.status(_('removing %s\n') % m.rel(f))
2333 2333
2334 2334 repo.forget(forget)
2335 2335 repo.remove(remove, unlink=not after)
2336 2336
2337 2337 def rename(ui, repo, *pats, **opts):
2338 2338 """rename files; equivalent of copy + remove
2339 2339
2340 2340 Mark dest as copies of sources; mark sources for deletion. If dest
2341 2341 is a directory, copies are put in that directory. If dest is a
2342 2342 file, there can only be one source.
2343 2343
2344 2344 By default, this command copies the contents of files as they
2345 2345 exist in the working directory. If invoked with -A/--after, the
2346 2346 operation is recorded, but no copying is performed.
2347 2347
2348 2348 This command takes effect at the next commit. To undo a rename
2349 2349 before that, see hg revert.
2350 2350 """
2351 2351 wlock = repo.wlock(False)
2352 2352 try:
2353 2353 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2354 2354 finally:
2355 2355 wlock.release()
2356 2356
2357 2357 def resolve(ui, repo, *pats, **opts):
2358 2358 """retry file merges from a merge or update
2359 2359
2360 2360 This command will cleanly retry unresolved file merges using file
2361 2361 revisions preserved from the last update or merge. To attempt to
2362 2362 resolve all unresolved files, use the -a/--all switch.
2363 2363
2364 2364 If a conflict is resolved manually, please note that the changes
2365 2365 will be overwritten if the merge is retried with resolve. The
2366 2366 -m/--mark switch should be used to mark the file as resolved.
2367 2367
2368 2368 This command will also allow listing resolved files and manually
2369 2369 marking and unmarking files as resolved. All files must be marked
2370 2370 as resolved before the new commits are permitted.
2371 2371     as resolved before new commits are permitted.
2372 2372 The codes used to show the status of files are:
2373 2373 U = unresolved
2374 2374 R = resolved
2375 2375 """
2376 2376
2377 2377 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2378 2378
2379 2379 if (show and (mark or unmark)) or (mark and unmark):
2380 2380 raise util.Abort(_("too many options specified"))
2381 2381 if pats and all:
2382 2382 raise util.Abort(_("can't specify --all and patterns"))
2383 2383 if not (all or pats or show or mark or unmark):
2384 2384 raise util.Abort(_('no files or directories specified; '
2385 2385 'use --all to remerge all files'))
2386 2386
2387 2387 ms = merge_.mergestate(repo)
2388 2388 m = cmdutil.match(repo, pats, opts)
2389 2389
2390 2390 for f in ms:
2391 2391 if m(f):
2392 2392 if show:
2393 2393 ui.write("%s %s\n" % (ms[f].upper(), f))
2394 2394 elif mark:
2395 2395 ms.mark(f, "r")
2396 2396 elif unmark:
2397 2397 ms.mark(f, "u")
2398 2398 else:
2399 2399 wctx = repo[None]
2400 2400 mctx = wctx.parents()[-1]
2401 2401
2402 2402 # backup pre-resolve (merge uses .orig for its own purposes)
2403 2403 a = repo.wjoin(f)
2404 2404 util.copyfile(a, a + ".resolve")
2405 2405
2406 2406 # resolve file
2407 2407 ms.resolve(f, wctx, mctx)
2408 2408
2409 2409 # replace filemerge's .orig file with our resolve file
2410 2410 util.rename(a + ".resolve", a + ".orig")
2411 2411
2412 2412 def revert(ui, repo, *pats, **opts):
2413 2413 """restore individual files or directories to an earlier state
2414 2414
2415 2415 (use update -r to check out earlier revisions, revert does not
2416 2416 change the working directory parents)
2417 2417
2418 2418 With no revision specified, revert the named files or directories
2419 2419 to the contents they had in the parent of the working directory.
2420 2420 This restores the contents of the affected files to an unmodified
2421 2421 state and unschedules adds, removes, copies, and renames. If the
2422 2422 working directory has two parents, you must explicitly specify the
2423 2423 revision to revert to.
2424 2424
2425 2425 Using the -r/--rev option, revert the given files or directories
2426 2426 to their contents as of a specific revision. This can be helpful
2427 2427 to "roll back" some or all of an earlier change. See 'hg help
2428 2428 dates' for a list of formats valid for -d/--date.
2429 2429
2430 2430 Revert modifies the working directory. It does not commit any
2431 2431 changes, or change the parent of the working directory. If you
2432 2432 revert to a revision other than the parent of the working
2433 2433 directory, the reverted files will thus appear modified
2434 2434 afterwards.
2435 2435
2436 2436 If a file has been deleted, it is restored. If the executable mode
2437 2437 of a file was changed, it is reset.
2438 2438
2439 2439 If names are given, all files matching the names are reverted.
2440 2440 If no arguments are given, no files are reverted.
2441 2441
2442 2442 Modified files are saved with a .orig suffix before reverting.
2443 2443 To disable these backups, use --no-backup.
2444 2444 """
2445 2445
2446 2446 if opts["date"]:
2447 2447 if opts["rev"]:
2448 2448 raise util.Abort(_("you can't specify a revision and a date"))
2449 2449 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2450 2450
2451 2451 if not pats and not opts.get('all'):
2452 2452 raise util.Abort(_('no files or directories specified; '
2453 2453 'use --all to revert the whole repo'))
2454 2454
2455 2455 parent, p2 = repo.dirstate.parents()
2456 2456 if not opts.get('rev') and p2 != nullid:
2457 2457 raise util.Abort(_('uncommitted merge - please provide a '
2458 2458 'specific revision'))
2459 2459 ctx = repo[opts.get('rev')]
2460 2460 node = ctx.node()
2461 2461 mf = ctx.manifest()
2462 2462 if node == parent:
2463 2463 pmf = mf
2464 2464 else:
2465 2465 pmf = None
2466 2466
2467 2467 # need all matching names in dirstate and manifest of target rev,
2468 2468 # so have to walk both. do not print errors if files exist in one
2469 2469 # but not other.
2470 2470
2471 2471 names = {}
2472 2472
2473 2473 wlock = repo.wlock()
2474 2474 try:
2475 2475 # walk dirstate.
2476 2476
2477 2477 m = cmdutil.match(repo, pats, opts)
2478 2478 m.bad = lambda x,y: False
2479 2479 for abs in repo.walk(m):
2480 2480 names[abs] = m.rel(abs), m.exact(abs)
2481 2481
2482 2482 # walk target manifest.
2483 2483
2484 2484 def badfn(path, msg):
2485 2485 if path in names:
2486 2486 return False
2487 2487 path_ = path + '/'
2488 2488 for f in names:
2489 2489 if f.startswith(path_):
2490 2490 return False
2491 2491 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2492 2492 return False
2493 2493
2494 2494 m = cmdutil.match(repo, pats, opts)
2495 2495 m.bad = badfn
2496 2496 for abs in repo[node].walk(m):
2497 2497 if abs not in names:
2498 2498 names[abs] = m.rel(abs), m.exact(abs)
2499 2499
2500 2500 m = cmdutil.matchfiles(repo, names)
2501 2501 changes = repo.status(match=m)[:4]
2502 2502 modified, added, removed, deleted = map(set, changes)
2503 2503
2504 2504 # if f is a rename, also revert the source
2505 2505 cwd = repo.getcwd()
2506 2506 for f in added:
2507 2507 src = repo.dirstate.copied(f)
2508 2508 if src and src not in names and repo.dirstate[src] == 'r':
2509 2509 removed.add(src)
2510 2510 names[src] = (repo.pathto(src, cwd), True)
2511 2511
2512 2512 def removeforget(abs):
2513 2513 if repo.dirstate[abs] == 'a':
2514 2514 return _('forgetting %s\n')
2515 2515 return _('removing %s\n')
2516 2516
2517 2517 revert = ([], _('reverting %s\n'))
2518 2518 add = ([], _('adding %s\n'))
2519 2519 remove = ([], removeforget)
2520 2520 undelete = ([], _('undeleting %s\n'))
2521 2521
2522 2522 disptable = (
2523 2523 # dispatch table:
2524 2524 # file state
2525 2525 # action if in target manifest
2526 2526 # action if not in target manifest
2527 2527 # make backup if in target manifest
2528 2528 # make backup if not in target manifest
2529 2529 (modified, revert, remove, True, True),
2530 2530 (added, revert, remove, True, False),
2531 2531 (removed, undelete, None, False, False),
2532 2532 (deleted, revert, remove, False, False),
2533 2533 )
2534 2534
2535 2535 for abs, (rel, exact) in util.sort(names.items()):
2536 2536 mfentry = mf.get(abs)
2537 2537 target = repo.wjoin(abs)
2538 2538 def handle(xlist, dobackup):
2539 2539 xlist[0].append(abs)
2540 2540 if dobackup and not opts.get('no_backup') and util.lexists(target):
2541 2541 bakname = "%s.orig" % rel
2542 2542 ui.note(_('saving current version of %s as %s\n') %
2543 2543 (rel, bakname))
2544 2544 if not opts.get('dry_run'):
2545 2545 util.copyfile(target, bakname)
2546 2546 if ui.verbose or not exact:
2547 2547 msg = xlist[1]
2548 2548 if not isinstance(msg, basestring):
2549 2549 msg = msg(abs)
2550 2550 ui.status(msg % rel)
2551 2551 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2552 2552 if abs not in table: continue
2553 2553 # file has changed in dirstate
2554 2554 if mfentry:
2555 2555 handle(hitlist, backuphit)
2556 2556 elif misslist is not None:
2557 2557 handle(misslist, backupmiss)
2558 2558 break
2559 2559 else:
2560 2560 if abs not in repo.dirstate:
2561 2561 if mfentry:
2562 2562 handle(add, True)
2563 2563 elif exact:
2564 2564 ui.warn(_('file not managed: %s\n') % rel)
2565 2565 continue
2566 2566 # file has not changed in dirstate
2567 2567 if node == parent:
2568 2568 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2569 2569 continue
2570 2570 if pmf is None:
2571 2571 # only need parent manifest in this unlikely case,
2572 2572 # so do not read by default
2573 2573 pmf = repo[parent].manifest()
2574 2574 if abs in pmf:
2575 2575 if mfentry:
2576 2576 # if version of file is same in parent and target
2577 2577 # manifests, do nothing
2578 2578 if (pmf[abs] != mfentry or
2579 2579 pmf.flags(abs) != mf.flags(abs)):
2580 2580 handle(revert, False)
2581 2581 else:
2582 2582 handle(remove, False)
2583 2583
2584 2584 if not opts.get('dry_run'):
2585 2585 def checkout(f):
2586 2586 fc = ctx[f]
2587 2587 repo.wwrite(f, fc.data(), fc.flags())
2588 2588
2589 2589 audit_path = util.path_auditor(repo.root)
2590 2590 for f in remove[0]:
2591 2591 if repo.dirstate[f] == 'a':
2592 2592 repo.dirstate.forget(f)
2593 2593 continue
2594 2594 audit_path(f)
2595 2595 try:
2596 2596 util.unlink(repo.wjoin(f))
2597 2597 except OSError:
2598 2598 pass
2599 2599 repo.dirstate.remove(f)
2600 2600
2601 2601 normal = None
2602 2602 if node == parent:
2603 2603 # We're reverting to our parent. If possible, we'd like status
2604 2604 # to report the file as clean. We have to use normallookup for
2605 2605 # merges to avoid losing information about merged/dirty files.
2606 2606 if p2 != nullid:
2607 2607 normal = repo.dirstate.normallookup
2608 2608 else:
2609 2609 normal = repo.dirstate.normal
2610 2610 for f in revert[0]:
2611 2611 checkout(f)
2612 2612 if normal:
2613 2613 normal(f)
2614 2614
2615 2615 for f in add[0]:
2616 2616 checkout(f)
2617 2617 repo.dirstate.add(f)
2618 2618
2619 2619 normal = repo.dirstate.normallookup
2620 2620 if node == parent and p2 == nullid:
2621 2621 normal = repo.dirstate.normal
2622 2622 for f in undelete[0]:
2623 2623 checkout(f)
2624 2624 normal(f)
2625 2625
2626 2626 finally:
2627 2627 wlock.release()
2628 2628
2629 2629 def rollback(ui, repo):
2630 2630 """roll back the last transaction
2631 2631
2632 2632 This command should be used with care. There is only one level of
2633 2633 rollback, and there is no way to undo a rollback. It will also
2634 2634     restore the dirstate to its state as of the last transaction, losing
2635 2635 any dirstate changes since that time.
2636 2636
2637 2637 Transactions are used to encapsulate the effects of all commands
2638 2638 that create new changesets or propagate existing changesets into a
2639 2639 repository. For example, the following commands are transactional,
2640 2640 and their effects can be rolled back:
2641 2641
2642 2642 commit
2643 2643 import
2644 2644 pull
2645 2645 push (with this repository as destination)
2646 2646 unbundle
2647 2647
2648 2648 This command is not intended for use on public repositories. Once
2649 2649 changes are visible for pull by other users, rolling a transaction
2650 2650 back locally is ineffective (someone else may already have pulled
2651 2651 the changes). Furthermore, a race is possible with readers of the
2652 2652     repository; for example, an in-progress pull from the repository
2653 2653 may fail if a rollback is performed.
2654 2654 """
2655 2655 repo.rollback()
2656 2656
2657 2657 def root(ui, repo):
2658 2658 """print the root (top) of the current working directory
2659 2659
2660 2660 Print the root directory of the current repository.
2661 2661 """
2662 2662 ui.write(repo.root + "\n")
2663 2663
2664 2664 def serve(ui, repo, **opts):
2665 2665 """export the repository via HTTP
2666 2666
2667 2667 Start a local HTTP repository browser and pull server.
2668 2668
2669 2669 By default, the server logs accesses to stdout and errors to
2670 2670     stderr. Use the -A/--accesslog and -E/--errorlog options to log to files.
2671 2671 """
2672 2672
2673 2673 if opts["stdio"]:
2674 2674 if repo is None:
2675 2675 raise error.RepoError(_("There is no Mercurial repository here"
2676 2676 " (.hg not found)"))
2677 2677 s = sshserver.sshserver(ui, repo)
2678 2678 s.serve_forever()
2679 2679
2680 parentui = ui.parentui or ui
2680 parentui = repo and repo.baseui or ui
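              # use the repository's baseui when a repo is available, otherwise fall back to the plain ui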
2681 2681 optlist = ("name templates style address port prefix ipv6"
2682 2682 " accesslog errorlog webdir_conf certificate")
2683 2683 for o in optlist.split():
2684 2684 if opts[o]:
2685 2685 parentui.setconfig("web", o, str(opts[o]))
2686 2686 if (repo is not None) and (repo.ui != parentui):
2687 2687 repo.ui.setconfig("web", o, str(opts[o]))
2688 2688
2689 2689 if repo is None and not ui.config("web", "webdir_conf"):
2690 2690 raise error.RepoError(_("There is no Mercurial repository here"
2691 2691 " (.hg not found)"))
2692 2692
2693 2693 class service:
2694 2694 def init(self):
2695 2695 util.set_signal_handler()
2696 2696 self.httpd = hgweb.server.create_server(parentui, repo)
2697 2697
2698 2698 if not ui.verbose: return
2699 2699
2700 2700 if self.httpd.prefix:
2701 2701 prefix = self.httpd.prefix.strip('/') + '/'
2702 2702 else:
2703 2703 prefix = ''
2704 2704
2705 2705 port = ':%d' % self.httpd.port
2706 2706 if port == ':80':
2707 2707 port = ''
2708 2708
2709 2709 bindaddr = self.httpd.addr
2710 2710 if bindaddr == '0.0.0.0':
2711 2711 bindaddr = '*'
2712 2712 elif ':' in bindaddr: # IPv6
2713 2713 bindaddr = '[%s]' % bindaddr
2714 2714
2715 2715 fqaddr = self.httpd.fqaddr
2716 2716 if ':' in fqaddr:
2717 2717 fqaddr = '[%s]' % fqaddr
2718 2718 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2719 2719 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2720 2720
2721 2721 def run(self):
2722 2722 self.httpd.serve_forever()
2723 2723
2724 2724 service = service()
2725 2725
2726 2726 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2727 2727
2728 2728 def status(ui, repo, *pats, **opts):
2729 2729 """show changed files in the working directory
2730 2730
2731 2731 Show status of files in the repository. If names are given, only
2732 2732     files that match are shown. Files that are clean or ignored or the
2733 2733     source of a copy/move operation are not listed unless -c/--clean,
2734 2734 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2735 2735 described with "show only ..." are given, the options -mardu are
2736 2736 used.
2737 2737
2738 2738 Option -q/--quiet hides untracked (unknown and ignored) files
2739 2739 unless explicitly requested with -u/--unknown or -i/--ignored.
2740 2740
2741 2741 NOTE: status may appear to disagree with diff if permissions have
2742 2742 changed or a merge has occurred. The standard diff format does not
2743 2743 report permission changes and diff only reports changes relative
2744 2744 to one merge parent.
2745 2745
2746 2746 If one revision is given, it is used as the base revision.
2747 2747 If two revisions are given, the difference between them is shown.
2748 2748
2749 2749 The codes used to show the status of files are:
2750 2750 M = modified
2751 2751 A = added
2752 2752 R = removed
2753 2753 C = clean
2754 2754 ! = missing (deleted by non-hg command, but still tracked)
2755 2755 ? = not tracked
2756 2756 I = ignored
2757 2757 = the previous added file was copied from here
2758 2758 """
2759 2759
2760 2760 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2761 2761 cwd = (pats and repo.getcwd()) or ''
2762 2762 end = opts.get('print0') and '\0' or '\n'
2763 2763 copy = {}
2764 2764 states = 'modified added removed deleted unknown ignored clean'.split()
2765 2765 show = [k for k in states if opts.get(k)]
2766 2766 if opts.get('all'):
2767 2767 show += ui.quiet and (states[:4] + ['clean']) or states
2768 2768 if not show:
2769 2769 show = ui.quiet and states[:4] or states[:5]
2770 2770
2771 2771 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2772 2772 'ignored' in show, 'clean' in show, 'unknown' in show)
2773 2773 changestates = zip(states, 'MAR!?IC', stat)
2774 2774
2775 2775 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2776 2776 ctxn = repo[nullid]
2777 2777 ctx1 = repo[node1]
2778 2778 ctx2 = repo[node2]
2779 2779 added = stat[1]
2780 2780 if node2 is None:
2781 2781 added = stat[0] + stat[1] # merged?
2782 2782
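                  # key the copy map by whichever side of the pair was added, so each listed file can show its copy source below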
2783 2783 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2784 2784 if k in added:
2785 2785 copy[k] = v
2786 2786 elif v in added:
2787 2787 copy[v] = k
2788 2788
2789 2789 for state, char, files in changestates:
2790 2790 if state in show:
2791 2791 format = "%s %%s%s" % (char, end)
2792 2792 if opts.get('no_status'):
2793 2793 format = "%%s%s" % end
2794 2794
2795 2795 for f in files:
2796 2796 ui.write(format % repo.pathto(f, cwd))
2797 2797 if f in copy:
2798 2798 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2799 2799
2800 2800 def tag(ui, repo, name1, *names, **opts):
2801 2801 """add one or more tags for the current or given revision
2802 2802
2803 2803 Name a particular revision using <name>.
2804 2804
2805 2805 Tags are used to name particular revisions of the repository and are
2806 2806     very useful for comparing different revisions, going back to significant
2807 2807     earlier versions, or marking branch points as releases, etc.
2808 2808
2809 2809 If no revision is given, the parent of the working directory is
2810 2810 used, or tip if no revision is checked out.
2811 2811
2812 2812 To facilitate version control, distribution, and merging of tags,
2813 2813 they are stored as a file named ".hgtags" which is managed
2814 2814 similarly to other project files and can be hand-edited if
2815 2815 necessary. The file '.hg/localtags' is used for local tags (not
2816 2816 shared among repositories).
2817 2817
2818 2818 See 'hg help dates' for a list of formats valid for -d/--date.
2819 2819 """
2820 2820
2821 2821 rev_ = "."
2822 2822 names = (name1,) + names
2823 2823 if len(names) != len(set(names)):
2824 2824 raise util.Abort(_('tag names must be unique'))
2825 2825 for n in names:
2826 2826 if n in ['tip', '.', 'null']:
2827 2827 raise util.Abort(_('the name \'%s\' is reserved') % n)
2828 2828 if opts.get('rev') and opts.get('remove'):
2829 2829 raise util.Abort(_("--rev and --remove are incompatible"))
2830 2830 if opts.get('rev'):
2831 2831 rev_ = opts['rev']
2832 2832 message = opts.get('message')
2833 2833 if opts.get('remove'):
2834 2834 expectedtype = opts.get('local') and 'local' or 'global'
2835 2835 for n in names:
2836 2836 if not repo.tagtype(n):
2837 2837 raise util.Abort(_('tag \'%s\' does not exist') % n)
2838 2838 if repo.tagtype(n) != expectedtype:
2839 2839 if expectedtype == 'global':
2840 2840 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2841 2841 else:
2842 2842 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2843 2843 rev_ = nullid
2844 2844 if not message:
2845 2845 message = _('Removed tag %s') % ', '.join(names)
2846 2846 elif not opts.get('force'):
2847 2847 for n in names:
2848 2848 if n in repo.tags():
2849 2849 raise util.Abort(_('tag \'%s\' already exists '
2850 2850 '(use -f to force)') % n)
2851 2851 if not rev_ and repo.dirstate.parents()[1] != nullid:
2852 2852 raise util.Abort(_('uncommitted merge - please provide a '
2853 2853 'specific revision'))
2854 2854 r = repo[rev_].node()
2855 2855
2856 2856 if not message:
2857 2857 message = (_('Added tag %s for changeset %s') %
2858 2858 (', '.join(names), short(r)))
2859 2859
2860 2860 date = opts.get('date')
2861 2861 if date:
2862 2862 date = util.parsedate(date)
2863 2863
2864 2864 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2865 2865
2866 2866 def tags(ui, repo):
2867 2867 """list repository tags
2868 2868
2869 2869 This lists both regular and local tags. When the -v/--verbose
2870 2870 switch is used, a third column "local" is printed for local tags.
2871 2871 """
2872 2872
2873 2873 l = repo.tagslist()
2874 2874 l.reverse()
2875 2875 hexfunc = ui.debugflag and hex or short
2876 2876 tagtype = ""
2877 2877
2878 2878 for t, n in l:
2879 2879 if ui.quiet:
2880 2880 ui.write("%s\n" % t)
2881 2881 continue
2882 2882
2883 2883 try:
2884 2884 hn = hexfunc(n)
2885 2885 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2886 2886 except error.LookupError:
2887 2887 r = " ?:%s" % hn
2888 2888 else:
2889 2889 spaces = " " * (30 - encoding.colwidth(t))
2890 2890 if ui.verbose:
2891 2891 if repo.tagtype(t) == 'local':
2892 2892 tagtype = " local"
2893 2893 else:
2894 2894 tagtype = ""
2895 2895 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2896 2896
2897 2897 def tip(ui, repo, **opts):
2898 2898 """show the tip revision
2899 2899
2900 2900 The tip revision (usually just called the tip) is the most
2901 2901 recently added changeset in the repository, the most recently
2902 2902 changed head.
2903 2903
2904 2904 If you have just made a commit, that commit will be the tip. If
2905 2905 you have just pulled changes from another repository, the tip of
2906 2906 that repository becomes the current tip. The "tip" tag is special
2907 2907 and cannot be renamed or assigned to a different changeset.
2908 2908 """
2909 2909 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2910 2910
2911 2911 def unbundle(ui, repo, fname1, *fnames, **opts):
2912 2912 """apply one or more changegroup files
2913 2913
2914 2914 Apply one or more compressed changegroup files generated by the
2915 2915 bundle command.
2916 2916 """
2917 2917 fnames = (fname1,) + fnames
2918 2918
2919 2919 lock = repo.lock()
2920 2920 try:
2921 2921 for fname in fnames:
2922 2922 f = url.open(ui, fname)
2923 2923 gen = changegroup.readbundle(f, fname)
2924 2924 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2925 2925 finally:
2926 2926 lock.release()
2927 2927
2928 2928 return postincoming(ui, repo, modheads, opts.get('update'), None)
2929 2929
2930 2930 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2931 2931 """update working directory
2932 2932
2933 2933 Update the repository's working directory to the specified
2934 2934 revision, or the tip of the current branch if none is specified.
2935 2935 Use null as the revision to remove the working copy (like 'hg
2936 2936 clone -U').
2937 2937
2938 2938 When the working directory contains no uncommitted changes, it
2939 2939 will be replaced by the state of the requested revision from the
2940 2940 repository. When the requested revision is on a different branch,
2941 2941 the working directory will additionally be switched to that
2942 2942 branch.
2943 2943
2944 2944 When there are uncommitted changes, use option -C to discard them,
2945 2945 forcibly replacing the state of the working directory with the
2946 2946 requested revision.
2947 2947
2948 2948 When there are uncommitted changes and option -C is not used, and
2949 2949 the parent revision and requested revision are on the same branch,
2950 2950 and one of them is an ancestor of the other, then the new working
2951 2951 directory will contain the requested revision merged with the
2952 2952 uncommitted changes. Otherwise, the update will fail with a
2953 2953 suggestion to use 'merge' or 'update -C' instead.
2954 2954
2955 2955 If you want to update just one file to an older revision, use
2956 2956 revert.
2957 2957
2958 2958 See 'hg help dates' for a list of formats valid for -d/--date.
2959 2959 """
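              # illustrative usage only: hg update -C default  (discard uncommitted changes, check out the "default" branch tip)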
2960 2960 if rev and node:
2961 2961 raise util.Abort(_("please specify just one revision"))
2962 2962
2963 2963 if not rev:
2964 2964 rev = node
2965 2965
2966 2966 if date:
2967 2967 if rev:
2968 2968 raise util.Abort(_("you can't specify a revision and a date"))
2969 2969 rev = cmdutil.finddate(ui, repo, date)
2970 2970
2971 2971 if clean:
2972 2972 return hg.clean(repo, rev)
2973 2973 else:
2974 2974 return hg.update(repo, rev)
2975 2975
2976 2976 def verify(ui, repo):
2977 2977 """verify the integrity of the repository
2978 2978
2979 2979 Verify the integrity of the current repository.
2980 2980
2981 2981 This will perform an extensive check of the repository's
2982 2982 integrity, validating the hashes and checksums of each entry in
2983 2983 the changelog, manifest, and tracked files, as well as the
2984 2984 integrity of their crosslinks and indices.
2985 2985 """
2986 2986 return hg.verify(repo)
2987 2987
2988 2988 def version_(ui):
2989 2989 """output version and copyright information"""
2990 2990 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2991 2991 % util.version())
2992 2992 ui.status(_(
2993 2993 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2994 2994 "This is free software; see the source for copying conditions. "
2995 2995 "There is NO\nwarranty; "
2996 2996 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2997 2997 ))
2998 2998
2999 2999 # Command options and aliases are listed here, alphabetically
3000 3000
3001 3001 globalopts = [
3002 3002 ('R', 'repository', '',
3003 3003 _('repository root directory or symbolic path name')),
3004 3004 ('', 'cwd', '', _('change working directory')),
3005 3005 ('y', 'noninteractive', None,
3006 3006 _('do not prompt, assume \'yes\' for any required answers')),
3007 3007 ('q', 'quiet', None, _('suppress output')),
3008 3008 ('v', 'verbose', None, _('enable additional output')),
3009 3009 ('', 'config', [], _('set/override config option')),
3010 3010 ('', 'debug', None, _('enable debugging output')),
3011 3011 ('', 'debugger', None, _('start debugger')),
3012 3012 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3013 3013 ('', 'encodingmode', encoding.encodingmode,
3014 3014 _('set the charset encoding mode')),
3015 3015 ('', 'traceback', None, _('print traceback on exception')),
3016 3016 ('', 'time', None, _('time how long the command takes')),
3017 3017 ('', 'profile', None, _('print command execution profile')),
3018 3018 ('', 'version', None, _('output version information and exit')),
3019 3019 ('h', 'help', None, _('display help and exit')),
3020 3020 ]
3021 3021
3022 3022 dryrunopts = [('n', 'dry-run', None,
3023 3023 _('do not perform actions, just print output'))]
3024 3024
3025 3025 remoteopts = [
3026 3026 ('e', 'ssh', '', _('specify ssh command to use')),
3027 3027 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3028 3028 ]
3029 3029
3030 3030 walkopts = [
3031 3031 ('I', 'include', [], _('include names matching the given patterns')),
3032 3032 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3033 3033 ]
3034 3034
3035 3035 commitopts = [
3036 3036 ('m', 'message', '', _('use <text> as commit message')),
3037 3037 ('l', 'logfile', '', _('read commit message from <file>')),
3038 3038 ]
3039 3039
3040 3040 commitopts2 = [
3041 3041 ('d', 'date', '', _('record datecode as commit date')),
3042 3042 ('u', 'user', '', _('record user as committer')),
3043 3043 ]
3044 3044
3045 3045 templateopts = [
3046 3046 ('', 'style', '', _('display using template map file')),
3047 3047 ('', 'template', '', _('display with template')),
3048 3048 ]
3049 3049
3050 3050 logopts = [
3051 3051 ('p', 'patch', None, _('show patch')),
3052 3052 ('g', 'git', None, _('use git extended diff format')),
3053 3053 ('l', 'limit', '', _('limit number of changes displayed')),
3054 3054 ('M', 'no-merges', None, _('do not show merges')),
3055 3055 ] + templateopts
3056 3056
3057 3057 diffopts = [
3058 3058 ('a', 'text', None, _('treat all files as text')),
3059 3059 ('g', 'git', None, _('use git extended diff format')),
3060 3060 ('', 'nodates', None, _("don't include dates in diff headers"))
3061 3061 ]
3062 3062
3063 3063 diffopts2 = [
3064 3064 ('p', 'show-function', None, _('show which function each change is in')),
3065 3065 ('w', 'ignore-all-space', None,
3066 3066 _('ignore white space when comparing lines')),
3067 3067 ('b', 'ignore-space-change', None,
3068 3068 _('ignore changes in the amount of white space')),
3069 3069 ('B', 'ignore-blank-lines', None,
3070 3070 _('ignore changes whose lines are all blank')),
3071 3071 ('U', 'unified', '', _('number of lines of context to show'))
3072 3072 ]
3073 3073
3074 3074 similarityopts = [
3075 3075 ('s', 'similarity', '',
3076 3076 _('guess renamed files by similarity (0<=s<=100)'))
3077 3077 ]
3078 3078
3079 3079 table = {
3080 3080 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3081 3081 "addremove":
3082 3082 (addremove, similarityopts + walkopts + dryrunopts,
3083 3083 _('[OPTION]... [FILE]...')),
3084 3084 "^annotate|blame":
3085 3085 (annotate,
3086 3086 [('r', 'rev', '', _('annotate the specified revision')),
3087 3087 ('f', 'follow', None, _('follow file copies and renames')),
3088 3088 ('a', 'text', None, _('treat all files as text')),
3089 3089 ('u', 'user', None, _('list the author (long with -v)')),
3090 3090 ('d', 'date', None, _('list the date (short with -q)')),
3091 3091 ('n', 'number', None, _('list the revision number (default)')),
3092 3092 ('c', 'changeset', None, _('list the changeset')),
3093 3093 ('l', 'line-number', None,
3094 3094 _('show line number at the first appearance'))
3095 3095 ] + walkopts,
3096 3096 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3097 3097 "archive":
3098 3098 (archive,
3099 3099 [('', 'no-decode', None, _('do not pass files through decoders')),
3100 3100 ('p', 'prefix', '', _('directory prefix for files in archive')),
3101 3101 ('r', 'rev', '', _('revision to distribute')),
3102 3102 ('t', 'type', '', _('type of distribution to create')),
3103 3103 ] + walkopts,
3104 3104 _('[OPTION]... DEST')),
3105 3105 "backout":
3106 3106 (backout,
3107 3107 [('', 'merge', None,
3108 3108 _('merge with old dirstate parent after backout')),
3109 3109 ('', 'parent', '', _('parent to choose when backing out merge')),
3110 3110 ('r', 'rev', '', _('revision to backout')),
3111 3111 ] + walkopts + commitopts + commitopts2,
3112 3112 _('[OPTION]... [-r] REV')),
3113 3113 "bisect":
3114 3114 (bisect,
3115 3115 [('r', 'reset', False, _('reset bisect state')),
3116 3116 ('g', 'good', False, _('mark changeset good')),
3117 3117 ('b', 'bad', False, _('mark changeset bad')),
3118 3118 ('s', 'skip', False, _('skip testing changeset')),
3119 3119 ('c', 'command', '', _('use command to check changeset state')),
3120 3120 ('U', 'noupdate', False, _('do not update to target'))],
3121 3121 _("[-gbsr] [-c CMD] [REV]")),
3122 3122 "branch":
3123 3123 (branch,
3124 3124 [('f', 'force', None,
3125 3125 _('set branch name even if it shadows an existing branch')),
3126 3126 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3127 3127 _('[-fC] [NAME]')),
3128 3128 "branches":
3129 3129 (branches,
3130 3130 [('a', 'active', False,
3131 3131 _('show only branches that have unmerged heads'))],
3132 3132 _('[-a]')),
3133 3133 "bundle":
3134 3134 (bundle,
3135 3135 [('f', 'force', None,
3136 3136 _('run even when remote repository is unrelated')),
3137 3137 ('r', 'rev', [],
3138 3138 _('a changeset up to which you would like to bundle')),
3139 3139 ('', 'base', [],
3140 3140 _('a base changeset to specify instead of a destination')),
3141 3141 ('a', 'all', None, _('bundle all changesets in the repository')),
3142 3142 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3143 3143 ] + remoteopts,
3144 3144 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3145 3145 "cat":
3146 3146 (cat,
3147 3147 [('o', 'output', '', _('print output to file with formatted name')),
3148 3148 ('r', 'rev', '', _('print the given revision')),
3149 3149 ('', 'decode', None, _('apply any matching decode filter')),
3150 3150 ] + walkopts,
3151 3151 _('[OPTION]... FILE...')),
3152 3152 "^clone":
3153 3153 (clone,
3154 3154 [('U', 'noupdate', None,
3155 3155 _('the clone will only contain a repository (no working copy)')),
3156 3156 ('r', 'rev', [],
3157 3157 _('a changeset you would like to have after cloning')),
3158 3158 ('', 'pull', None, _('use pull protocol to copy metadata')),
3159 3159 ('', 'uncompressed', None,
3160 3160 _('use uncompressed transfer (fast over LAN)')),
3161 3161 ] + remoteopts,
3162 3162 _('[OPTION]... SOURCE [DEST]')),
3163 3163 "^commit|ci":
3164 3164 (commit,
3165 3165 [('A', 'addremove', None,
3166 3166 _('mark new/missing files as added/removed before committing')),
3167 3167 ('', 'close-branch', None,
3168 3168 _('mark a branch as closed, hiding it from the branch list')),
3169 3169 ] + walkopts + commitopts + commitopts2,
3170 3170 _('[OPTION]... [FILE]...')),
3171 3171 "copy|cp":
3172 3172 (copy,
3173 3173 [('A', 'after', None, _('record a copy that has already occurred')),
3174 3174 ('f', 'force', None,
3175 3175 _('forcibly copy over an existing managed file')),
3176 3176 ] + walkopts + dryrunopts,
3177 3177 _('[OPTION]... [SOURCE]... DEST')),
3178 3178 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3179 3179 "debugcheckstate": (debugcheckstate, []),
3180 3180 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3181 3181 "debugcomplete":
3182 3182 (debugcomplete,
3183 3183 [('o', 'options', None, _('show the command options'))],
3184 3184 _('[-o] CMD')),
3185 3185 "debugdate":
3186 3186 (debugdate,
3187 3187 [('e', 'extended', None, _('try extended date formats'))],
3188 3188 _('[-e] DATE [RANGE]')),
3189 3189 "debugdata": (debugdata, [], _('FILE REV')),
3190 3190 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3191 3191 "debugindex": (debugindex, [], _('FILE')),
3192 3192 "debugindexdot": (debugindexdot, [], _('FILE')),
3193 3193 "debuginstall": (debuginstall, []),
3194 3194 "debugrawcommit|rawcommit":
3195 3195 (rawcommit,
3196 3196 [('p', 'parent', [], _('parent')),
3197 3197 ('F', 'files', '', _('file list'))
3198 3198 ] + commitopts + commitopts2,
3199 3199 _('[OPTION]... [FILE]...')),
3200 3200 "debugrebuildstate":
3201 3201 (debugrebuildstate,
3202 3202 [('r', 'rev', '', _('revision to rebuild to'))],
3203 3203 _('[-r REV] [REV]')),
3204 3204 "debugrename":
3205 3205 (debugrename,
3206 3206 [('r', 'rev', '', _('revision to debug'))],
3207 3207 _('[-r REV] FILE')),
3208 3208 "debugsetparents":
3209 3209 (debugsetparents, [], _('REV1 [REV2]')),
3210 3210 "debugstate":
3211 3211 (debugstate,
3212 3212 [('', 'nodates', None, _('do not display the saved mtime'))],
3213 3213 _('[OPTION]...')),
3214 3214 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3215 3215 "^diff":
3216 3216 (diff,
3217 3217 [('r', 'rev', [], _('revision')),
3218 3218 ('c', 'change', '', _('change made by revision'))
3219 3219 ] + diffopts + diffopts2 + walkopts,
3220 3220 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3221 3221 "^export":
3222 3222 (export,
3223 3223 [('o', 'output', '', _('print output to file with formatted name')),
3224 3224 ('', 'switch-parent', None, _('diff against the second parent'))
3225 3225 ] + diffopts,
3226 3226 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3227 3227 "grep":
3228 3228 (grep,
3229 3229 [('0', 'print0', None, _('end fields with NUL')),
3230 3230 ('', 'all', None, _('print all revisions that match')),
3231 3231 ('f', 'follow', None,
3232 3232 _('follow changeset history, or file history across copies and renames')),
3233 3233 ('i', 'ignore-case', None, _('ignore case when matching')),
3234 3234 ('l', 'files-with-matches', None,
3235 3235 _('print only filenames and revisions that match')),
3236 3236 ('n', 'line-number', None, _('print matching line numbers')),
3237 3237 ('r', 'rev', [], _('search in given revision range')),
3238 3238 ('u', 'user', None, _('list the author (long with -v)')),
3239 3239 ('d', 'date', None, _('list the date (short with -q)')),
3240 3240 ] + walkopts,
3241 3241 _('[OPTION]... PATTERN [FILE]...')),
3242 3242 "heads":
3243 3243 (heads,
3244 3244 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3245 3245 ('a', 'active', False,
3246 3246 _('show only the active heads from open branches')),
3247 3247 ] + templateopts,
3248 3248 _('[-r REV] [REV]...')),
3249 3249 "help": (help_, [], _('[TOPIC]')),
3250 3250 "identify|id":
3251 3251 (identify,
3252 3252 [('r', 'rev', '', _('identify the specified revision')),
3253 3253 ('n', 'num', None, _('show local revision number')),
3254 3254 ('i', 'id', None, _('show global revision id')),
3255 3255 ('b', 'branch', None, _('show branch')),
3256 3256 ('t', 'tags', None, _('show tags'))],
3257 3257 _('[-nibt] [-r REV] [SOURCE]')),
3258 3258 "import|patch":
3259 3259 (import_,
3260 3260 [('p', 'strip', 1,
3261 3261 _('directory strip option for patch. This has the same '
3262 3262 'meaning as the corresponding patch option')),
3263 3263 ('b', 'base', '', _('base path')),
3264 3264 ('f', 'force', None,
3265 3265 _('skip check for outstanding uncommitted changes')),
3266 3266 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3267 3267 ('', 'exact', None,
3268 3268 _('apply patch to the nodes from which it was generated')),
3269 3269 ('', 'import-branch', None,
3270 3270 _('use any branch information in patch (implied by --exact)'))] +
3271 3271 commitopts + commitopts2 + similarityopts,
3272 3272 _('[OPTION]... PATCH...')),
3273 3273 "incoming|in":
3274 3274 (incoming,
3275 3275 [('f', 'force', None,
3276 3276 _('run even when remote repository is unrelated')),
3277 3277 ('n', 'newest-first', None, _('show newest record first')),
3278 3278 ('', 'bundle', '', _('file to store the bundles into')),
3279 3279 ('r', 'rev', [],
3280 3280 _('a specific revision up to which you would like to pull')),
3281 3281 ] + logopts + remoteopts,
3282 3282 _('[-p] [-n] [-M] [-f] [-r REV]...'
3283 3283 ' [--bundle FILENAME] [SOURCE]')),
3284 3284 "^init":
3285 3285 (init,
3286 3286 remoteopts,
3287 3287 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3288 3288 "locate":
3289 3289 (locate,
3290 3290 [('r', 'rev', '', _('search the repository as it stood at REV')),
3291 3291 ('0', 'print0', None,
3292 3292 _('end filenames with NUL, for use with xargs')),
3293 3293 ('f', 'fullpath', None,
3294 3294 _('print complete paths from the filesystem root')),
3295 3295 ] + walkopts,
3296 3296 _('[OPTION]... [PATTERN]...')),
3297 3297 "^log|history":
3298 3298 (log,
3299 3299 [('f', 'follow', None,
3300 3300 _('follow changeset history, or file history across copies and renames')),
3301 3301 ('', 'follow-first', None,
3302 3302 _('only follow the first parent of merge changesets')),
3303 3303 ('d', 'date', '', _('show revisions matching date spec')),
3304 3304 ('C', 'copies', None, _('show copied files')),
3305 3305 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3306 3306 ('r', 'rev', [], _('show the specified revision or range')),
3307 3307 ('', 'removed', None, _('include revisions where files were removed')),
3308 3308 ('m', 'only-merges', None, _('show only merges')),
3309 3309 ('u', 'user', [], _('revisions committed by user')),
3310 3310 ('b', 'only-branch', [],
3311 3311 _('show only changesets within the given named branch')),
3312 3312 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3313 3313 ] + logopts + walkopts,
3314 3314 _('[OPTION]... [FILE]')),
3315 3315 "manifest":
3316 3316 (manifest,
3317 3317 [('r', 'rev', '', _('revision to display'))],
3318 3318 _('[-r REV]')),
3319 3319 "^merge":
3320 3320 (merge,
3321 3321 [('f', 'force', None, _('force a merge with outstanding changes')),
3322 3322 ('r', 'rev', '', _('revision to merge')),
3323 3323 ],
3324 3324 _('[-f] [[-r] REV]')),
3325 3325 "outgoing|out":
3326 3326 (outgoing,
3327 3327 [('f', 'force', None,
3328 3328 _('run even when remote repository is unrelated')),
3329 3329 ('r', 'rev', [],
3330 3330 _('a specific revision up to which you would like to push')),
3331 3331 ('n', 'newest-first', None, _('show newest record first')),
3332 3332 ] + logopts + remoteopts,
3333 3333 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3334 3334 "^parents":
3335 3335 (parents,
3336 3336 [('r', 'rev', '', _('show parents from the specified revision')),
3337 3337 ] + templateopts,
3338 3338 _('hg parents [-r REV] [FILE]')),
3339 3339 "paths": (paths, [], _('[NAME]')),
3340 3340 "^pull":
3341 3341 (pull,
3342 3342 [('u', 'update', None,
3343 3343 _('update to new tip if changesets were pulled')),
3344 3344 ('f', 'force', None,
3345 3345 _('run even when remote repository is unrelated')),
3346 3346 ('r', 'rev', [],
3347 3347 _('a specific revision up to which you would like to pull')),
3348 3348 ] + remoteopts,
3349 3349 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3350 3350 "^push":
3351 3351 (push,
3352 3352 [('f', 'force', None, _('force push')),
3353 3353 ('r', 'rev', [],
3354 3354 _('a specific revision up to which you would like to push')),
3355 3355 ] + remoteopts,
3356 3356 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3357 3357 "recover": (recover, []),
3358 3358 "^remove|rm":
3359 3359 (remove,
3360 3360 [('A', 'after', None, _('record delete for missing files')),
3361 3361 ('f', 'force', None,
3362 3362 _('remove (and delete) file even if added or modified')),
3363 3363 ] + walkopts,
3364 3364 _('[OPTION]... FILE...')),
3365 3365 "rename|mv":
3366 3366 (rename,
3367 3367 [('A', 'after', None, _('record a rename that has already occurred')),
3368 3368 ('f', 'force', None,
3369 3369 _('forcibly copy over an existing managed file')),
3370 3370 ] + walkopts + dryrunopts,
3371 3371 _('[OPTION]... SOURCE... DEST')),
3372 3372 "resolve":
3373 3373 (resolve,
3374 3374 [('a', 'all', None, _('remerge all unresolved files')),
3375 3375 ('l', 'list', None, _('list state of files needing merge')),
3376 3376 ('m', 'mark', None, _('mark files as resolved')),
3377 3377 ('u', 'unmark', None, _('unmark files as resolved'))]
3378 3378 + walkopts,
3379 3379 _('[OPTION]... [FILE]...')),
3380 3380 "revert":
3381 3381 (revert,
3382 3382 [('a', 'all', None, _('revert all changes when no arguments given')),
3383 3383 ('d', 'date', '', _('tipmost revision matching date')),
3384 3384 ('r', 'rev', '', _('revision to revert to')),
3385 3385 ('', 'no-backup', None, _('do not save backup copies of files')),
3386 3386 ] + walkopts + dryrunopts,
3387 3387 _('[OPTION]... [-r REV] [NAME]...')),
3388 3388 "rollback": (rollback, []),
3389 3389 "root": (root, []),
3390 3390 "^serve":
3391 3391 (serve,
3392 3392 [('A', 'accesslog', '', _('name of access log file to write to')),
3393 3393 ('d', 'daemon', None, _('run server in background')),
3394 3394 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3395 3395 ('E', 'errorlog', '', _('name of error log file to write to')),
3396 3396 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3397 3397 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3398 3398 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3399 3399 ('n', 'name', '',
3400 3400 _('name to show in web pages (default: working directory)')),
3401 3401 ('', 'webdir-conf', '', _('name of the webdir config file'
3402 3402 ' (serve more than one repository)')),
3403 3403 ('', 'pid-file', '', _('name of file to write process ID to')),
3404 3404 ('', 'stdio', None, _('for remote clients')),
3405 3405 ('t', 'templates', '', _('web templates to use')),
3406 3406 ('', 'style', '', _('template style to use')),
3407 3407 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3408 3408 ('', 'certificate', '', _('SSL certificate file'))],
3409 3409 _('[OPTION]...')),
3410 3410 "showconfig|debugconfig":
3411 3411 (showconfig,
3412 3412 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3413 3413 _('[-u] [NAME]...')),
3414 3414 "^status|st":
3415 3415 (status,
3416 3416 [('A', 'all', None, _('show status of all files')),
3417 3417 ('m', 'modified', None, _('show only modified files')),
3418 3418 ('a', 'added', None, _('show only added files')),
3419 3419 ('r', 'removed', None, _('show only removed files')),
3420 3420 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3421 3421 ('c', 'clean', None, _('show only files without changes')),
3422 3422 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3423 3423 ('i', 'ignored', None, _('show only ignored files')),
3424 3424 ('n', 'no-status', None, _('hide status prefix')),
3425 3425 ('C', 'copies', None, _('show source of copied files')),
3426 3426 ('0', 'print0', None,
3427 3427 _('end filenames with NUL, for use with xargs')),
3428 3428 ('', 'rev', [], _('show difference from revision')),
3429 3429 ] + walkopts,
3430 3430 _('[OPTION]... [FILE]...')),
3431 3431 "tag":
3432 3432 (tag,
3433 3433 [('f', 'force', None, _('replace existing tag')),
3434 3434 ('l', 'local', None, _('make the tag local')),
3435 3435 ('r', 'rev', '', _('revision to tag')),
3436 3436 ('', 'remove', None, _('remove a tag')),
3437 3437 # -l/--local is already there, commitopts cannot be used
3438 3438 ('m', 'message', '', _('use <text> as commit message')),
3439 3439 ] + commitopts2,
3440 3440 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3441 3441 "tags": (tags, []),
3442 3442 "tip":
3443 3443 (tip,
3444 3444 [('p', 'patch', None, _('show patch')),
3445 3445 ('g', 'git', None, _('use git extended diff format')),
3446 3446 ] + templateopts,
3447 3447 _('[-p]')),
3448 3448 "unbundle":
3449 3449 (unbundle,
3450 3450 [('u', 'update', None,
3451 3451 _('update to new tip if changesets were unbundled'))],
3452 3452 _('[-u] FILE...')),
3453 3453 "^update|up|checkout|co":
3454 3454 (update,
3455 3455 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3456 3456 ('d', 'date', '', _('tipmost revision matching date')),
3457 3457 ('r', 'rev', '', _('revision'))],
3458 3458 _('[-C] [-d DATE] [[-r] REV]')),
3459 3459 "verify": (verify, []),
3460 3460 "version": (version_, []),
3461 3461 }
3462 3462
3463 3463 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3464 3464 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3465 3465 optionalrepo = ("identify paths serve showconfig debugancestor")
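Every entry above follows one shape: the key is a '|'-separated alias list, optionally prefixed with '^' to mark the command for the short help listing, and the value is a tuple of (function, option list, synopsis) whose trailing members may be omitted (compare "recover" or "tags" with the longer entries). A minimal, hypothetical sketch of normalising such an entry (this is not Mercurial's own dispatch code):

    # hypothetical helper: normalise a table entry into
    # (aliases, function, options, synopsis)
    def unpack(key, entry):
        aliases = key.lstrip("^").split("|")
        func = entry[0]
        opts = entry[1] if len(entry) > 1 else []
        synopsis = entry[2] if len(entry) > 2 else ''
        return aliases, func, opts, synopsis

    # exercised on a toy entry shaped like the ones above
    aliases, func, opts, synopsis = unpack(
        "^update|up|checkout|co",
        (lambda *args: None,
         [('C', 'clean', None, 'overwrite locally modified files (no backup)')],
         '[-C] [-d DATE] [[-r] REV]'))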
@@ -1,2174 +1,2175 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import lock, transaction, stat, errno, ui, store, encoding
13 13 import os, time, util, extensions, hook, inspect, error
14 14 import match as match_
15 15 import merge as merge_
16 16
17 17 from lock import release
18 18
19 19 class localrepository(repo.repository):
20 20 capabilities = set(('lookup', 'changegroupsubset'))
21 21 supported = ('revlogv1', 'store', 'fncache')
22 22
23 def __init__(self, parentui, path=None, create=0):
23 def __init__(self, baseui, path=None, create=0):
24 24 repo.repository.__init__(self)
25 25 self.root = os.path.realpath(path)
26 26 self.path = os.path.join(self.root, ".hg")
27 27 self.origroot = path
28 28 self.opener = util.opener(self.path)
29 29 self.wopener = util.opener(self.root)
30 30
31 31 if not os.path.isdir(self.path):
32 32 if create:
33 33 if not os.path.exists(path):
34 34 os.mkdir(path)
35 35 os.mkdir(self.path)
36 36 requirements = ["revlogv1"]
37 if parentui.configbool('format', 'usestore', True):
37 if baseui.configbool('format', 'usestore', True):
38 38 os.mkdir(os.path.join(self.path, "store"))
39 39 requirements.append("store")
40 if parentui.configbool('format', 'usefncache', True):
40 if baseui.configbool('format', 'usefncache', True):
41 41 requirements.append("fncache")
42 42 # create an invalid changelog
43 43 self.opener("00changelog.i", "a").write(
44 44 '\0\0\0\2' # represents revlogv2
45 45 ' dummy changelog to prevent using the old repo layout'
46 46 )
47 47 reqfile = self.opener("requires", "w")
48 48 for r in requirements:
49 49 reqfile.write("%s\n" % r)
50 50 reqfile.close()
51 51 else:
52 52 raise error.RepoError(_("repository %s not found") % path)
53 53 elif create:
54 54 raise error.RepoError(_("repository %s already exists") % path)
55 55 else:
56 56 # find requirements
57 57 requirements = []
58 58 try:
59 59 requirements = self.opener("requires").read().splitlines()
60 60 for r in requirements:
61 61 if r not in self.supported:
62 62 raise error.RepoError(_("requirement '%s' not supported") % r)
63 63 except IOError, inst:
64 64 if inst.errno != errno.ENOENT:
65 65 raise
66 66
67 67 self.store = store.store(requirements, self.path, util.opener)
68 68 self.spath = self.store.path
69 69 self.sopener = self.store.opener
70 70 self.sjoin = self.store.join
71 71 self.opener.createmode = self.store.createmode
72 72
73 self.ui = ui.ui(parentui=parentui)
73 self.baseui = baseui
74 self.ui = baseui.copy()
74 75 try:
75 76 self.ui.readconfig(self.join("hgrc"), self.root)
76 77 extensions.loadall(self.ui)
77 78 except IOError:
78 79 pass
79 80
80 81 self.tagscache = None
81 82 self._tagstypecache = None
82 83 self.branchcache = None
83 84 self._ubranchcache = None # UTF-8 version of branchcache
84 85 self._branchcachetip = None
85 86 self.nodetagscache = None
86 87 self.filterpats = {}
87 88 self._datafilters = {}
88 89 self._transref = self._lockref = self._wlockref = None
89 90
90 91 def __getattr__(self, name):
91 92 if name == 'changelog':
92 93 self.changelog = changelog.changelog(self.sopener)
93 94 if 'HG_PENDING' in os.environ:
94 95 p = os.environ['HG_PENDING']
95 96 if p.startswith(self.root):
96 97 self.changelog.readpending('00changelog.i.a')
97 98 self.sopener.defversion = self.changelog.version
98 99 return self.changelog
99 100 if name == 'manifest':
100 101 self.changelog
101 102 self.manifest = manifest.manifest(self.sopener)
102 103 return self.manifest
103 104 if name == 'dirstate':
104 105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
105 106 return self.dirstate
106 107 else:
107 108 raise AttributeError(name)
108 109
109 110 def __getitem__(self, changeid):
110 111 if changeid == None:
111 112 return context.workingctx(self)
112 113 return context.changectx(self, changeid)
113 114
114 115 def __nonzero__(self):
115 116 return True
116 117
117 118 def __len__(self):
118 119 return len(self.changelog)
119 120
120 121 def __iter__(self):
121 122 for i in xrange(len(self)):
122 123 yield i
123 124
124 125 def url(self):
125 126 return 'file:' + self.root
126 127
127 128 def hook(self, name, throw=False, **args):
128 129 return hook.hook(self.ui, self, name, throw, **args)
129 130
130 131 tag_disallowed = ':\r\n'
131 132
132 133 def _tag(self, names, node, message, local, user, date, parent=None,
133 134 extra={}):
134 135 use_dirstate = parent is None
135 136
136 137 if isinstance(names, str):
137 138 allchars = names
138 139 names = (names,)
139 140 else:
140 141 allchars = ''.join(names)
141 142 for c in self.tag_disallowed:
142 143 if c in allchars:
143 144 raise util.Abort(_('%r cannot be used in a tag name') % c)
144 145
145 146 for name in names:
146 147 self.hook('pretag', throw=True, node=hex(node), tag=name,
147 148 local=local)
148 149
149 150 def writetags(fp, names, munge, prevtags):
150 151 fp.seek(0, 2)
151 152 if prevtags and prevtags[-1] != '\n':
152 153 fp.write('\n')
153 154 for name in names:
154 155 m = munge and munge(name) or name
155 156 if self._tagstypecache and name in self._tagstypecache:
156 157 old = self.tagscache.get(name, nullid)
157 158 fp.write('%s %s\n' % (hex(old), m))
158 159 fp.write('%s %s\n' % (hex(node), m))
159 160 fp.close()
160 161
161 162 prevtags = ''
162 163 if local:
163 164 try:
164 165 fp = self.opener('localtags', 'r+')
165 166 except IOError:
166 167 fp = self.opener('localtags', 'a')
167 168 else:
168 169 prevtags = fp.read()
169 170
170 171 # local tags are stored in the current charset
171 172 writetags(fp, names, None, prevtags)
172 173 for name in names:
173 174 self.hook('tag', node=hex(node), tag=name, local=local)
174 175 return
175 176
176 177 if use_dirstate:
177 178 try:
178 179 fp = self.wfile('.hgtags', 'rb+')
179 180 except IOError:
180 181 fp = self.wfile('.hgtags', 'ab')
181 182 else:
182 183 prevtags = fp.read()
183 184 else:
184 185 try:
185 186 prevtags = self.filectx('.hgtags', parent).data()
186 187 except error.LookupError:
187 188 pass
188 189 fp = self.wfile('.hgtags', 'wb')
189 190 if prevtags:
190 191 fp.write(prevtags)
191 192
192 193 # committed tags are stored in UTF-8
193 194 writetags(fp, names, encoding.fromlocal, prevtags)
194 195
195 196 if use_dirstate and '.hgtags' not in self.dirstate:
196 197 self.add(['.hgtags'])
197 198
198 199 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
199 200 extra=extra)
200 201
201 202 for name in names:
202 203 self.hook('tag', node=hex(node), tag=name, local=local)
203 204
204 205 return tagnode
205 206
206 207 def tag(self, names, node, message, local, user, date):
207 208 '''tag a revision with one or more symbolic names.
208 209
209 210 names is a list of strings or, when adding a single tag, names may be a
210 211 string.
211 212
212 213 if local is True, the tags are stored in a per-repository file.
213 214 otherwise, they are stored in the .hgtags file, and a new
214 215 changeset is committed with the change.
215 216
216 217 keyword arguments:
217 218
218 219 local: whether to store tags in non-version-controlled file
219 220 (default False)
220 221
221 222 message: commit message to use if committing
222 223
223 224 user: name of user to use if committing
224 225
225 226 date: date tuple to use if committing'''
226 227
227 228 for x in self.status()[:5]:
228 229 if '.hgtags' in x:
229 230 raise util.Abort(_('working copy of .hgtags is changed '
230 231 '(please commit .hgtags manually)'))
231 232
232 233 self.tags() # instantiate the cache
233 234 self._tag(names, node, message, local, user, date)
234 235
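A hedged usage sketch of the tag() contract documented above; the tag name, message, and repository path are illustrative:

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    repo.tag('v1.0',                    # a single name, or a list of names
             repo.changelog.tip(),      # node to tag
             'Added tag v1.0 for tip',  # commit message (used when local=False)
             False,                     # local=False: record in .hgtags and commit
             None, None)                # user and date fall back to the defaults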
235 236 def tags(self):
236 237 '''return a mapping of tag to node'''
237 238 if self.tagscache:
238 239 return self.tagscache
239 240
240 241 globaltags = {}
241 242 tagtypes = {}
242 243
243 244 def readtags(lines, fn, tagtype):
244 245 filetags = {}
245 246 count = 0
246 247
247 248 def warn(msg):
248 249 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
249 250
250 251 for l in lines:
251 252 count += 1
252 253 if not l:
253 254 continue
254 255 s = l.split(" ", 1)
255 256 if len(s) != 2:
256 257 warn(_("cannot parse entry"))
257 258 continue
258 259 node, key = s
259 260 key = encoding.tolocal(key.strip()) # stored in UTF-8
260 261 try:
261 262 bin_n = bin(node)
262 263 except TypeError:
263 264 warn(_("node '%s' is not well formed") % node)
264 265 continue
265 266 if bin_n not in self.changelog.nodemap:
266 267 warn(_("tag '%s' refers to unknown node") % key)
267 268 continue
268 269
269 270 h = []
270 271 if key in filetags:
271 272 n, h = filetags[key]
272 273 h.append(n)
273 274 filetags[key] = (bin_n, h)
274 275
275 276 for k, nh in filetags.iteritems():
276 277 if k not in globaltags:
277 278 globaltags[k] = nh
278 279 tagtypes[k] = tagtype
279 280 continue
280 281
281 282 # we prefer the global tag if:
281 282 # it supersedes us OR
282 283 # mutual supersedes and it has a higher rank
284 285 # otherwise we win because we're tip-most
285 286 an, ah = nh
286 287 bn, bh = globaltags[k]
287 288 if (bn != an and an in bh and
288 289 (bn not in ah or len(bh) > len(ah))):
289 290 an = bn
290 291 ah.extend([n for n in bh if n not in ah])
291 292 globaltags[k] = an, ah
292 293 tagtypes[k] = tagtype
293 294
294 295 # read the tags file from each head, ending with the tip
295 296 f = None
296 297 for rev, node, fnode in self._hgtagsnodes():
297 298 f = (f and f.filectx(fnode) or
298 299 self.filectx('.hgtags', fileid=fnode))
299 300 readtags(f.data().splitlines(), f, "global")
300 301
301 302 try:
302 303 data = encoding.fromlocal(self.opener("localtags").read())
303 304 # localtags are stored in the local character set
304 305 # while the internal tag table is stored in UTF-8
305 306 readtags(data.splitlines(), "localtags", "local")
306 307 except IOError:
307 308 pass
308 309
309 310 self.tagscache = {}
310 311 self._tagstypecache = {}
311 312 for k, nh in globaltags.iteritems():
312 313 n = nh[0]
313 314 if n != nullid:
314 315 self.tagscache[k] = n
315 316 self._tagstypecache[k] = tagtypes[k]
316 317 self.tagscache['tip'] = self.changelog.tip()
317 318 return self.tagscache
318 319
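Once populated, the tag cache is queried through tags() and tagtype(); a brief hedged sketch (path illustrative):

    from mercurial import ui as uimod, hg
    from mercurial.node import hex

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    for name, node in repo.tags().iteritems():      # {tag name: binary node}
        # 'global', 'local', or None (unknown tag, or the implicit 'tip')
        kind = repo.tagtype(name)
        repo.ui.write('%s %s %s\n' % (name, hex(node), kind))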
319 320 def tagtype(self, tagname):
320 321 '''
321 322 return the type of the given tag. result can be:
322 323
323 324 'local' : a local tag
324 325 'global' : a global tag
325 326 None : tag does not exist
326 327 '''
327 328
328 329 self.tags()
329 330
330 331 return self._tagstypecache.get(tagname)
331 332
332 333 def _hgtagsnodes(self):
333 334 heads = self.heads()
334 335 heads.reverse()
335 336 last = {}
336 337 ret = []
337 338 for node in heads:
338 339 c = self[node]
339 340 rev = c.rev()
340 341 try:
341 342 fnode = c.filenode('.hgtags')
342 343 except error.LookupError:
343 344 continue
344 345 ret.append((rev, node, fnode))
345 346 if fnode in last:
346 347 ret[last[fnode]] = None
347 348 last[fnode] = len(ret) - 1
348 349 return [item for item in ret if item]
349 350
350 351 def tagslist(self):
351 352 '''return a list of tags ordered by revision'''
352 353 l = []
353 354 for t, n in self.tags().iteritems():
354 355 try:
355 356 r = self.changelog.rev(n)
356 357 except:
357 358 r = -2 # sort to the beginning of the list if unknown
358 359 l.append((r, t, n))
359 360 return [(t, n) for r, t, n in util.sort(l)]
360 361
361 362 def nodetags(self, node):
362 363 '''return the tags associated with a node'''
363 364 if not self.nodetagscache:
364 365 self.nodetagscache = {}
365 366 for t, n in self.tags().iteritems():
366 367 self.nodetagscache.setdefault(n, []).append(t)
367 368 return self.nodetagscache.get(node, [])
368 369
369 370 def _branchtags(self, partial, lrev):
370 371 # TODO: rename this function?
371 372 tiprev = len(self) - 1
372 373 if lrev != tiprev:
373 374 self._updatebranchcache(partial, lrev+1, tiprev+1)
374 375 self._writebranchcache(partial, self.changelog.tip(), tiprev)
375 376
376 377 return partial
377 378
378 379 def _branchheads(self):
379 380 tip = self.changelog.tip()
380 381 if self.branchcache is not None and self._branchcachetip == tip:
381 382 return self.branchcache
382 383
383 384 oldtip = self._branchcachetip
384 385 self._branchcachetip = tip
385 386 if self.branchcache is None:
386 387 self.branchcache = {} # avoid recursion in changectx
387 388 else:
388 389 self.branchcache.clear() # keep using the same dict
389 390 if oldtip is None or oldtip not in self.changelog.nodemap:
390 391 partial, last, lrev = self._readbranchcache()
391 392 else:
392 393 lrev = self.changelog.rev(oldtip)
393 394 partial = self._ubranchcache
394 395
395 396 self._branchtags(partial, lrev)
396 397 # this private cache holds all heads (not just tips)
397 398 self._ubranchcache = partial
398 399
399 400 # the branch cache is stored on disk as UTF-8, but in the local
400 401 # charset internally
401 402 for k, v in partial.iteritems():
402 403 self.branchcache[encoding.tolocal(k)] = v
403 404 return self.branchcache
404 405
405 406
406 407 def branchtags(self):
407 408 '''return a dict where branch names map to the tipmost head of
408 409 the branch; open heads come before closed'''
409 410 bt = {}
410 411 for bn, heads in self._branchheads().iteritems():
411 412 head = None
412 413 for i in range(len(heads)-1, -1, -1):
413 414 h = heads[i]
414 415 if 'close' not in self.changelog.read(h)[5]:
415 416 head = h
416 417 break
417 418 # no open heads were found
418 419 if head is None:
419 420 head = heads[-1]
420 421 bt[bn] = head
421 422 return bt
422 423
423 424
424 425 def _readbranchcache(self):
425 426 partial = {}
426 427 try:
427 428 f = self.opener("branchheads.cache")
428 429 lines = f.read().split('\n')
429 430 f.close()
430 431 except (IOError, OSError):
431 432 return {}, nullid, nullrev
432 433
433 434 try:
434 435 last, lrev = lines.pop(0).split(" ", 1)
435 436 last, lrev = bin(last), int(lrev)
436 437 if lrev >= len(self) or self[lrev].node() != last:
437 438 # invalidate the cache
438 439 raise ValueError('invalidating branch cache (tip differs)')
439 440 for l in lines:
440 441 if not l: continue
441 442 node, label = l.split(" ", 1)
442 443 partial.setdefault(label.strip(), []).append(bin(node))
443 444 except KeyboardInterrupt:
444 445 raise
445 446 except Exception, inst:
446 447 if self.ui.debugflag:
447 448 self.ui.warn(str(inst), '\n')
448 449 partial, last, lrev = {}, nullid, nullrev
449 450 return partial, last, lrev
450 451
451 452 def _writebranchcache(self, branches, tip, tiprev):
452 453 try:
453 454 f = self.opener("branchheads.cache", "w", atomictemp=True)
454 455 f.write("%s %s\n" % (hex(tip), tiprev))
455 456 for label, nodes in branches.iteritems():
456 457 for node in nodes:
457 458 f.write("%s %s\n" % (hex(node), label))
458 459 f.rename()
459 460 except (IOError, OSError):
460 461 pass
461 462
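branchheads.cache is plain text: the first line is '<tip hex> <tip rev>' and every following line is '<head hex> <branch label>'. A hedged, stand-alone sketch of reading that format, mirroring _readbranchcache() above (path illustrative):

    from binascii import unhexlify

    def readcache(path):
        lines = open(path).read().split('\n')
        tiphex, tiprev = lines.pop(0).split(' ', 1)
        tip, tiprev = unhexlify(tiphex), int(tiprev)
        heads = {}
        for l in lines:
            if not l:
                continue
            nodehex, label = l.split(' ', 1)
            heads.setdefault(label.strip(), []).append(unhexlify(nodehex))
        return tip, tiprev, heads

    tip, tiprev, heads = readcache('/path/to/repo/.hg/branchheads.cache')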
462 463 def _updatebranchcache(self, partial, start, end):
463 464 for r in xrange(start, end):
464 465 c = self[r]
465 466 b = c.branch()
466 467 bheads = partial.setdefault(b, [])
467 468 bheads.append(c.node())
468 469 for p in c.parents():
469 470 pn = p.node()
470 471 if pn in bheads:
471 472 bheads.remove(pn)
472 473
473 474 def lookup(self, key):
474 475 if isinstance(key, int):
475 476 return self.changelog.node(key)
476 477 elif key == '.':
477 478 return self.dirstate.parents()[0]
478 479 elif key == 'null':
479 480 return nullid
480 481 elif key == 'tip':
481 482 return self.changelog.tip()
482 483 n = self.changelog._match(key)
483 484 if n:
484 485 return n
485 486 if key in self.tags():
486 487 return self.tags()[key]
487 488 if key in self.branchtags():
488 489 return self.branchtags()[key]
489 490 n = self.changelog._partialmatch(key)
490 491 if n:
491 492 return n
492 493 try:
493 494 if len(key) == 20:
494 495 key = hex(key)
495 496 except:
496 497 pass
497 498 raise error.RepoError(_("unknown revision '%s'") % key)
498 499
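lookup() resolves a symbolic key to a binary node, trying integer revisions, '.', 'null', 'tip', exact node ids, tag names, branch names, and finally unambiguous hex prefixes, in that order. A hedged sketch (path illustrative):

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    tip = repo.lookup('tip')   # 20-byte binary node of the tip
    wdp = repo.lookup('.')     # first parent of the working directory
    r0 = repo.lookup(0)        # by revision number
    # tag names, branch names and unambiguous hex prefixes work the same way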
499 500 def local(self):
500 501 return True
501 502
502 503 def join(self, f):
503 504 return os.path.join(self.path, f)
504 505
505 506 def wjoin(self, f):
506 507 return os.path.join(self.root, f)
507 508
508 509 def rjoin(self, f):
509 510 return os.path.join(self.root, util.pconvert(f))
510 511
511 512 def file(self, f):
512 513 if f[0] == '/':
513 514 f = f[1:]
514 515 return filelog.filelog(self.sopener, f)
515 516
516 517 def changectx(self, changeid):
517 518 return self[changeid]
518 519
519 520 def parents(self, changeid=None):
520 521 '''get list of changectxs for parents of changeid'''
521 522 return self[changeid].parents()
522 523
523 524 def filectx(self, path, changeid=None, fileid=None):
524 525 """changeid can be a changeset revision, node, or tag.
525 526 fileid can be a file revision or node."""
526 527 return context.filectx(self, path, changeid, fileid)
527 528
528 529 def getcwd(self):
529 530 return self.dirstate.getcwd()
530 531
531 532 def pathto(self, f, cwd=None):
532 533 return self.dirstate.pathto(f, cwd)
533 534
534 535 def wfile(self, f, mode='r'):
535 536 return self.wopener(f, mode)
536 537
537 538 def _link(self, f):
538 539 return os.path.islink(self.wjoin(f))
539 540
540 541 def _filter(self, filter, filename, data):
541 542 if filter not in self.filterpats:
542 543 l = []
543 544 for pat, cmd in self.ui.configitems(filter):
544 545 if cmd == '!':
545 546 continue
546 547 mf = util.matcher(self.root, "", [pat], [], [])[1]
547 548 fn = None
548 549 params = cmd
549 550 for name, filterfn in self._datafilters.iteritems():
550 551 if cmd.startswith(name):
551 552 fn = filterfn
552 553 params = cmd[len(name):].lstrip()
553 554 break
554 555 if not fn:
555 556 fn = lambda s, c, **kwargs: util.filter(s, c)
556 557 # Wrap old filters not supporting keyword arguments
557 558 if not inspect.getargspec(fn)[2]:
558 559 oldfn = fn
559 560 fn = lambda s, c, **kwargs: oldfn(s, c)
560 561 l.append((mf, fn, params))
561 562 self.filterpats[filter] = l
562 563
563 564 for mf, fn, cmd in self.filterpats[filter]:
564 565 if mf(filename):
565 566 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
566 567 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
567 568 break
568 569
569 570 return data
570 571
571 572 def adddatafilter(self, name, filter):
572 573 self._datafilters[name] = filter
573 574
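The _filter() machinery above runs file contents through the 'encode' and 'decode' filter patterns configured in hgrc (used by wread() and wwrite() below), and adddatafilter() lets an extension register an in-process filter instead of an external command. A hedged sketch of registering one; the filter name and behaviour are invented for illustration:

    from mercurial import ui as uimod, hg

    def crlf(data, cmd, **kwargs):
        # toy filter: normalise line endings on the way in
        return data.replace('\r\n', '\n')

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    repo.adddatafilter('toylf:', crlf)
    # an [encode] pattern whose command starts with 'toylf:' is now
    # dispatched to crlf() instead of being run as a shell command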
574 575 def wread(self, filename):
575 576 if self._link(filename):
576 577 data = os.readlink(self.wjoin(filename))
577 578 else:
578 579 data = self.wopener(filename, 'r').read()
579 580 return self._filter("encode", filename, data)
580 581
581 582 def wwrite(self, filename, data, flags):
582 583 data = self._filter("decode", filename, data)
583 584 try:
584 585 os.unlink(self.wjoin(filename))
585 586 except OSError:
586 587 pass
587 588 if 'l' in flags:
588 589 self.wopener.symlink(data, filename)
589 590 else:
590 591 self.wopener(filename, 'w').write(data)
591 592 if 'x' in flags:
592 593 util.set_flags(self.wjoin(filename), False, True)
593 594
594 595 def wwritedata(self, filename, data):
595 596 return self._filter("decode", filename, data)
596 597
597 598 def transaction(self):
598 599 tr = self._transref and self._transref() or None
599 600 if tr and tr.running():
600 601 return tr.nest()
601 602
602 603 # abort here if the journal already exists
603 604 if os.path.exists(self.sjoin("journal")):
604 605 raise error.RepoError(_("journal already exists - run hg recover"))
605 606
606 607 # save dirstate for rollback
607 608 try:
608 609 ds = self.opener("dirstate").read()
609 610 except IOError:
610 611 ds = ""
611 612 self.opener("journal.dirstate", "w").write(ds)
612 613 self.opener("journal.branch", "w").write(self.dirstate.branch())
613 614
614 615 renames = [(self.sjoin("journal"), self.sjoin("undo")),
615 616 (self.join("journal.dirstate"), self.join("undo.dirstate")),
616 617 (self.join("journal.branch"), self.join("undo.branch"))]
617 618 tr = transaction.transaction(self.ui.warn, self.sopener,
618 619 self.sjoin("journal"),
619 620 aftertrans(renames),
620 621 self.store.createmode)
621 622 self._transref = weakref.ref(tr)
622 623 return tr
623 624
624 625 def recover(self):
625 626 lock = self.lock()
626 627 try:
627 628 if os.path.exists(self.sjoin("journal")):
628 629 self.ui.status(_("rolling back interrupted transaction\n"))
629 630 transaction.rollback(self.sopener, self.sjoin("journal"))
630 631 self.invalidate()
631 632 return True
632 633 else:
633 634 self.ui.warn(_("no interrupted transaction available\n"))
634 635 return False
635 636 finally:
636 637 lock.release()
637 638
638 639 def rollback(self):
639 640 wlock = lock = None
640 641 try:
641 642 wlock = self.wlock()
642 643 lock = self.lock()
643 644 if os.path.exists(self.sjoin("undo")):
644 645 self.ui.status(_("rolling back last transaction\n"))
645 646 transaction.rollback(self.sopener, self.sjoin("undo"))
646 647 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
647 648 try:
648 649 branch = self.opener("undo.branch").read()
649 650 self.dirstate.setbranch(branch)
650 651 except IOError:
651 652 self.ui.warn(_("Named branch could not be reset, "
652 653 "current branch still is: %s\n")
653 654 % encoding.tolocal(self.dirstate.branch()))
654 655 self.invalidate()
655 656 self.dirstate.invalidate()
656 657 else:
657 658 self.ui.warn(_("no rollback information available\n"))
658 659 finally:
659 660 release(lock, wlock)
660 661
661 662 def invalidate(self):
662 663 for a in "changelog manifest".split():
663 664 if a in self.__dict__:
664 665 delattr(self, a)
665 666 self.tagscache = None
666 667 self._tagstypecache = None
667 668 self.nodetagscache = None
668 669 self.branchcache = None
669 670 self._ubranchcache = None
670 671 self._branchcachetip = None
671 672
672 673 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
673 674 try:
674 675 l = lock.lock(lockname, 0, releasefn, desc=desc)
675 676 except error.LockHeld, inst:
676 677 if not wait:
677 678 raise
678 679 self.ui.warn(_("waiting for lock on %s held by %r\n") %
679 680 (desc, inst.locker))
680 681 # default to 600 seconds timeout
681 682 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
682 683 releasefn, desc=desc)
683 684 if acquirefn:
684 685 acquirefn()
685 686 return l
686 687
687 688 def lock(self, wait=True):
688 689 l = self._lockref and self._lockref()
689 690 if l is not None and l.held:
690 691 l.lock()
691 692 return l
692 693
693 694 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
694 695 _('repository %s') % self.origroot)
695 696 self._lockref = weakref.ref(l)
696 697 return l
697 698
698 699 def wlock(self, wait=True):
699 700 l = self._wlockref and self._wlockref()
700 701 if l is not None and l.held:
701 702 l.lock()
702 703 return l
703 704
704 705 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
705 706 self.dirstate.invalidate, _('working directory of %s') %
706 707 self.origroot)
707 708 self._wlockref = weakref.ref(l)
708 709 return l
709 710
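lock() protects the store and wlock() the working directory; callers must release whatever they acquire, which is why the methods below consistently use a try/finally block. A hedged sketch of the idiom from a caller's point of view (path illustrative):

    from mercurial import ui as uimod, hg
    from mercurial.lock import release

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    wlock = lock = None
    try:
        wlock = repo.wlock()   # working directory / dirstate lock
        lock = repo.lock()     # store lock, taken after wlock as in commit()
        # ... modify the repository here ...
    finally:
        release(lock, wlock)   # releases the non-None locks, innermost first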
710 711 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
711 712 """
712 713 commit an individual file as part of a larger transaction
713 714 """
714 715
715 716 fn = fctx.path()
716 717 t = fctx.data()
717 718 fl = self.file(fn)
718 719 fp1 = manifest1.get(fn, nullid)
719 720 fp2 = manifest2.get(fn, nullid)
720 721
721 722 meta = {}
722 723 cp = fctx.renamed()
723 724 if cp and cp[0] != fn:
724 725 # Mark the new revision of this file as a copy of another
725 726 # file. This copy data will effectively act as a parent
726 727 # of this new revision. If this is a merge, the first
727 728 # parent will be the nullid (meaning "look up the copy data")
728 729 # and the second one will be the other parent. For example:
729 730 #
730 731 # 0 --- 1 --- 3 rev1 changes file foo
731 732 # \ / rev2 renames foo to bar and changes it
732 733 # \- 2 -/ rev3 should have bar with all changes and
733 734 # should record that bar descends from
734 735 # bar in rev2 and foo in rev1
735 736 #
736 737 # this allows this merge to succeed:
737 738 #
738 739 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
739 740 # \ / merging rev3 and rev4 should use bar@rev2
740 741 # \- 2 --- 4 as the merge base
741 742 #
742 743
743 744 cf = cp[0]
744 745 cr = manifest1.get(cf)
745 746 nfp = fp2
746 747
747 748 if manifest2: # branch merge
748 749 if fp2 == nullid or cr is None: # copied on remote side
749 750 if cf in manifest2:
750 751 cr = manifest2[cf]
751 752 nfp = fp1
752 753
753 754 # find source in nearest ancestor if we've lost track
754 755 if not cr:
755 756 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
756 757 (fn, cf))
757 758 for a in self['.'].ancestors():
758 759 if cf in a:
759 760 cr = a[cf].filenode()
760 761 break
761 762
762 763 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
763 764 meta["copy"] = cf
764 765 meta["copyrev"] = hex(cr)
765 766 fp1, fp2 = nullid, nfp
766 767 elif fp2 != nullid:
767 768 # is one parent an ancestor of the other?
768 769 fpa = fl.ancestor(fp1, fp2)
769 770 if fpa == fp1:
770 771 fp1, fp2 = fp2, nullid
771 772 elif fpa == fp2:
772 773 fp2 = nullid
773 774
774 775 # is the file unmodified from the parent? report existing entry
775 776 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
776 777 return fp1
777 778
778 779 changelist.append(fn)
779 780 return fl.add(t, meta, tr, linkrev, fp1, fp2)
780 781
781 782 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
782 783 if p1 is None:
783 784 p1, p2 = self.dirstate.parents()
784 785 return self.commit(files=files, text=text, user=user, date=date,
785 786 p1=p1, p2=p2, extra=extra, empty_ok=True)
786 787
787 788 def commit(self, files=None, text="", user=None, date=None,
788 789 match=None, force=False, force_editor=False,
789 790 p1=None, p2=None, extra={}, empty_ok=False):
790 791 wlock = lock = None
791 792 if extra.get("close"):
792 793 force = True
793 794 if files:
794 795 files = list(set(files))
795 796 try:
796 797 wlock = self.wlock()
797 798 lock = self.lock()
798 799 use_dirstate = (p1 is None) # not rawcommit
799 800
800 801 if use_dirstate:
801 802 p1, p2 = self.dirstate.parents()
802 803 update_dirstate = True
803 804
804 805 if (not force and p2 != nullid and
805 806 (match and (match.files() or match.anypats()))):
806 807 raise util.Abort(_('cannot partially commit a merge '
807 808 '(do not specify files or patterns)'))
808 809
809 810 if files:
810 811 modified, removed = [], []
811 812 for f in files:
812 813 s = self.dirstate[f]
813 814 if s in 'nma':
814 815 modified.append(f)
815 816 elif s == 'r':
816 817 removed.append(f)
817 818 else:
818 819 self.ui.warn(_("%s not tracked!\n") % f)
819 820 changes = [modified, [], removed, [], []]
820 821 else:
821 822 changes = self.status(match=match)
822 823 else:
823 824 p1, p2 = p1, p2 or nullid
824 825 update_dirstate = (self.dirstate.parents()[0] == p1)
825 826 changes = [files, [], [], [], []]
826 827
827 828 ms = merge_.mergestate(self)
828 829 for f in changes[0]:
829 830 if f in ms and ms[f] == 'u':
830 831 raise util.Abort(_("unresolved merge conflicts "
831 832 "(see hg resolve)"))
832 833 wctx = context.workingctx(self, (p1, p2), text, user, date,
833 834 extra, changes)
834 835 r = self._commitctx(wctx, force, force_editor, empty_ok,
835 836 use_dirstate, update_dirstate)
836 837 ms.reset()
837 838 return r
838 839
839 840 finally:
840 841 release(lock, wlock)
841 842
842 843 def commitctx(self, ctx):
843 844 """Add a new revision to current repository.
844 845
845 846 Revision information is passed in the context.memctx argument.
846 847 commitctx() does not touch the working directory.
847 848 """
848 849 wlock = lock = None
849 850 try:
850 851 wlock = self.wlock()
851 852 lock = self.lock()
852 853 return self._commitctx(ctx, force=True, force_editor=False,
853 854 empty_ok=True, use_dirstate=False,
854 855 update_dirstate=False)
855 856 finally:
856 857 release(lock, wlock)
857 858
858 859 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
859 860 use_dirstate=True, update_dirstate=True):
860 861 tr = None
861 862 valid = 0 # don't save the dirstate if this isn't set
862 863 try:
863 864 commit = util.sort(wctx.modified() + wctx.added())
864 865 remove = wctx.removed()
865 866 extra = wctx.extra().copy()
866 867 branchname = extra['branch']
867 868 user = wctx.user()
868 869 text = wctx.description()
869 870
870 871 p1, p2 = [p.node() for p in wctx.parents()]
871 872 c1 = self.changelog.read(p1)
872 873 c2 = self.changelog.read(p2)
873 874 m1 = self.manifest.read(c1[0]).copy()
874 875 m2 = self.manifest.read(c2[0])
875 876
876 877 if use_dirstate:
877 878 oldname = c1[5].get("branch") # stored in UTF-8
878 879 if (not commit and not remove and not force and p2 == nullid
879 880 and branchname == oldname):
880 881 self.ui.status(_("nothing changed\n"))
881 882 return None
882 883
883 884 xp1 = hex(p1)
884 885 if p2 == nullid: xp2 = ''
885 886 else: xp2 = hex(p2)
886 887
887 888 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
888 889
889 890 tr = self.transaction()
890 891 trp = weakref.proxy(tr)
891 892
892 893 # check in files
893 894 new = {}
894 895 changed = []
895 896 linkrev = len(self)
896 897 for f in commit:
897 898 self.ui.note(f + "\n")
898 899 try:
899 900 fctx = wctx.filectx(f)
900 901 newflags = fctx.flags()
901 902 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
902 903 if ((not changed or changed[-1] != f) and
903 904 m2.get(f) != new[f]):
904 905 # mention the file in the changelog if some
905 906 # flag changed, even if there was no content
906 907 # change.
907 908 if m1.flags(f) != newflags:
908 909 changed.append(f)
909 910 m1.set(f, newflags)
910 911 if use_dirstate:
911 912 self.dirstate.normal(f)
912 913
913 914 except (OSError, IOError):
914 915 if use_dirstate:
915 916 self.ui.warn(_("trouble committing %s!\n") % f)
916 917 raise
917 918 else:
918 919 remove.append(f)
919 920
920 921 updated, added = [], []
921 922 for f in util.sort(changed):
922 923 if f in m1 or f in m2:
923 924 updated.append(f)
924 925 else:
925 926 added.append(f)
926 927
927 928 # update manifest
928 929 m1.update(new)
929 930 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
930 931 removed1 = []
931 932
932 933 for f in removed:
933 934 if f in m1:
934 935 del m1[f]
935 936 removed1.append(f)
936 937 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
937 938 (new, removed1))
938 939
939 940 # add changeset
940 941 if (not empty_ok and not text) or force_editor:
941 942 edittext = []
942 943 if text:
943 944 edittext.append(text)
944 945 edittext.append("")
945 946 edittext.append("") # Empty line between message and comments.
946 947 edittext.append(_("HG: Enter commit message."
947 948 " Lines beginning with 'HG:' are removed."))
948 949 edittext.append("HG: --")
949 950 edittext.append("HG: user: %s" % user)
950 951 if p2 != nullid:
951 952 edittext.append("HG: branch merge")
952 953 if branchname:
953 954 edittext.append("HG: branch '%s'"
954 955 % encoding.tolocal(branchname))
955 956 edittext.extend(["HG: added %s" % f for f in added])
956 957 edittext.extend(["HG: changed %s" % f for f in updated])
957 958 edittext.extend(["HG: removed %s" % f for f in removed])
958 959 if not added and not updated and not removed:
959 960 edittext.append("HG: no files changed")
960 961 edittext.append("")
961 962 # run editor in the repository root
962 963 olddir = os.getcwd()
963 964 os.chdir(self.root)
964 965 text = self.ui.edit("\n".join(edittext), user)
965 966 os.chdir(olddir)
966 967
967 968 lines = [line.rstrip() for line in text.rstrip().splitlines()]
968 969 while lines and not lines[0]:
969 970 del lines[0]
970 971 if not lines and use_dirstate:
971 972 raise util.Abort(_("empty commit message"))
972 973 text = '\n'.join(lines)
973 974
974 975 self.changelog.delayupdate()
975 976 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
976 977 user, wctx.date(), extra)
977 978 p = lambda: self.changelog.writepending() and self.root or ""
978 979 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
979 980 parent2=xp2, pending=p)
980 981 self.changelog.finalize(trp)
981 982 tr.close()
982 983
983 984 if self.branchcache:
984 985 self.branchtags()
985 986
986 987 if use_dirstate or update_dirstate:
987 988 self.dirstate.setparents(n)
988 989 if use_dirstate:
989 990 for f in removed:
990 991 self.dirstate.forget(f)
991 992 valid = 1 # our dirstate updates are complete
992 993
993 994 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
994 995 return n
995 996 finally:
996 997 if not valid: # don't save our updated dirstate
997 998 self.dirstate.invalidate()
998 999 del tr
999 1000
1000 1001 def walk(self, match, node=None):
1001 1002 '''
1002 1003 walk recursively through the directory tree or a given
1003 1004 changeset, finding all files matched by the match
1004 1005 function
1005 1006 '''
1006 1007 return self[node].walk(match)
1007 1008
1008 1009 def status(self, node1='.', node2=None, match=None,
1009 1010 ignored=False, clean=False, unknown=False):
1010 1011 """return status of files between two nodes or node and working directory
1011 1012
1012 1013 If node1 is None, use the first dirstate parent instead.
1013 1014 If node2 is None, compare node1 with working directory.
1014 1015 """
1015 1016
1016 1017 def mfmatches(ctx):
1017 1018 mf = ctx.manifest().copy()
1018 1019 for fn in mf.keys():
1019 1020 if not match(fn):
1020 1021 del mf[fn]
1021 1022 return mf
1022 1023
1023 1024 if isinstance(node1, context.changectx):
1024 1025 ctx1 = node1
1025 1026 else:
1026 1027 ctx1 = self[node1]
1027 1028 if isinstance(node2, context.changectx):
1028 1029 ctx2 = node2
1029 1030 else:
1030 1031 ctx2 = self[node2]
1031 1032
1032 1033 working = ctx2.rev() is None
1033 1034 parentworking = working and ctx1 == self['.']
1034 1035 match = match or match_.always(self.root, self.getcwd())
1035 1036 listignored, listclean, listunknown = ignored, clean, unknown
1036 1037
1037 1038 # load earliest manifest first for caching reasons
1038 1039 if not working and ctx2.rev() < ctx1.rev():
1039 1040 ctx2.manifest()
1040 1041
1041 1042 if not parentworking:
1042 1043 def bad(f, msg):
1043 1044 if f not in ctx1:
1044 1045 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1045 1046 return False
1046 1047 match.bad = bad
1047 1048
1048 1049 if working: # we need to scan the working dir
1049 1050 s = self.dirstate.status(match, listignored, listclean, listunknown)
1050 1051 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1051 1052
1052 1053 # check for any possibly clean files
1053 1054 if parentworking and cmp:
1054 1055 fixup = []
1055 1056 # do a full compare of any files that might have changed
1056 1057 for f in cmp:
1057 1058 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1058 1059 or ctx1[f].cmp(ctx2[f].data())):
1059 1060 modified.append(f)
1060 1061 else:
1061 1062 fixup.append(f)
1062 1063
1063 1064 if listclean:
1064 1065 clean += fixup
1065 1066
1066 1067 # update dirstate for files that are actually clean
1067 1068 if fixup:
1068 1069 wlock = None
1069 1070 try:
1070 1071 try:
1071 1072 # updating the dirstate is optional
1072 1073 # so we don't wait on the lock
1073 1074 wlock = self.wlock(False)
1074 1075 for f in fixup:
1075 1076 self.dirstate.normal(f)
1076 1077 except error.LockError:
1077 1078 pass
1078 1079 finally:
1079 1080 release(wlock)
1080 1081
1081 1082 if not parentworking:
1082 1083 mf1 = mfmatches(ctx1)
1083 1084 if working:
1084 1085 # we are comparing working dir against non-parent
1085 1086 # generate a pseudo-manifest for the working dir
1086 1087 mf2 = mfmatches(self['.'])
1087 1088 for f in cmp + modified + added:
1088 1089 mf2[f] = None
1089 1090 mf2.set(f, ctx2.flags(f))
1090 1091 for f in removed:
1091 1092 if f in mf2:
1092 1093 del mf2[f]
1093 1094 else:
1094 1095 # we are comparing two revisions
1095 1096 deleted, unknown, ignored = [], [], []
1096 1097 mf2 = mfmatches(ctx2)
1097 1098
1098 1099 modified, added, clean = [], [], []
1099 1100 for fn in mf2:
1100 1101 if fn in mf1:
1101 1102 if (mf1.flags(fn) != mf2.flags(fn) or
1102 1103 (mf1[fn] != mf2[fn] and
1103 1104 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1104 1105 modified.append(fn)
1105 1106 elif listclean:
1106 1107 clean.append(fn)
1107 1108 del mf1[fn]
1108 1109 else:
1109 1110 added.append(fn)
1110 1111 removed = mf1.keys()
1111 1112
1112 1113 r = modified, added, removed, deleted, unknown, ignored, clean
1113 1114 [l.sort() for l in r]
1114 1115 return r
1115 1116
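Whatever the inputs, status() returns the same seven lists, with the ignored, clean and unknown categories left empty unless explicitly requested. A hedged sketch of consuming it (path illustrative):

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    st = repo.status(unknown=True, ignored=True, clean=True)
    modified, added, removed, deleted, unknown, ignored, clean = st
    for f in modified:
        repo.ui.write('M %s\n' % f)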
1116 1117 def add(self, list):
1117 1118 wlock = self.wlock()
1118 1119 try:
1119 1120 rejected = []
1120 1121 for f in list:
1121 1122 p = self.wjoin(f)
1122 1123 try:
1123 1124 st = os.lstat(p)
1124 1125 except:
1125 1126 self.ui.warn(_("%s does not exist!\n") % f)
1126 1127 rejected.append(f)
1127 1128 continue
1128 1129 if st.st_size > 10000000:
1129 1130 self.ui.warn(_("%s: files over 10MB may cause memory and"
1130 1131 " performance problems\n"
1131 1132 "(use 'hg revert %s' to unadd the file)\n")
1132 1133 % (f, f))
1133 1134 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1134 1135 self.ui.warn(_("%s not added: only files and symlinks "
1135 1136 "supported currently\n") % f)
1136 1137 rejected.append(p)
1137 1138 elif self.dirstate[f] in 'amn':
1138 1139 self.ui.warn(_("%s already tracked!\n") % f)
1139 1140 elif self.dirstate[f] == 'r':
1140 1141 self.dirstate.normallookup(f)
1141 1142 else:
1142 1143 self.dirstate.add(f)
1143 1144 return rejected
1144 1145 finally:
1145 1146 wlock.release()
1146 1147
1147 1148 def forget(self, list):
1148 1149 wlock = self.wlock()
1149 1150 try:
1150 1151 for f in list:
1151 1152 if self.dirstate[f] != 'a':
1152 1153 self.ui.warn(_("%s not added!\n") % f)
1153 1154 else:
1154 1155 self.dirstate.forget(f)
1155 1156 finally:
1156 1157 wlock.release()
1157 1158
1158 1159 def remove(self, list, unlink=False):
1159 1160 wlock = None
1160 1161 try:
1161 1162 if unlink:
1162 1163 for f in list:
1163 1164 try:
1164 1165 util.unlink(self.wjoin(f))
1165 1166 except OSError, inst:
1166 1167 if inst.errno != errno.ENOENT:
1167 1168 raise
1168 1169 wlock = self.wlock()
1169 1170 for f in list:
1170 1171 if unlink and os.path.exists(self.wjoin(f)):
1171 1172 self.ui.warn(_("%s still exists!\n") % f)
1172 1173 elif self.dirstate[f] == 'a':
1173 1174 self.dirstate.forget(f)
1174 1175 elif f not in self.dirstate:
1175 1176 self.ui.warn(_("%s not tracked!\n") % f)
1176 1177 else:
1177 1178 self.dirstate.remove(f)
1178 1179 finally:
1179 1180 release(wlock)
1180 1181
1181 1182 def undelete(self, list):
1182 1183 manifests = [self.manifest.read(self.changelog.read(p)[0])
1183 1184 for p in self.dirstate.parents() if p != nullid]
1184 1185 wlock = self.wlock()
1185 1186 try:
1186 1187 for f in list:
1187 1188 if self.dirstate[f] != 'r':
1188 1189 self.ui.warn(_("%s not removed!\n") % f)
1189 1190 else:
1190 1191 m = f in manifests[0] and manifests[0] or manifests[1]
1191 1192 t = self.file(f).read(m[f])
1192 1193 self.wwrite(f, t, m.flags(f))
1193 1194 self.dirstate.normal(f)
1194 1195 finally:
1195 1196 wlock.release()
1196 1197
1197 1198 def copy(self, source, dest):
1198 1199 p = self.wjoin(dest)
1199 1200 if not (os.path.exists(p) or os.path.islink(p)):
1200 1201 self.ui.warn(_("%s does not exist!\n") % dest)
1201 1202 elif not (os.path.isfile(p) or os.path.islink(p)):
1202 1203 self.ui.warn(_("copy failed: %s is not a file or a "
1203 1204 "symbolic link\n") % dest)
1204 1205 else:
1205 1206 wlock = self.wlock()
1206 1207 try:
1207 1208 if self.dirstate[dest] in '?r':
1208 1209 self.dirstate.add(dest)
1209 1210 self.dirstate.copy(source, dest)
1210 1211 finally:
1211 1212 wlock.release()
1212 1213
1213 1214 def heads(self, start=None, closed=True):
1214 1215 heads = self.changelog.heads(start)
1215 1216 def display(head):
1216 1217 if closed:
1217 1218 return True
1218 1219 extras = self.changelog.read(head)[5]
1219 1220 return ('close' not in extras)
1220 1221 # sort the output in rev descending order
1221 1222 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1222 1223 return [n for (r, n) in util.sort(heads)]
1223 1224
1224 1225 def branchheads(self, branch=None, start=None, closed=True):
1225 1226 if branch is None:
1226 1227 branch = self[None].branch()
1227 1228 branches = self._branchheads()
1228 1229 if branch not in branches:
1229 1230 return []
1230 1231 bheads = branches[branch]
1231 1232 # the cache returns heads ordered lowest to highest
1232 1233 bheads.reverse()
1233 1234 if start is not None:
1234 1235 # filter out the heads that cannot be reached from startrev
1235 1236 bheads = self.changelog.nodesbetween([start], bheads)[2]
1236 1237 if not closed:
1237 1238 bheads = [h for h in bheads if
1238 1239 ('close' not in self.changelog.read(h)[5])]
1239 1240 return bheads
1240 1241
1241 1242 def branches(self, nodes):
1242 1243 if not nodes:
1243 1244 nodes = [self.changelog.tip()]
1244 1245 b = []
1245 1246 for n in nodes:
1246 1247 t = n
1247 1248 while 1:
1248 1249 p = self.changelog.parents(n)
1249 1250 if p[1] != nullid or p[0] == nullid:
1250 1251 b.append((t, n, p[0], p[1]))
1251 1252 break
1252 1253 n = p[0]
1253 1254 return b
1254 1255
1255 1256 def between(self, pairs):
1256 1257 r = []
1257 1258
1258 1259 for top, bottom in pairs:
1259 1260 n, l, i = top, [], 0
1260 1261 f = 1
1261 1262
1262 1263 while n != bottom and n != nullid:
1263 1264 p = self.changelog.parents(n)[0]
1264 1265 if i == f:
1265 1266 l.append(n)
1266 1267 f = f * 2
1267 1268 n = p
1268 1269 i += 1
1269 1270
1270 1271 r.append(l)
1271 1272
1272 1273 return r
1273 1274
1274 1275 def findincoming(self, remote, base=None, heads=None, force=False):
1275 1276 """Return list of roots of the subsets of missing nodes from remote
1276 1277
1277 1278 If base dict is specified, assume that these nodes and their parents
1278 1279 exist on the remote side and that no child of a node of base exists
1279 1280 in both remote and self.
1281 1282 Furthermore, base will be updated to include the nodes that exist
1282 1283 in both self and remote but whose children do not exist in both.
1282 1283 If a list of heads is specified, return only nodes which are heads
1283 1284 or ancestors of these heads.
1284 1285
1285 1286 All the ancestors of base are in self and in remote.
1286 1287 All the descendants of the list returned are missing in self.
1287 1288 (and so we know that the rest of the nodes are missing in remote, see
1288 1289 outgoing)
1289 1290 """
1290 1291 return self.findcommonincoming(remote, base, heads, force)[1]
1291 1292
1292 1293 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1293 1294 """Return a tuple (common, missing roots, heads) used to identify
1294 1295 missing nodes from remote.
1295 1296
1296 1297 If base dict is specified, assume that these nodes and their parents
1297 1298 exist on the remote side and that no child of a node of base exists
1298 1299 in both remote and self.
1299 1300 Furthermore, base will be updated to include the nodes that exist
1300 1301 in both self and remote but whose children do not exist in both.
1301 1302 If a list of heads is specified, return only nodes which are heads
1302 1303 or ancestors of these heads.
1303 1304
1304 1305 All the ancestors of base are in self and in remote.
1305 1306 """
1306 1307 m = self.changelog.nodemap
1307 1308 search = []
1308 1309 fetch = set()
1309 1310 seen = set()
1310 1311 seenbranch = set()
1311 1312 if base == None:
1312 1313 base = {}
1313 1314
1314 1315 if not heads:
1315 1316 heads = remote.heads()
1316 1317
1317 1318 if self.changelog.tip() == nullid:
1318 1319 base[nullid] = 1
1319 1320 if heads != [nullid]:
1320 1321 return [nullid], [nullid], list(heads)
1321 1322 return [nullid], [], []
1322 1323
1323 1324 # assume we're closer to the tip than the root
1324 1325 # and start by examining the heads
1325 1326 self.ui.status(_("searching for changes\n"))
1326 1327
1327 1328 unknown = []
1328 1329 for h in heads:
1329 1330 if h not in m:
1330 1331 unknown.append(h)
1331 1332 else:
1332 1333 base[h] = 1
1333 1334
1334 1335 heads = unknown
1335 1336 if not unknown:
1336 1337 return base.keys(), [], []
1337 1338
1338 1339 req = set(unknown)
1339 1340 reqcnt = 0
1340 1341
1341 1342 # search through remote branches
1342 1343 # a 'branch' here is a linear segment of history, with four parts:
1343 1344 # head, root, first parent, second parent
1344 1345 # (a branch always has two parents (or none) by definition)
1345 1346 unknown = remote.branches(unknown)
1346 1347 while unknown:
1347 1348 r = []
1348 1349 while unknown:
1349 1350 n = unknown.pop(0)
1350 1351 if n[0] in seen:
1351 1352 continue
1352 1353
1353 1354 self.ui.debug(_("examining %s:%s\n")
1354 1355 % (short(n[0]), short(n[1])))
1355 1356 if n[0] == nullid: # found the end of the branch
1356 1357 pass
1357 1358 elif n in seenbranch:
1358 1359 self.ui.debug(_("branch already found\n"))
1359 1360 continue
1360 1361 elif n[1] and n[1] in m: # do we know the base?
1361 1362 self.ui.debug(_("found incomplete branch %s:%s\n")
1362 1363 % (short(n[0]), short(n[1])))
1363 1364 search.append(n[0:2]) # schedule branch range for scanning
1364 1365 seenbranch.add(n)
1365 1366 else:
1366 1367 if n[1] not in seen and n[1] not in fetch:
1367 1368 if n[2] in m and n[3] in m:
1368 1369 self.ui.debug(_("found new changeset %s\n") %
1369 1370 short(n[1]))
1370 1371 fetch.add(n[1]) # earliest unknown
1371 1372 for p in n[2:4]:
1372 1373 if p in m:
1373 1374 base[p] = 1 # latest known
1374 1375
1375 1376 for p in n[2:4]:
1376 1377 if p not in req and p not in m:
1377 1378 r.append(p)
1378 1379 req.add(p)
1379 1380 seen.add(n[0])
1380 1381
1381 1382 if r:
1382 1383 reqcnt += 1
1383 1384 self.ui.debug(_("request %d: %s\n") %
1384 1385 (reqcnt, " ".join(map(short, r))))
1385 1386 for p in xrange(0, len(r), 10):
1386 1387 for b in remote.branches(r[p:p+10]):
1387 1388 self.ui.debug(_("received %s:%s\n") %
1388 1389 (short(b[0]), short(b[1])))
1389 1390 unknown.append(b)
1390 1391
1391 1392 # do binary search on the branches we found
1392 1393 while search:
1393 1394 newsearch = []
1394 1395 reqcnt += 1
1395 1396 for n, l in zip(search, remote.between(search)):
1396 1397 l.append(n[1])
1397 1398 p = n[0]
1398 1399 f = 1
1399 1400 for i in l:
1400 1401 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1401 1402 if i in m:
1402 1403 if f <= 2:
1403 1404 self.ui.debug(_("found new branch changeset %s\n") %
1404 1405 short(p))
1405 1406 fetch.add(p)
1406 1407 base[i] = 1
1407 1408 else:
1408 1409 self.ui.debug(_("narrowed branch search to %s:%s\n")
1409 1410 % (short(p), short(i)))
1410 1411 newsearch.append((p, i))
1411 1412 break
1412 1413 p, f = i, f * 2
1413 1414 search = newsearch
1414 1415
1415 1416 # sanity check our fetch list
1416 1417 for f in fetch:
1417 1418 if f in m:
1418 1419 raise error.RepoError(_("already have changeset ")
1419 1420 + short(f[:4]))
1420 1421
1421 1422 if base.keys() == [nullid]:
1422 1423 if force:
1423 1424 self.ui.warn(_("warning: repository is unrelated\n"))
1424 1425 else:
1425 1426 raise util.Abort(_("repository is unrelated"))
1426 1427
1427 1428 self.ui.debug(_("found new changesets starting at ") +
1428 1429 " ".join([short(f) for f in fetch]) + "\n")
1429 1430
1430 1431 self.ui.debug(_("%d total queries\n") % reqcnt)
1431 1432
1432 1433 return base.keys(), list(fetch), heads
1433 1434
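pull() further below drives discovery through findcommonincoming(); a hedged sketch of calling it directly against a second local repository (paths illustrative):

    from mercurial import ui as uimod, hg

    u = uimod.ui()
    local = hg.repository(u, '/path/to/local')
    other = hg.repository(u, '/path/to/other')

    common, fetch, heads = local.findcommonincoming(other)
    # common: nodes known to both sides
    # fetch:  roots of the changesets missing locally ([nullid] means everything)
    # heads:  remote heads that are not yet known locally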
1434 1435 def findoutgoing(self, remote, base=None, heads=None, force=False):
1435 1436 """Return list of nodes that are roots of subsets not in remote
1436 1437
1437 1438 If base dict is specified, assume that these nodes and their parents
1438 1439 exist on the remote side.
1439 1440 If a list of heads is specified, return only nodes which are heads
1440 1441 or ancestors of these heads, and return a second element which
1441 1442 contains all remote heads which get new children.
1442 1443 """
1443 1444 if base == None:
1444 1445 base = {}
1445 1446 self.findincoming(remote, base, heads, force=force)
1446 1447
1447 1448 self.ui.debug(_("common changesets up to ")
1448 1449 + " ".join(map(short, base.keys())) + "\n")
1449 1450
1450 1451 remain = set(self.changelog.nodemap)
1451 1452
1452 1453 # prune everything remote has from the tree
1453 1454 remain.remove(nullid)
1454 1455 remove = base.keys()
1455 1456 while remove:
1456 1457 n = remove.pop(0)
1457 1458 if n in remain:
1458 1459 remain.remove(n)
1459 1460 for p in self.changelog.parents(n):
1460 1461 remove.append(p)
1461 1462
1462 1463 # find every node whose parents have been pruned
1463 1464 subset = []
1464 1465 # find every remote head that will get new children
1465 1466 updated_heads = {}
1466 1467 for n in remain:
1467 1468 p1, p2 = self.changelog.parents(n)
1468 1469 if p1 not in remain and p2 not in remain:
1469 1470 subset.append(n)
1470 1471 if heads:
1471 1472 if p1 in heads:
1472 1473 updated_heads[p1] = True
1473 1474 if p2 in heads:
1474 1475 updated_heads[p2] = True
1475 1476
1476 1477 # this is the set of all roots we have to push
1477 1478 if heads:
1478 1479 return subset, updated_heads.keys()
1479 1480 else:
1480 1481 return subset
1481 1482
1482 1483 def pull(self, remote, heads=None, force=False):
1483 1484 lock = self.lock()
1484 1485 try:
1485 1486 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1486 1487 force=force)
1487 1488 if fetch == [nullid]:
1488 1489 self.ui.status(_("requesting all changes\n"))
1489 1490
1490 1491 if not fetch:
1491 1492 self.ui.status(_("no changes found\n"))
1492 1493 return 0
1493 1494
1494 1495 if heads is None and remote.capable('changegroupsubset'):
1495 1496 heads = rheads
1496 1497
1497 1498 if heads is None:
1498 1499 cg = remote.changegroup(fetch, 'pull')
1499 1500 else:
1500 1501 if not remote.capable('changegroupsubset'):
1501 1502 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1502 1503 cg = remote.changegroupsubset(fetch, heads, 'pull')
1503 1504 return self.addchangegroup(cg, 'pull', remote.url())
1504 1505 finally:
1505 1506 lock.release()
1506 1507
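For illustration, a hedged sketch of driving pull() between two local repositories through the hg module; the paths are hypothetical and error handling is omitted:

from mercurial import ui, hg

myui = ui.ui()
src = hg.repository(myui, '/tmp/src')      # hypothetical source repo
dst = hg.repository(myui, '/tmp/dst')      # hypothetical destination repo

# pull() takes the destination lock, computes the common/missing sets via
# findcommonincoming(), and hands the resulting changegroup to
# addchangegroup()
ret = dst.pull(src)
myui.status("pull returned %d\n" % ret)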
1507 1508 def push(self, remote, force=False, revs=None):
1508 1509 # there are two ways to push to remote repo:
1509 1510 #
1510 1511 # addchangegroup assumes local user can lock remote
1511 1512 # repo (local filesystem, old ssh servers).
1512 1513 #
1513 1514 # unbundle assumes local user cannot lock remote repo (new ssh
1514 1515 # servers, http servers).
1515 1516
1516 1517 if remote.capable('unbundle'):
1517 1518 return self.push_unbundle(remote, force, revs)
1518 1519 return self.push_addchangegroup(remote, force, revs)
1519 1520
1520 1521 def prepush(self, remote, force, revs):
1521 1522 common = {}
1522 1523 remote_heads = remote.heads()
1523 1524 inc = self.findincoming(remote, common, remote_heads, force=force)
1524 1525
1525 1526 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1526 1527 if revs is not None:
1527 1528 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1528 1529 else:
1529 1530 bases, heads = update, self.changelog.heads()
1530 1531
1531 1532 if not bases:
1532 1533 self.ui.status(_("no changes found\n"))
1533 1534 return None, 1
1534 1535 elif not force:
1535 1536 # check if we're creating new remote heads
1536 1537 # to be a remote head after push, node must be either
1537 1538 # - unknown locally
1538 1539 # - a local outgoing head descended from update
1539 1540 # - a remote head that's known locally and not
1540 1541 # ancestral to an outgoing head
1541 1542
1542 1543 warn = 0
1543 1544
1544 1545 if remote_heads == [nullid]:
1545 1546 warn = 0
1546 1547 elif not revs and len(heads) > len(remote_heads):
1547 1548 warn = 1
1548 1549 else:
1549 1550 newheads = list(heads)
1550 1551 for r in remote_heads:
1551 1552 if r in self.changelog.nodemap:
1552 1553 desc = self.changelog.heads(r, heads)
1553 1554 l = [h for h in heads if h in desc]
1554 1555 if not l:
1555 1556 newheads.append(r)
1556 1557 else:
1557 1558 newheads.append(r)
1558 1559 if len(newheads) > len(remote_heads):
1559 1560 warn = 1
1560 1561
1561 1562 if warn:
1562 1563 self.ui.warn(_("abort: push creates new remote heads!\n"))
1563 1564 self.ui.status(_("(did you forget to merge?"
1564 1565 " use push -f to force)\n"))
1565 1566 return None, 0
1566 1567 elif inc:
1567 1568 self.ui.warn(_("note: unsynced remote changes!\n"))
1568 1569
1569 1570
1570 1571 if revs is None:
1571 1572 # use the fast path, no race possible on push
1572 1573 cg = self._changegroup(common.keys(), 'push')
1573 1574 else:
1574 1575 cg = self.changegroupsubset(update, revs, 'push')
1575 1576 return cg, remote_heads
1576 1577
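The head-counting logic in prepush() can be summarized as: after the push, every remote head that is not superseded by an outgoing head is still a head, so if the resulting list is longer than the remote's current one the push would create new remote heads. A simplified, hedged restatement with toy names (it ignores the unknown-remote-head and no-revs special cases handled above):

def creates_new_remote_heads(local_heads, remote_heads, descendants):
    """descendants(r) -> the local heads descended from remote head r,
    a stand-in for self.changelog.heads(r, heads) in the real code."""
    newheads = list(local_heads)
    for r in remote_heads:
        if not descendants(r):
            # nothing we push sits on top of r, so r remains a head
            newheads.append(r)
    return len(newheads) > len(remote_heads)

# remote has heads r1 and r2; we push two new heads that both sit on r1
desc = {'r1': ['h1', 'h2'], 'r2': []}.get
print creates_new_remote_heads(['h1', 'h2'], ['r1', 'r2'], desc)   # True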
1577 1578 def push_addchangegroup(self, remote, force, revs):
1578 1579 lock = remote.lock()
1579 1580 try:
1580 1581 ret = self.prepush(remote, force, revs)
1581 1582 if ret[0] is not None:
1582 1583 cg, remote_heads = ret
1583 1584 return remote.addchangegroup(cg, 'push', self.url())
1584 1585 return ret[1]
1585 1586 finally:
1586 1587 lock.release()
1587 1588
1588 1589 def push_unbundle(self, remote, force, revs):
1589 1590 # local repo finds heads on server, finds out what revs it
1590 1591 # must push. once revs transferred, if server finds it has
1591 1592 # different heads (someone else won commit/push race), server
1592 1593 # aborts.
1593 1594
1594 1595 ret = self.prepush(remote, force, revs)
1595 1596 if ret[0] is not None:
1596 1597 cg, remote_heads = ret
1597 1598 if force: remote_heads = ['force']
1598 1599 return remote.unbundle(cg, remote_heads, 'push')
1599 1600 return ret[1]
1600 1601
1601 1602 def changegroupinfo(self, nodes, source):
1602 1603 if self.ui.verbose or source == 'bundle':
1603 1604 self.ui.status(_("%d changesets found\n") % len(nodes))
1604 1605 if self.ui.debugflag:
1605 1606 self.ui.debug(_("list of changesets:\n"))
1606 1607 for node in nodes:
1607 1608 self.ui.debug("%s\n" % hex(node))
1608 1609
1609 1610 def changegroupsubset(self, bases, heads, source, extranodes=None):
1610 1611 """This function generates a changegroup consisting of all the nodes
1611 1612 that are descendants of any of the bases, and ancestors of any of
1612 1613 the heads.
1613 1614
1614 1615 It is fairly complex as determining which filenodes and which
1615 1616 manifest nodes need to be included for the changeset to be complete
1616 1617 is non-trivial.
1617 1618
1618 1619 Another wrinkle is doing the reverse, figuring out which changeset in
1619 1620 the changegroup a particular filenode or manifestnode belongs to.
1620 1621
1621 1622 The caller can specify some nodes that must be included in the
1622 1623 changegroup using the extranodes argument. It should be a dict
1623 1624 where the keys are the filenames (or 1 for the manifest), and the
1624 1625 values are lists of (node, linknode) tuples, where node is a wanted
1625 1626 node and linknode is the changelog node that should be transmitted as
1626 1627 the linkrev.
1627 1628 """
1628 1629
1629 1630 if extranodes is None:
1630 1631 # can we go through the fast path ?
1631 1632 heads.sort()
1632 1633 allheads = self.heads()
1633 1634 allheads.sort()
1634 1635 if heads == allheads:
1635 1636 common = []
1636 1637 # parents of bases are known from both sides
1637 1638 for n in bases:
1638 1639 for p in self.changelog.parents(n):
1639 1640 if p != nullid:
1640 1641 common.append(p)
1641 1642 return self._changegroup(common, source)
1642 1643
1643 1644 self.hook('preoutgoing', throw=True, source=source)
1644 1645
1645 1646 # Set up some initial variables
1646 1647 # Make it easy to refer to self.changelog
1647 1648 cl = self.changelog
1648 1649 # msng is short for missing - compute the list of changesets in this
1649 1650 # changegroup.
1650 1651 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1651 1652 self.changegroupinfo(msng_cl_lst, source)
1652 1653 # Some bases may turn out to be superfluous, and some heads may be
1653 1654 # too. nodesbetween will return the minimal set of bases and heads
1654 1655 # necessary to re-create the changegroup.
1655 1656
1656 1657 # Known heads are the list of heads that it is assumed the recipient
1657 1658 # of this changegroup will know about.
1658 1659 knownheads = {}
1659 1660 # We assume that all parents of bases are known heads.
1660 1661 for n in bases:
1661 1662 for p in cl.parents(n):
1662 1663 if p != nullid:
1663 1664 knownheads[p] = 1
1664 1665 knownheads = knownheads.keys()
1665 1666 if knownheads:
1666 1667 # Now that we know what heads are known, we can compute which
1667 1668 # changesets are known. The recipient must know about all
1668 1669 # changesets required to reach the known heads from the null
1669 1670 # changeset.
1670 1671 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1671 1672 junk = None
1672 1673 # Transform the list into an ersatz set.
1673 1674 has_cl_set = set(has_cl_set)
1674 1675 else:
1675 1676 # If there were no known heads, the recipient cannot be assumed to
1676 1677 # know about any changesets.
1677 1678 has_cl_set = set()
1678 1679
1679 1680 # Make it easy to refer to self.manifest
1680 1681 mnfst = self.manifest
1681 1682 # We don't know which manifests are missing yet
1682 1683 msng_mnfst_set = {}
1683 1684 # Nor do we know which filenodes are missing.
1684 1685 msng_filenode_set = {}
1685 1686
1686 1687 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1687 1688 junk = None
1688 1689
1689 1690 # A changeset always belongs to itself, so the changenode lookup
1690 1691 # function for a changenode is identity.
1691 1692 def identity(x):
1692 1693 return x
1693 1694
1694 1695 # A function generating function. Sets up an environment for the
1695 1696 # inner function.
1696 1697 def cmp_by_rev_func(revlog):
1697 1698 # Compare two nodes by their revision number in the environment's
1698 1699 # revision history. Since the revision number both represents the
1699 1700 # most efficient order to read the nodes in, and represents a
1700 1701 # topological sorting of the nodes, this function is often useful.
1701 1702 def cmp_by_rev(a, b):
1702 1703 return cmp(revlog.rev(a), revlog.rev(b))
1703 1704 return cmp_by_rev
1704 1705
1705 1706 # If we determine that a particular file or manifest node must be a
1706 1707 # node that the recipient of the changegroup will already have, we can
1707 1708 # also assume the recipient will have all the parents. This function
1708 1709 # prunes them from the set of missing nodes.
1709 1710 def prune_parents(revlog, hasset, msngset):
1710 1711 haslst = hasset.keys()
1711 1712 haslst.sort(cmp_by_rev_func(revlog))
1712 1713 for node in haslst:
1713 1714 parentlst = [p for p in revlog.parents(node) if p != nullid]
1714 1715 while parentlst:
1715 1716 n = parentlst.pop()
1716 1717 if n not in hasset:
1717 1718 hasset[n] = 1
1718 1719 p = [p for p in revlog.parents(n) if p != nullid]
1719 1720 parentlst.extend(p)
1720 1721 for n in hasset:
1721 1722 msngset.pop(n, None)
1722 1723
1723 1724 # This is a function generating function used to set up an environment
1724 1725 # for the inner function to execute in.
1725 1726 def manifest_and_file_collector(changedfileset):
1726 1727 # This is an information gathering function that gathers
1727 1728 # information from each changeset node that goes out as part of
1728 1729 # the changegroup. The information gathered is a list of which
1729 1730 # manifest nodes are potentially required (the recipient may
1730 1731 # already have them) and the total list of all files which were
1731 1732 # changed in any changeset in the changegroup.
1732 1733 #
1733 1734 # We also remember the first changenode we saw any manifest
1734 1735 # referenced by so we can later determine which changenode 'owns'
1735 1736 # the manifest.
1736 1737 def collect_manifests_and_files(clnode):
1737 1738 c = cl.read(clnode)
1738 1739 for f in c[3]:
1739 1740 # This is to make sure we only have one instance of each
1740 1741 # filename string for each filename.
1741 1742 changedfileset.setdefault(f, f)
1742 1743 msng_mnfst_set.setdefault(c[0], clnode)
1743 1744 return collect_manifests_and_files
1744 1745
1745 1746 # Figure out which manifest nodes (of the ones we think might be part
1746 1747 # of the changegroup) the recipient must know about and remove them
1747 1748 # from the changegroup.
1748 1749 def prune_manifests():
1749 1750 has_mnfst_set = {}
1750 1751 for n in msng_mnfst_set:
1751 1752 # If a 'missing' manifest thinks it belongs to a changenode
1752 1753 # the recipient is assumed to have, obviously the recipient
1753 1754 # must have that manifest.
1754 1755 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1755 1756 if linknode in has_cl_set:
1756 1757 has_mnfst_set[n] = 1
1757 1758 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1758 1759
1759 1760 # Use the information collected in collect_manifests_and_files to say
1760 1761 # which changenode any manifestnode belongs to.
1761 1762 def lookup_manifest_link(mnfstnode):
1762 1763 return msng_mnfst_set[mnfstnode]
1763 1764
1764 1765 # A function generating function that sets up the initial environment
1765 1766 # the inner function.
1766 1767 def filenode_collector(changedfiles):
1767 1768 next_rev = [0]
1768 1769 # This gathers information from each manifestnode included in the
1769 1770 # changegroup about which filenodes the manifest node references
1770 1771 # so we can include those in the changegroup too.
1771 1772 #
1772 1773 # It also remembers which changenode each filenode belongs to. It
1773 1774 # does this by assuming that a filenode belongs to the changenode
1774 1775 # the first manifest that references it belongs to.
1775 1776 def collect_msng_filenodes(mnfstnode):
1776 1777 r = mnfst.rev(mnfstnode)
1777 1778 if r == next_rev[0]:
1778 1779 # If the last rev we looked at was the one just previous,
1779 1780 # we only need to see a diff.
1780 1781 deltamf = mnfst.readdelta(mnfstnode)
1781 1782 # For each line in the delta
1782 1783 for f, fnode in deltamf.iteritems():
1783 1784 f = changedfiles.get(f, None)
1784 1785 # And if the file is in the list of files we care
1785 1786 # about.
1786 1787 if f is not None:
1787 1788 # Get the changenode this manifest belongs to
1788 1789 clnode = msng_mnfst_set[mnfstnode]
1789 1790 # Create the set of filenodes for the file if
1790 1791 # there isn't one already.
1791 1792 ndset = msng_filenode_set.setdefault(f, {})
1792 1793 # And set the filenode's changelog node to the
1793 1794 # manifest's if it hasn't been set already.
1794 1795 ndset.setdefault(fnode, clnode)
1795 1796 else:
1796 1797 # Otherwise we need a full manifest.
1797 1798 m = mnfst.read(mnfstnode)
1798 1799 # For every file we care about.
1799 1800 for f in changedfiles:
1800 1801 fnode = m.get(f, None)
1801 1802 # If it's in the manifest
1802 1803 if fnode is not None:
1803 1804 # See comments above.
1804 1805 clnode = msng_mnfst_set[mnfstnode]
1805 1806 ndset = msng_filenode_set.setdefault(f, {})
1806 1807 ndset.setdefault(fnode, clnode)
1807 1808 # Remember the revision we hope to see next.
1808 1809 next_rev[0] = r + 1
1809 1810 return collect_msng_filenodes
1810 1811
1811 1812 # We have a list of filenodes we think we need for a file, let's remove
1812 1813 # all those we know the recipient must have.
1813 1814 def prune_filenodes(f, filerevlog):
1814 1815 msngset = msng_filenode_set[f]
1815 1816 hasset = {}
1816 1817 # If a 'missing' filenode thinks it belongs to a changenode we
1817 1818 # assume the recipient must have, then the recipient must have
1818 1819 # that filenode.
1819 1820 for n in msngset:
1820 1821 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1821 1822 if clnode in has_cl_set:
1822 1823 hasset[n] = 1
1823 1824 prune_parents(filerevlog, hasset, msngset)
1824 1825
1825 1826 # A function generating function that sets up a context for the
1826 1827 # inner function.
1827 1828 def lookup_filenode_link_func(fname):
1828 1829 msngset = msng_filenode_set[fname]
1829 1830 # Lookup the changenode the filenode belongs to.
1830 1831 def lookup_filenode_link(fnode):
1831 1832 return msngset[fnode]
1832 1833 return lookup_filenode_link
1833 1834
1834 1835 # Add the nodes that were explicitly requested.
1835 1836 def add_extra_nodes(name, nodes):
1836 1837 if not extranodes or name not in extranodes:
1837 1838 return
1838 1839
1839 1840 for node, linknode in extranodes[name]:
1840 1841 if node not in nodes:
1841 1842 nodes[node] = linknode
1842 1843
1843 1844 # Now that we have all these utility functions to help out and
1844 1845 # logically divide up the task, generate the group.
1845 1846 def gengroup():
1846 1847 # The set of changed files starts empty.
1847 1848 changedfiles = {}
1848 1849 # Create a changenode group generator that will call our functions
1849 1850 # back to lookup the owning changenode and collect information.
1850 1851 group = cl.group(msng_cl_lst, identity,
1851 1852 manifest_and_file_collector(changedfiles))
1852 1853 for chnk in group:
1853 1854 yield chnk
1854 1855
1855 1856 # The list of manifests has been collected by the generator
1856 1857 # calling our functions back.
1857 1858 prune_manifests()
1858 1859 add_extra_nodes(1, msng_mnfst_set)
1859 1860 msng_mnfst_lst = msng_mnfst_set.keys()
1860 1861 # Sort the manifestnodes by revision number.
1861 1862 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1862 1863 # Create a generator for the manifestnodes that calls our lookup
1863 1864 # and data collection functions back.
1864 1865 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1865 1866 filenode_collector(changedfiles))
1866 1867 for chnk in group:
1867 1868 yield chnk
1868 1869
1869 1870 # These are no longer needed, dereference and toss the memory for
1870 1871 # them.
1871 1872 msng_mnfst_lst = None
1872 1873 msng_mnfst_set.clear()
1873 1874
1874 1875 if extranodes:
1875 1876 for fname in extranodes:
1876 1877 if isinstance(fname, int):
1877 1878 continue
1878 1879 msng_filenode_set.setdefault(fname, {})
1879 1880 changedfiles[fname] = 1
1880 1881 # Go through all our files in order sorted by name.
1881 1882 for fname in util.sort(changedfiles):
1882 1883 filerevlog = self.file(fname)
1883 1884 if not len(filerevlog):
1884 1885 raise util.Abort(_("empty or missing revlog for %s") % fname)
1885 1886 # Toss out the filenodes that the recipient isn't really
1886 1887 # missing.
1887 1888 if fname in msng_filenode_set:
1888 1889 prune_filenodes(fname, filerevlog)
1889 1890 add_extra_nodes(fname, msng_filenode_set[fname])
1890 1891 msng_filenode_lst = msng_filenode_set[fname].keys()
1891 1892 else:
1892 1893 msng_filenode_lst = []
1893 1894 # If any filenodes are left, generate the group for them,
1894 1895 # otherwise don't bother.
1895 1896 if len(msng_filenode_lst) > 0:
1896 1897 yield changegroup.chunkheader(len(fname))
1897 1898 yield fname
1898 1899 # Sort the filenodes by their revision #
1899 1900 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1900 1901 # Create a group generator and only pass in a changenode
1901 1902 # lookup function as we need to collect no information
1902 1903 # from filenodes.
1903 1904 group = filerevlog.group(msng_filenode_lst,
1904 1905 lookup_filenode_link_func(fname))
1905 1906 for chnk in group:
1906 1907 yield chnk
1907 1908 if fname in msng_filenode_set:
1908 1909 # Don't need this anymore, toss it to free memory.
1909 1910 del msng_filenode_set[fname]
1910 1911 # Signal that no more groups are left.
1911 1912 yield changegroup.closechunk()
1912 1913
1913 1914 if msng_cl_lst:
1914 1915 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1915 1916
1916 1917 return util.chunkbuffer(gengroup())
1917 1918
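As a hedged illustration of the extranodes argument described in the docstring above (the node values below are made up; repo, bases and heads are assumed to be defined elsewhere):

from mercurial.node import bin

# keys are revlog names: a file name, or the integer 1 for the manifest;
# values are lists of (node, linknode) pairs, linknode being the changelog
# node to transmit as the linkrev for that entry
extranodes = {
    'foo.txt': [(bin('11' * 20), bin('aa' * 20))],   # hypothetical nodes
    1:         [(bin('22' * 20), bin('aa' * 20))],
}
cg = repo.changegroupsubset(bases, heads, 'push', extranodes=extranodes)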
1918 1919 def changegroup(self, basenodes, source):
1919 1920 # to avoid a race we use changegroupsubset() (issue1320)
1920 1921 return self.changegroupsubset(basenodes, self.heads(), source)
1921 1922
1922 1923 def _changegroup(self, common, source):
1923 1924 """Generate a changegroup of all nodes that we have that a recipient
1924 1925 doesn't.
1925 1926
1926 1927 This is much easier than the previous function as we can assume that
1927 1928 the recipient has any changenode we aren't sending them.
1928 1929
1929 1930 common is the set of common nodes between remote and self"""
1930 1931
1931 1932 self.hook('preoutgoing', throw=True, source=source)
1932 1933
1933 1934 cl = self.changelog
1934 1935 nodes = cl.findmissing(common)
1935 1936 revset = set([cl.rev(n) for n in nodes])
1936 1937 self.changegroupinfo(nodes, source)
1937 1938
1938 1939 def identity(x):
1939 1940 return x
1940 1941
1941 1942 def gennodelst(log):
1942 1943 for r in log:
1943 1944 if log.linkrev(r) in revset:
1944 1945 yield log.node(r)
1945 1946
1946 1947 def changed_file_collector(changedfileset):
1947 1948 def collect_changed_files(clnode):
1948 1949 c = cl.read(clnode)
1949 1950 for fname in c[3]:
1950 1951 changedfileset[fname] = 1
1951 1952 return collect_changed_files
1952 1953
1953 1954 def lookuprevlink_func(revlog):
1954 1955 def lookuprevlink(n):
1955 1956 return cl.node(revlog.linkrev(revlog.rev(n)))
1956 1957 return lookuprevlink
1957 1958
1958 1959 def gengroup():
1959 1960 # construct a list of all changed files
1960 1961 changedfiles = {}
1961 1962
1962 1963 for chnk in cl.group(nodes, identity,
1963 1964 changed_file_collector(changedfiles)):
1964 1965 yield chnk
1965 1966
1966 1967 mnfst = self.manifest
1967 1968 nodeiter = gennodelst(mnfst)
1968 1969 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1969 1970 yield chnk
1970 1971
1971 1972 for fname in util.sort(changedfiles):
1972 1973 filerevlog = self.file(fname)
1973 1974 if not len(filerevlog):
1974 1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1975 1976 nodeiter = gennodelst(filerevlog)
1976 1977 nodeiter = list(nodeiter)
1977 1978 if nodeiter:
1978 1979 yield changegroup.chunkheader(len(fname))
1979 1980 yield fname
1980 1981 lookup = lookuprevlink_func(filerevlog)
1981 1982 for chnk in filerevlog.group(nodeiter, lookup):
1982 1983 yield chnk
1983 1984
1984 1985 yield changegroup.closechunk()
1985 1986
1986 1987 if nodes:
1987 1988 self.hook('outgoing', node=hex(nodes[0]), source=source)
1988 1989
1989 1990 return util.chunkbuffer(gengroup())
1990 1991
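The gennodelst() helper above simply filters a revlog down to the revisions whose linkrev falls inside the outgoing changelog set. The same idea in isolation, over a toy list of (node, linkrev) pairs standing in for a revlog (names are illustrative only):

def gennodelst(entries, revset):
    """Yield the nodes whose linkrev is among the wanted changelog revs."""
    for node, linkrev in entries:
        if linkrev in revset:
            yield node

entries = [('f0', 0), ('f1', 2), ('f2', 5)]
print list(gennodelst(entries, set([2, 3, 4])))    # ['f1']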
1991 1992 def addchangegroup(self, source, srctype, url, emptyok=False):
1992 1993 """add changegroup to repo.
1993 1994
1994 1995 return values:
1995 1996 - nothing changed or no source: 0
1996 1997 - more heads than before: 1+added heads (2..n)
1997 1998 - fewer heads than before: -1-removed heads (-2..-n)
1998 1999 - number of heads stays the same: 1
1999 2000 """
2000 2001 def csmap(x):
2001 2002 self.ui.debug(_("add changeset %s\n") % short(x))
2002 2003 return len(cl)
2003 2004
2004 2005 def revmap(x):
2005 2006 return cl.rev(x)
2006 2007
2007 2008 if not source:
2008 2009 return 0
2009 2010
2010 2011 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2011 2012
2012 2013 changesets = files = revisions = 0
2013 2014
2014 2015 # write changelog data to temp files so concurrent readers will not see
2015 2016 # inconsistent view
2016 2017 cl = self.changelog
2017 2018 cl.delayupdate()
2018 2019 oldheads = len(cl.heads())
2019 2020
2020 2021 tr = self.transaction()
2021 2022 try:
2022 2023 trp = weakref.proxy(tr)
2023 2024 # pull off the changeset group
2024 2025 self.ui.status(_("adding changesets\n"))
2025 2026 cor = len(cl) - 1
2026 2027 chunkiter = changegroup.chunkiter(source)
2027 2028 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2028 2029 raise util.Abort(_("received changelog group is empty"))
2029 2030 cnr = len(cl) - 1
2030 2031 changesets = cnr - cor
2031 2032
2032 2033 # pull off the manifest group
2033 2034 self.ui.status(_("adding manifests\n"))
2034 2035 chunkiter = changegroup.chunkiter(source)
2035 2036 # no need to check for empty manifest group here:
2036 2037 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2037 2038 # no new manifest will be created and the manifest group will
2038 2039 # be empty during the pull
2039 2040 self.manifest.addgroup(chunkiter, revmap, trp)
2040 2041
2041 2042 # process the files
2042 2043 self.ui.status(_("adding file changes\n"))
2043 2044 while 1:
2044 2045 f = changegroup.getchunk(source)
2045 2046 if not f:
2046 2047 break
2047 2048 self.ui.debug(_("adding %s revisions\n") % f)
2048 2049 fl = self.file(f)
2049 2050 o = len(fl)
2050 2051 chunkiter = changegroup.chunkiter(source)
2051 2052 if fl.addgroup(chunkiter, revmap, trp) is None:
2052 2053 raise util.Abort(_("received file revlog group is empty"))
2053 2054 revisions += len(fl) - o
2054 2055 files += 1
2055 2056
2056 2057 newheads = len(self.changelog.heads())
2057 2058 heads = ""
2058 2059 if oldheads and newheads != oldheads:
2059 2060 heads = _(" (%+d heads)") % (newheads - oldheads)
2060 2061
2061 2062 self.ui.status(_("added %d changesets"
2062 2063 " with %d changes to %d files%s\n")
2063 2064 % (changesets, revisions, files, heads))
2064 2065
2065 2066 if changesets > 0:
2066 2067 p = lambda: self.changelog.writepending() and self.root or ""
2067 2068 self.hook('pretxnchangegroup', throw=True,
2068 2069 node=hex(self.changelog.node(cor+1)), source=srctype,
2069 2070 url=url, pending=p)
2070 2071
2071 2072 # make changelog see real files again
2072 2073 cl.finalize(trp)
2073 2074
2074 2075 tr.close()
2075 2076 finally:
2076 2077 del tr
2077 2078
2078 2079 if changesets > 0:
2079 2080 # forcefully update the on-disk branch cache
2080 2081 self.ui.debug(_("updating the branch cache\n"))
2081 2082 self.branchtags()
2082 2083 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2083 2084 source=srctype, url=url)
2084 2085
2085 2086 for i in xrange(cor + 1, cnr + 1):
2086 2087 self.hook("incoming", node=hex(self.changelog.node(i)),
2087 2088 source=srctype, url=url)
2088 2089
2089 2090 # never return 0 here:
2090 2091 if newheads < oldheads:
2091 2092 return newheads - oldheads - 1
2092 2093 else:
2093 2094 return newheads - oldheads + 1
2094 2095
2095 2096
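A quick worked example of the return-value convention documented in addchangegroup(): the result encodes the change in head count, offset by one so that a successful pull is never reported as 0.

def describe(oldheads, newheads):
    # mirrors the tail of addchangegroup(): never return 0 here
    if newheads < oldheads:
        return newheads - oldheads - 1
    return newheads - oldheads + 1

print describe(1, 1)   # 1   (head count unchanged)
print describe(1, 3)   # 3   (1 + 2 added heads)
print describe(3, 1)   # -3  (-1 - 2 removed heads)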
2096 2097 def stream_in(self, remote):
2097 2098 fp = remote.stream_out()
2098 2099 l = fp.readline()
2099 2100 try:
2100 2101 resp = int(l)
2101 2102 except ValueError:
2102 2103 raise error.ResponseError(
2103 2104 _('Unexpected response from remote server:'), l)
2104 2105 if resp == 1:
2105 2106 raise util.Abort(_('operation forbidden by server'))
2106 2107 elif resp == 2:
2107 2108 raise util.Abort(_('locking the remote repository failed'))
2108 2109 elif resp != 0:
2109 2110 raise util.Abort(_('the server sent an unknown error code'))
2110 2111 self.ui.status(_('streaming all changes\n'))
2111 2112 l = fp.readline()
2112 2113 try:
2113 2114 total_files, total_bytes = map(int, l.split(' ', 1))
2114 2115 except (ValueError, TypeError):
2115 2116 raise error.ResponseError(
2116 2117 _('Unexpected response from remote server:'), l)
2117 2118 self.ui.status(_('%d files to transfer, %s of data\n') %
2118 2119 (total_files, util.bytecount(total_bytes)))
2119 2120 start = time.time()
2120 2121 for i in xrange(total_files):
2121 2122 # XXX doesn't support '\n' or '\r' in filenames
2122 2123 l = fp.readline()
2123 2124 try:
2124 2125 name, size = l.split('\0', 1)
2125 2126 size = int(size)
2126 2127 except (ValueError, TypeError):
2127 2128 raise error.ResponseError(
2128 2129 _('Unexpected response from remote server:'), l)
2129 2130 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2130 2131 ofp = self.sopener(name, 'w')
2131 2132 for chunk in util.filechunkiter(fp, limit=size):
2132 2133 ofp.write(chunk)
2133 2134 ofp.close()
2134 2135 elapsed = time.time() - start
2135 2136 if elapsed <= 0:
2136 2137 elapsed = 0.001
2137 2138 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2138 2139 (util.bytecount(total_bytes), elapsed,
2139 2140 util.bytecount(total_bytes / elapsed)))
2140 2141 self.invalidate()
2141 2142 return len(self.heads()) + 1
2142 2143
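stream_in() implies a simple line-oriented framing on the wire: a numeric status line, a 'total_files total_bytes' line, then for every file a 'name\0size' header followed by size raw bytes. A hedged reader for that framing, decoupled from the repository (fp is assumed to be a file-like object already positioned at a stream_out response):

def read_stream(fp):
    """Parse the stream_out framing consumed by stream_in() above."""
    resp = int(fp.readline())
    if resp != 0:
        raise ValueError("server refused streaming (code %d)" % resp)
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    for unused in xrange(total_files):
        name, size = fp.readline().split('\0', 1)
        size = int(size)
        # the real code bounds memory with util.filechunkiter(); this
        # sketch simply reads the whole file body at once
        yield name, fp.read(size)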
2143 2144 def clone(self, remote, heads=[], stream=False):
2144 2145 '''clone remote repository.
2145 2146
2146 2147 keyword arguments:
2147 2148 heads: list of revs to clone (forces use of pull)
2148 2149 stream: use streaming clone if possible'''
2149 2150
2150 2151 # now, all clients that can request uncompressed clones can
2151 2152 # read repo formats supported by all servers that can serve
2152 2153 # them.
2153 2154
2154 2155 # if revlog format changes, client will have to check version
2155 2156 # and format flags on "stream" capability, and use
2156 2157 # uncompressed only if compatible.
2157 2158
2158 2159 if stream and not heads and remote.capable('stream'):
2159 2160 return self.stream_in(remote)
2160 2161 return self.pull(remote, heads)
2161 2162
2162 2163 # used to avoid circular references so destructors work
2163 2164 def aftertrans(files):
2164 2165 renamefiles = [tuple(t) for t in files]
2165 2166 def a():
2166 2167 for src, dest in renamefiles:
2167 2168 util.rename(src, dest)
2168 2169 return a
2169 2170
2170 2171 def instance(ui, path, create):
2171 2172 return localrepository(ui, util.drop_scheme('file', path), create)
2172 2173
2173 2174 def islocal(path):
2174 2175 return True
@@ -1,359 +1,358 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import errno, getpass, os, re, socket, sys, tempfile
10 10 import config, traceback, util, error
11 11
12 12 _booleans = {'1':True, 'yes':True, 'true':True, 'on':True,
13 13 '0':False, 'no':False, 'false':False, 'off':False}
14 14
15 15 class ui(object):
16 16 def __init__(self, parentui=None):
17 17 self.buffers = []
18 18 self.quiet = self.verbose = self.debugflag = self.traceback = False
19 19 self.interactive = self.report_untrusted = True
20 20 self.overlay = config.config()
21 21 self.cdata = config.config()
22 22 self.ucdata = config.config()
23 self.parentui = None
24 23 self.trusted_users = {}
25 24 self.trusted_groups = {}
26 25
27 26 if parentui:
28 self.parentui = parentui.parentui or parentui
29 self.cdata = self.parentui.cdata.copy()
30 self.ucdata = self.parentui.ucdata.copy()
27 self.cdata = parentui.cdata.copy()
28 self.ucdata = parentui.ucdata.copy()
31 29 self.overlay = parentui.overlay.copy()
32 30 self.trusted_users = parentui.trusted_users.copy()
33 31 self.trusted_groups = parentui.trusted_groups.copy()
34 self.buffers = parentui.buffers
35 32 self.fixconfig()
36 33 else:
37 34 # we always trust global config files
38 35 for f in util.rcpath():
39 36 self.readconfig(f, assumetrusted=True)
37 def copy(self):
38 return ui(self)
40 39
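With the parentui chain removed, per-repository configuration is obtained by copying a shared base ui and layering the repository's own config on top of the copy. A minimal sketch of that pattern (the repository path is illustrative):

import os
from mercurial import ui as uimod

baseui = uimod.ui()                      # global and user config only
repoui = baseui.copy()                   # start the repo ui as a copy
repopath = '/path/to/repo'               # hypothetical repository root
repoui.readconfig(os.path.join(repopath, '.hg', 'hgrc'), root=repopath)
# baseui is left untouched, so repo-specific settings never leak back
# into the ui that other repositories are built from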
41 40 _isatty = None
42 41 def isatty(self):
43 42 if ui._isatty is None:
44 43 try:
45 44 ui._isatty = sys.stdin.isatty()
46 45 except AttributeError: # not a real file object
47 46 ui._isatty = False
48 47 except IOError:
49 48 # access to stdin is unsafe in a WSGI environment
50 49 ui._isatty = False
51 50 return ui._isatty
52 51
53 52 def _is_trusted(self, fp, f):
54 53 st = util.fstat(fp)
55 54 if util.isowner(fp, st):
56 55 return True
57 56
58 57 tusers = self.trusted_users
59 58 tgroups = self.trusted_groups
60 59 if '*' in tusers or '*' in tgroups:
61 60 return True
62 61
63 62 user = util.username(st.st_uid)
64 63 group = util.groupname(st.st_gid)
65 64 if user in tusers or group in tgroups or user == util.username():
66 65 return True
67 66
68 67 if self.report_untrusted:
69 68 self.warn(_('Not trusting file %s from untrusted '
70 69 'user %s, group %s\n') % (f, user, group))
71 70 return False
72 71
73 72 def readconfig(self, filename, root=None, assumetrusted=False,
74 73 sections = None):
75 74 try:
76 75 fp = open(filename)
77 76 except IOError:
78 77 if not sections: # ignore unless we were looking for something
79 78 return
80 79 raise
81 80
82 81 cdata = config.config()
83 82 trusted = sections or assumetrusted or self._is_trusted(fp, filename)
84 83
85 84 try:
86 85 cdata.read(filename, fp)
87 86 except error.ConfigError, inst:
88 87 if trusted:
89 88 raise
90 89 self.warn(_("Ignored: %s\n") % str(inst))
91 90
92 91 if trusted:
93 92 self.cdata.update(cdata, sections)
94 93 self.cdata.update(self.overlay, sections)
95 94 self.ucdata.update(cdata, sections)
96 95 self.ucdata.update(self.overlay, sections)
97 96
98 97 if root is None:
99 98 root = os.path.expanduser('~')
100 99 self.fixconfig(root=root)
101 100
102 101 def fixconfig(self, section=None, name=None, value=None, root=None):
103 102 # translate paths relative to root (or home) into absolute paths
104 103 if section is None or section == 'paths':
105 104 if root is None:
106 105 root = os.getcwd()
107 106 items = section and [(name, value)] or []
108 107 for cdata in self.cdata, self.ucdata, self.overlay:
109 108 if not items and 'paths' in cdata:
110 109 pathsitems = cdata.items('paths')
111 110 else:
112 111 pathsitems = items
113 112 for n, path in pathsitems:
114 113 if path and "://" not in path and not os.path.isabs(path):
115 114 cdata.set("paths", n,
116 115 os.path.normpath(os.path.join(root, path)))
117 116
118 117 # update ui options
119 118 if section is None or section == 'ui':
120 119 self.debugflag = self.configbool('ui', 'debug')
121 120 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
122 121 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
123 122 if self.verbose and self.quiet:
124 123 self.quiet = self.verbose = False
125 124
126 125 self.report_untrusted = self.configbool("ui", "report_untrusted",
127 126 True)
128 127 self.interactive = self.configbool("ui", "interactive",
129 128 self.isatty())
130 129 self.traceback = self.configbool('ui', 'traceback', False)
131 130
132 131 # update trust information
133 132 if section is None or section == 'trusted':
134 133 for user in self.configlist('trusted', 'users'):
135 134 self.trusted_users[user] = 1
136 135 for group in self.configlist('trusted', 'groups'):
137 136 self.trusted_groups[group] = 1
138 137
139 138 def setconfig(self, section, name, value):
140 139 for cdata in (self.overlay, self.cdata, self.ucdata):
141 140 cdata.set(section, name, value)
142 141 self.fixconfig(section, name, value)
143 142
144 143 def _get_cdata(self, untrusted):
145 144 if untrusted:
146 145 return self.ucdata
147 146 return self.cdata
148 147
149 148 def configsource(self, section, name, untrusted=False):
150 149 return self._get_cdata(untrusted).getsource(section, name) or 'none'
151 150
152 151 def config(self, section, name, default=None, untrusted=False):
153 152 value = self._get_cdata(untrusted).get(section, name, default)
154 153 if self.debugflag and not untrusted:
155 154 uvalue = self.ucdata.get(section, name)
156 155 if uvalue is not None and uvalue != value:
157 156 self.warn(_("Ignoring untrusted configuration option "
158 157 "%s.%s = %s\n") % (section, name, uvalue))
159 158 return value
160 159
161 160 def configbool(self, section, name, default=False, untrusted=False):
162 161 v = self.config(section, name, None, untrusted)
163 162 if v == None:
164 163 return default
165 164 if v.lower() not in _booleans:
166 165 raise error.ConfigError(_("%s.%s not a boolean ('%s')")
167 166 % (section, name, v))
168 167 return _booleans[v.lower()]
169 168
170 169 def configlist(self, section, name, default=None, untrusted=False):
171 170 """Return a list of comma/space separated strings"""
172 171 result = self.config(section, name, untrusted=untrusted)
173 172 if result is None:
174 173 result = default or []
175 174 if isinstance(result, basestring):
176 175 result = result.replace(",", " ").split()
177 176 return result
178 177
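A short usage sketch of the accessors defined above; the section and option values are made up:

from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('paths', 'default', 'http://example.com/repo')
u.setconfig('web', 'allow_push', 'alice, bob')
u.setconfig('ui', 'verbose', 'true')

print u.config('paths', 'default')        # 'http://example.com/repo'
print u.configbool('ui', 'verbose')       # True, via the _booleans table
print u.configlist('web', 'allow_push')   # ['alice', 'bob']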
179 178 def has_section(self, section, untrusted=False):
180 179 '''tell whether section exists in config.'''
181 180 return section in self._get_cdata(untrusted)
182 181
183 182 def configitems(self, section, untrusted=False):
184 183 items = self._get_cdata(untrusted).items(section)
185 184 if self.debugflag and not untrusted:
186 185 for k,v in self.ucdata.items(section):
187 186 if self.cdata.get(section, k) != v:
188 187 self.warn(_("Ignoring untrusted configuration option "
189 188 "%s.%s = %s\n") % (section, k, v))
190 189 return items
191 190
192 191 def walkconfig(self, untrusted=False):
193 192 cdata = self._get_cdata(untrusted)
194 193 for section in cdata.sections():
195 194 for name, value in self.configitems(section, untrusted):
196 195 yield section, name, str(value).replace('\n', '\\n')
197 196
198 197 def username(self):
199 198 """Return default username to be used in commits.
200 199
201 200 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
202 201 the search stops at the first of these that is set.
203 202 If not found and ui.askusername is True, ask the user, else use
204 203 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
205 204 """
206 205 user = os.environ.get("HGUSER")
207 206 if user is None:
208 207 user = self.config("ui", "username")
209 208 if user is None:
210 209 user = os.environ.get("EMAIL")
211 210 if user is None and self.configbool("ui", "askusername"):
212 211 user = self.prompt(_("enter a commit username:"), default=None)
213 212 if user is None:
214 213 try:
215 214 user = '%s@%s' % (util.getuser(), socket.getfqdn())
216 215 self.warn(_("No username found, using '%s' instead\n") % user)
217 216 except KeyError:
218 217 pass
219 218 if not user:
220 219 raise util.Abort(_("Please specify a username."))
221 220 if "\n" in user:
222 221 raise util.Abort(_("username %s contains a newline\n") % repr(user))
223 222 return user
224 223
225 224 def shortuser(self, user):
226 225 """Return a short representation of a user name or email address."""
227 226 if not self.verbose: user = util.shortuser(user)
228 227 return user
229 228
230 229 def expandpath(self, loc, default=None):
231 230 """Return repository location relative to cwd or from [paths]"""
232 231 if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
233 232 return loc
234 233
235 234 path = self.config("paths", loc)
236 235 if not path and default is not None:
237 236 path = self.config("paths", default)
238 237 return path or loc
239 238
240 239 def pushbuffer(self):
241 240 self.buffers.append([])
242 241
243 242 def popbuffer(self):
244 243 return "".join(self.buffers.pop())
245 244
246 245 def write(self, *args):
247 246 if self.buffers:
248 247 self.buffers[-1].extend([str(a) for a in args])
249 248 else:
250 249 for a in args:
251 250 sys.stdout.write(str(a))
252 251
253 252 def write_err(self, *args):
254 253 try:
255 254 if not sys.stdout.closed: sys.stdout.flush()
256 255 for a in args:
257 256 sys.stderr.write(str(a))
258 257 # stderr may be buffered under win32 when redirected to files,
259 258 # including stdout.
260 259 if not sys.stderr.closed: sys.stderr.flush()
261 260 except IOError, inst:
262 261 if inst.errno != errno.EPIPE:
263 262 raise
264 263
265 264 def flush(self):
266 265 try: sys.stdout.flush()
267 266 except: pass
268 267 try: sys.stderr.flush()
269 268 except: pass
270 269
271 270 def _readline(self, prompt=''):
272 271 if self.isatty():
273 272 try:
274 273 # magically add command line editing support, where
275 274 # available
276 275 import readline
277 276 # force demandimport to really load the module
278 277 readline.read_history_file
279 278 # windows sometimes raises something other than ImportError
280 279 except Exception:
281 280 pass
282 281 line = raw_input(prompt)
283 282 # When stdin is in binary mode on Windows, it can cause
284 283 # raw_input() to emit an extra trailing carriage return
285 284 if os.linesep == '\r\n' and line and line[-1] == '\r':
286 285 line = line[:-1]
287 286 return line
288 287
289 288 def prompt(self, msg, pat=None, default="y"):
290 289 """Prompt user with msg, read response, and ensure it matches pat
291 290
292 291 If not interactive -- the default is returned
293 292 """
294 293 if not self.interactive:
295 294 self.note(msg, ' ', default, "\n")
296 295 return default
297 296 while True:
298 297 try:
299 298 r = self._readline(msg + ' ')
300 299 if not r:
301 300 return default
302 301 if not pat or re.match(pat, r):
303 302 return r
304 303 else:
305 304 self.write(_("unrecognized response\n"))
306 305 except EOFError:
307 306 raise util.Abort(_('response expected'))
308 307
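For illustration, a hedged use of prompt() with a response pattern; the message is made up, and when the ui is not interactive the default is returned without touching stdin:

from mercurial import ui as uimod

u = uimod.ui()
# pat is matched with re.match(), so any answer not starting with y, n or q
# makes the interactive loop ask again
answer = u.prompt("apply this change [ynq]?", pat="[ynq]", default="y")
print answer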
309 308 def getpass(self, prompt=None, default=None):
310 309 if not self.interactive: return default
311 310 try:
312 311 return getpass.getpass(prompt or _('password: '))
313 312 except EOFError:
314 313 raise util.Abort(_('response expected'))
315 314 def status(self, *msg):
316 315 if not self.quiet: self.write(*msg)
317 316 def warn(self, *msg):
318 317 self.write_err(*msg)
319 318 def note(self, *msg):
320 319 if self.verbose: self.write(*msg)
321 320 def debug(self, *msg):
322 321 if self.debugflag: self.write(*msg)
323 322 def edit(self, text, user):
324 323 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
325 324 text=True)
326 325 try:
327 326 f = os.fdopen(fd, "w")
328 327 f.write(text)
329 328 f.close()
330 329
331 330 editor = self.geteditor()
332 331
333 332 util.system("%s \"%s\"" % (editor, name),
334 333 environ={'HGUSER': user},
335 334 onerr=util.Abort, errprefix=_("edit failed"))
336 335
337 336 f = open(name)
338 337 t = f.read()
339 338 f.close()
340 339 t = re.sub("(?m)^HG:.*\n", "", t)
341 340 finally:
342 341 os.unlink(name)
343 342
344 343 return t
345 344
346 345 def print_exc(self):
347 346 '''print exception traceback if traceback printing enabled.
348 347 only to call in exception handler. returns true if traceback
349 348 printed.'''
350 349 if self.traceback:
351 350 traceback.print_exc()
352 351 return self.traceback
353 352
354 353 def geteditor(self):
355 354 '''return editor to use'''
356 355 return (os.environ.get("HGEDITOR") or
357 356 self.config("ui", "editor") or
358 357 os.environ.get("VISUAL") or
359 358 os.environ.get("EDITOR", "vi"))
@@ -1,106 +1,104 b''
1 1 #!/bin/sh
2 2 # Test basic extension support
3 3
4 4 "$TESTDIR/hghave" no-outer-repo || exit 80
5 5
6 6 cat > foobar.py <<EOF
7 7 import os
8 8 from mercurial import commands
9 9
10 10 def uisetup(ui):
11 11 ui.write("uisetup called\\n")
12 ui.write("ui.parentui is%s None\\n" % (ui.parentui is not None
13 and "not" or ""))
14 12
15 13 def reposetup(ui, repo):
16 14 ui.write("reposetup called for %s\\n" % os.path.basename(repo.root))
17 15 ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!"))
18 16
19 17 def foo(ui, *args, **kwargs):
20 18 ui.write("Foo\\n")
21 19
22 20 def bar(ui, *args, **kwargs):
23 21 ui.write("Bar\\n")
24 22
25 23 cmdtable = {
26 24 "foo": (foo, [], "hg foo"),
27 25 "bar": (bar, [], "hg bar"),
28 26 }
29 27
30 28 commands.norepo += ' bar'
31 29 EOF
32 30 abspath=`pwd`/foobar.py
33 31
34 32 mkdir barfoo
35 33 cp foobar.py barfoo/__init__.py
36 34 barfoopath=`pwd`/barfoo
37 35
38 36 hg init a
39 37 cd a
40 38 echo foo > file
41 39 hg add file
42 40 hg commit -m 'add file'
43 41
44 42 echo '[extensions]' >> $HGRCPATH
45 43 echo "foobar = $abspath" >> $HGRCPATH
46 44 hg foo
47 45
48 46 cd ..
49 47 hg clone a b
50 48
51 49 hg bar
52 50 echo 'foobar = !' >> $HGRCPATH
53 51
54 52 echo '% module/__init__.py-style'
55 53 echo "barfoo = $barfoopath" >> $HGRCPATH
56 54 cd a
57 55 hg foo
58 56 echo 'barfoo = !' >> $HGRCPATH
59 57
60 58 cd ..
61 59 cat > empty.py <<EOF
62 60 '''empty cmdtable
63 61 '''
64 62 cmdtable = {}
65 63 EOF
66 64 emptypath=`pwd`/empty.py
67 65 echo "empty = $emptypath" >> $HGRCPATH
68 66 hg help empty
69 67 echo 'empty = !' >> $HGRCPATH
70 68
71 69 cat > debugextension.py <<EOF
72 70 '''only debugcommands
73 71 '''
74 72 def debugfoobar(ui, repo, *args, **opts):
75 73 "yet another debug command"
76 74 pass
77 75
78 76 cmdtable = {"debugfoobar": (debugfoobar, (), "hg debugfoobar")}
79 77 EOF
80 78 debugpath=`pwd`/debugextension.py
81 79 echo "debugextension = $debugpath" >> $HGRCPATH
82 80 hg help debugextension
83 81 hg --debug help debugextension
84 82 echo 'debugextension = !' >> $HGRCPATH
85 83
86 84 echo % issue811
87 85 debugpath=`pwd`/debugissue811.py
88 86 cat > debugissue811.py <<EOF
89 87 '''show all loaded extensions
90 88 '''
91 89 from mercurial import extensions, commands
92 90
93 91 def debugextensions(ui):
94 92 "yet another debug command"
95 93 ui.write("%s\n" % '\n'.join([x for x, y in extensions.extensions()]))
96 94
97 95 cmdtable = {"debugextensions": (debugextensions, (), "hg debugextensions")}
98 96 commands.norepo += " debugextensions"
99 97 EOF
100 98 echo "debugissue811 = $debugpath" >> $HGRCPATH
101 99 echo "mq=" >> $HGRCPATH
102 100 echo "hgext.mq=" >> $HGRCPATH
103 101 echo "hgext/mq=" >> $HGRCPATH
104 102
105 103 echo % show extensions
106 104 hg debugextensions
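The test above exercises an extension's uisetup/reposetup hooks now that the ui.parentui checks are gone. For an extension that used to inspect ui.parentui, a hedged sketch of the simplified shape (file name and command are hypothetical, not part of the test):

# example.py - hypothetical extension
def uisetup(ui):
    # there is no ui.parentui to distinguish any more: extensions just
    # use the ui they are handed
    ui.write("uisetup called\n")

def reposetup(ui, repo):
    ui.write("reposetup called for %s\n" % repo.root)

def hello(ui, repo, *args, **opts):
    "toy command"
    ui.write("hello from %s\n" % repo.root)

cmdtable = {
    "hello": (hello, [], "hg hello"),
}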
@@ -1,60 +1,56 b''
1 1 uisetup called
2 ui.parentui isnot None
3 2 reposetup called for a
4 3 ui == repo.ui
5 4 Foo
6 5 uisetup called
7 ui.parentui is None
8 6 reposetup called for a
9 7 ui == repo.ui
10 8 reposetup called for b
11 9 ui == repo.ui
12 10 updating working directory
13 11 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
14 12 uisetup called
15 ui.parentui is None
16 13 Bar
17 14 % module/__init__.py-style
18 15 uisetup called
19 ui.parentui isnot None
20 16 reposetup called for a
21 17 ui == repo.ui
22 18 Foo
23 19 empty extension - empty cmdtable
24 20
25 21 no commands defined
26 22 debugextension extension - only debugcommands
27 23
28 24 no commands defined
29 25 debugextension extension - only debugcommands
30 26
31 27 list of commands:
32 28
33 29 debugfoobar:
34 30 yet another debug command
35 31
36 32 enabled extensions:
37 33
38 34 debugextension only debugcommands
39 35
40 36 global options:
41 37 -R --repository repository root directory or symbolic path name
42 38 --cwd change working directory
43 39 -y --noninteractive do not prompt, assume 'yes' for any required
44 40 answers
45 41 -q --quiet suppress output
46 42 -v --verbose enable additional output
47 43 --config set/override config option
48 44 --debug enable debugging output
49 45 --debugger start debugger
50 46 --encoding set the charset encoding (default: ascii)
51 47 --encodingmode set the charset encoding mode (default: strict)
52 48 --traceback print traceback on exception
53 49 --time time how long the command takes
54 50 --profile print command execution profile
55 51 --version output version information and exit
56 52 -h --help display help and exit
57 53 % issue811
58 54 % show extensions
59 55 debugissue811
60 56 mq