cmdutil: changeset_printer: use methods of filectx/changectx....
Greg Ward
r9547:f57640bf default
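The change replaces direct indexing into the tuple returned by changelog.read() with the named accessor methods that changectx already provides (and that filectx gains in the second hunk). Purely as an illustrative sketch, not part of the diff, here is the mapping the patch relies on; the two helper names are hypothetical:

# Illustrative sketch only (hypothetical helper names). It restates the
# correspondence visible in context.py below: changelog.read() returns a
# tuple (manifestnode, user, date, files, description, extra), and
# changectx wraps each slot in a named method.

def show_via_tuple(repo, changenode):
    # before: index into the raw changelog entry
    changes = repo.changelog.read(changenode)
    return (changes[1],           # user
            changes[2],           # date
            changes[3],           # files
            changes[4].strip(),   # description
            changes[5],           # extra dict
            changes[0])           # manifest node

def show_via_ctx(ctx):
    # after: ask the changectx for the same data by name
    return (ctx.user(), ctx.date(), ctx.files(),
            ctx.description().strip(), ctx.extra(), ctx.manifestnode())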
@@ -1,1290 +1,1290 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, glob
11 11 import mdiff, bdiff, util, templater, patch, error, encoding
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
16 16 def findpossible(cmd, table, strict=False):
17 17 """
18 18 Return cmd -> (aliases, command table entry)
19 19 for each matching command.
20 20 Return debug commands (or their aliases) only if no normal command matches.
21 21 """
22 22 choice = {}
23 23 debugchoice = {}
24 24 for e in table.keys():
25 25 aliases = e.lstrip("^").split("|")
26 26 found = None
27 27 if cmd in aliases:
28 28 found = cmd
29 29 elif not strict:
30 30 for a in aliases:
31 31 if a.startswith(cmd):
32 32 found = a
33 33 break
34 34 if found is not None:
35 35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 36 debugchoice[found] = (aliases, table[e])
37 37 else:
38 38 choice[found] = (aliases, table[e])
39 39
40 40 if not choice and debugchoice:
41 41 choice = debugchoice
42 42
43 43 return choice
44 44
45 45 def findcmd(cmd, table, strict=True):
46 46 """Return (aliases, command table entry) for command string."""
47 47 choice = findpossible(cmd, table, strict)
48 48
49 49 if cmd in choice:
50 50 return choice[cmd]
51 51
52 52 if len(choice) > 1:
53 53 clist = choice.keys()
54 54 clist.sort()
55 55 raise error.AmbiguousCommand(cmd, clist)
56 56
57 57 if choice:
58 58 return choice.values()[0]
59 59
60 60 raise error.UnknownCommand(cmd)
61 61
62 62 def bail_if_changed(repo):
63 63 if repo.dirstate.parents()[1] != nullid:
64 64 raise util.Abort(_('outstanding uncommitted merge'))
65 65 modified, added, removed, deleted = repo.status()[:4]
66 66 if modified or added or removed or deleted:
67 67 raise util.Abort(_("outstanding uncommitted changes"))
68 68
69 69 def logmessage(opts):
70 70 """ get the log message according to -m and -l option """
71 71 message = opts.get('message')
72 72 logfile = opts.get('logfile')
73 73
74 74 if message and logfile:
75 75 raise util.Abort(_('options --message and --logfile are mutually '
76 76 'exclusive'))
77 77 if not message and logfile:
78 78 try:
79 79 if logfile == '-':
80 80 message = sys.stdin.read()
81 81 else:
82 82 message = open(logfile).read()
83 83 except IOError, inst:
84 84 raise util.Abort(_("can't read commit message '%s': %s") %
85 85 (logfile, inst.strerror))
86 86 return message
87 87
88 88 def loglimit(opts):
89 89 """get the log limit according to option -l/--limit"""
90 90 limit = opts.get('limit')
91 91 if limit:
92 92 try:
93 93 limit = int(limit)
94 94 except ValueError:
95 95 raise util.Abort(_('limit must be a positive integer'))
96 96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 97 else:
98 98 limit = sys.maxint
99 99 return limit
100 100
101 101 def remoteui(src, opts):
102 102 'build a remote ui from ui or repo and opts'
103 103 if hasattr(src, 'baseui'): # looks like a repository
104 104 dst = src.baseui.copy() # drop repo-specific config
105 105 src = src.ui # copy target options from repo
106 106 else: # assume it's a global ui object
107 107 dst = src.copy() # keep all global options
108 108
109 109 # copy ssh-specific options
110 110 for o in 'ssh', 'remotecmd':
111 111 v = opts.get(o) or src.config('ui', o)
112 112 if v:
113 113 dst.setconfig("ui", o, v)
114 114 # copy bundle-specific options
115 115 r = src.config('bundle', 'mainreporoot')
116 116 if r:
117 117 dst.setconfig('bundle', 'mainreporoot', r)
118 118
119 119 return dst
120 120
121 121 def revpair(repo, revs):
122 122 '''return pair of nodes, given list of revisions. second item can
123 123 be None, meaning use working dir.'''
124 124
125 125 def revfix(repo, val, defval):
126 126 if not val and val != 0 and defval is not None:
127 127 val = defval
128 128 return repo.lookup(val)
129 129
130 130 if not revs:
131 131 return repo.dirstate.parents()[0], None
132 132 end = None
133 133 if len(revs) == 1:
134 134 if revrangesep in revs[0]:
135 135 start, end = revs[0].split(revrangesep, 1)
136 136 start = revfix(repo, start, 0)
137 137 end = revfix(repo, end, len(repo) - 1)
138 138 else:
139 139 start = revfix(repo, revs[0], None)
140 140 elif len(revs) == 2:
141 141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 142 raise util.Abort(_('too many revisions specified'))
143 143 start = revfix(repo, revs[0], None)
144 144 end = revfix(repo, revs[1], None)
145 145 else:
146 146 raise util.Abort(_('too many revisions specified'))
147 147 return start, end
148 148
149 149 def revrange(repo, revs):
150 150 """Yield revision as strings from a list of revision specifications."""
151 151
152 152 def revfix(repo, val, defval):
153 153 if not val and val != 0 and defval is not None:
154 154 return defval
155 155 return repo.changelog.rev(repo.lookup(val))
156 156
157 157 seen, l = set(), []
158 158 for spec in revs:
159 159 if revrangesep in spec:
160 160 start, end = spec.split(revrangesep, 1)
161 161 start = revfix(repo, start, 0)
162 162 end = revfix(repo, end, len(repo) - 1)
163 163 step = start > end and -1 or 1
164 164 for rev in xrange(start, end+step, step):
165 165 if rev in seen:
166 166 continue
167 167 seen.add(rev)
168 168 l.append(rev)
169 169 else:
170 170 rev = revfix(repo, spec, None)
171 171 if rev in seen:
172 172 continue
173 173 seen.add(rev)
174 174 l.append(rev)
175 175
176 176 return l
177 177
178 178 def make_filename(repo, pat, node,
179 179 total=None, seqno=None, revwidth=None, pathname=None):
180 180 node_expander = {
181 181 'H': lambda: hex(node),
182 182 'R': lambda: str(repo.changelog.rev(node)),
183 183 'h': lambda: short(node),
184 184 }
185 185 expander = {
186 186 '%': lambda: '%',
187 187 'b': lambda: os.path.basename(repo.root),
188 188 }
189 189
190 190 try:
191 191 if node:
192 192 expander.update(node_expander)
193 193 if node:
194 194 expander['r'] = (lambda:
195 195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 196 if total is not None:
197 197 expander['N'] = lambda: str(total)
198 198 if seqno is not None:
199 199 expander['n'] = lambda: str(seqno)
200 200 if total is not None and seqno is not None:
201 201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 202 if pathname is not None:
203 203 expander['s'] = lambda: os.path.basename(pathname)
204 204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 205 expander['p'] = lambda: pathname
206 206
207 207 newname = []
208 208 patlen = len(pat)
209 209 i = 0
210 210 while i < patlen:
211 211 c = pat[i]
212 212 if c == '%':
213 213 i += 1
214 214 c = pat[i]
215 215 c = expander[c]()
216 216 newname.append(c)
217 217 i += 1
218 218 return ''.join(newname)
219 219 except KeyError, inst:
220 220 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
221 221 inst.args[0])
222 222
223 223 def make_file(repo, pat, node=None,
224 224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225 225
226 226 writable = 'w' in mode or 'a' in mode
227 227
228 228 if not pat or pat == '-':
229 229 return writable and sys.stdout or sys.stdin
230 230 if hasattr(pat, 'write') and writable:
231 231 return pat
232 232 if hasattr(pat, 'read') and 'r' in mode:
233 233 return pat
234 234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 235 pathname),
236 236 mode)
237 237
238 238 def expandpats(pats):
239 239 if not util.expandglobs:
240 240 return list(pats)
241 241 ret = []
242 242 for p in pats:
243 243 kind, name = _match._patsplit(p, None)
244 244 if kind is None:
245 245 try:
246 246 globbed = glob.glob(name)
247 247 except re.error:
248 248 globbed = [name]
249 249 if globbed:
250 250 ret.extend(globbed)
251 251 continue
252 252 ret.append(p)
253 253 return ret
254 254
255 255 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
256 256 if not globbed and default == 'relpath':
257 257 pats = expandpats(pats or [])
258 258 m = _match.match(repo.root, repo.getcwd(), pats,
259 259 opts.get('include'), opts.get('exclude'), default)
260 260 def badfn(f, msg):
261 261 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
262 262 m.bad = badfn
263 263 return m
264 264
265 265 def matchall(repo):
266 266 return _match.always(repo.root, repo.getcwd())
267 267
268 268 def matchfiles(repo, files):
269 269 return _match.exact(repo.root, repo.getcwd(), files)
270 270
271 271 def findrenames(repo, added, removed, threshold):
272 272 '''find renamed files -- yields (before, after, score) tuples'''
273 273 ctx = repo['.']
274 274 for a in added:
275 275 aa = repo.wread(a)
276 276 bestname, bestscore = None, threshold
277 277 for r in removed:
278 278 if r not in ctx:
279 279 continue
280 280 rr = ctx.filectx(r).data()
281 281
282 282 # bdiff.blocks() returns blocks of matching lines
283 283 # count the number of bytes in each
284 284 equal = 0
285 285 alines = mdiff.splitnewlines(aa)
286 286 matches = bdiff.blocks(aa, rr)
287 287 for x1,x2,y1,y2 in matches:
288 288 for line in alines[x1:x2]:
289 289 equal += len(line)
290 290
291 291 lengths = len(aa) + len(rr)
292 292 if lengths:
293 293 myscore = equal*2.0 / lengths
294 294 if myscore >= bestscore:
295 295 bestname, bestscore = r, myscore
296 296 if bestname:
297 297 yield bestname, a, bestscore
298 298
299 299 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
300 300 if dry_run is None:
301 301 dry_run = opts.get('dry_run')
302 302 if similarity is None:
303 303 similarity = float(opts.get('similarity') or 0)
304 304 # we'd use status here, except handling of symlinks and ignore is tricky
305 305 added, unknown, deleted, removed = [], [], [], []
306 306 audit_path = util.path_auditor(repo.root)
307 307 m = match(repo, pats, opts)
308 308 for abs in repo.walk(m):
309 309 target = repo.wjoin(abs)
310 310 good = True
311 311 try:
312 312 audit_path(abs)
313 313 except:
314 314 good = False
315 315 rel = m.rel(abs)
316 316 exact = m.exact(abs)
317 317 if good and abs not in repo.dirstate:
318 318 unknown.append(abs)
319 319 if repo.ui.verbose or not exact:
320 320 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
321 321 elif repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
322 322 or (os.path.isdir(target) and not os.path.islink(target))):
323 323 deleted.append(abs)
324 324 if repo.ui.verbose or not exact:
325 325 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
326 326 # for finding renames
327 327 elif repo.dirstate[abs] == 'r':
328 328 removed.append(abs)
329 329 elif repo.dirstate[abs] == 'a':
330 330 added.append(abs)
331 331 if not dry_run:
332 332 repo.remove(deleted)
333 333 repo.add(unknown)
334 334 if similarity > 0:
335 335 for old, new, score in findrenames(repo, added + unknown,
336 336 removed + deleted, similarity):
337 337 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
338 338 repo.ui.status(_('recording removal of %s as rename to %s '
339 339 '(%d%% similar)\n') %
340 340 (m.rel(old), m.rel(new), score * 100))
341 341 if not dry_run:
342 342 repo.copy(old, new)
343 343
344 344 def copy(ui, repo, pats, opts, rename=False):
345 345 # called with the repo lock held
346 346 #
347 347 # hgsep => pathname that uses "/" to separate directories
348 348 # ossep => pathname that uses os.sep to separate directories
349 349 cwd = repo.getcwd()
350 350 targets = {}
351 351 after = opts.get("after")
352 352 dryrun = opts.get("dry_run")
353 353
354 354 def walkpat(pat):
355 355 srcs = []
356 356 m = match(repo, [pat], opts, globbed=True)
357 357 for abs in repo.walk(m):
358 358 state = repo.dirstate[abs]
359 359 rel = m.rel(abs)
360 360 exact = m.exact(abs)
361 361 if state in '?r':
362 362 if exact and state == '?':
363 363 ui.warn(_('%s: not copying - file is not managed\n') % rel)
364 364 if exact and state == 'r':
365 365 ui.warn(_('%s: not copying - file has been marked for'
366 366 ' remove\n') % rel)
367 367 continue
368 368 # abs: hgsep
369 369 # rel: ossep
370 370 srcs.append((abs, rel, exact))
371 371 return srcs
372 372
373 373 # abssrc: hgsep
374 374 # relsrc: ossep
375 375 # otarget: ossep
376 376 def copyfile(abssrc, relsrc, otarget, exact):
377 377 abstarget = util.canonpath(repo.root, cwd, otarget)
378 378 reltarget = repo.pathto(abstarget, cwd)
379 379 target = repo.wjoin(abstarget)
380 380 src = repo.wjoin(abssrc)
381 381 state = repo.dirstate[abstarget]
382 382
383 383 # check for collisions
384 384 prevsrc = targets.get(abstarget)
385 385 if prevsrc is not None:
386 386 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
387 387 (reltarget, repo.pathto(abssrc, cwd),
388 388 repo.pathto(prevsrc, cwd)))
389 389 return
390 390
391 391 # check for overwrites
392 392 exists = os.path.exists(target)
393 393 if not after and exists or after and state in 'mn':
394 394 if not opts['force']:
395 395 ui.warn(_('%s: not overwriting - file exists\n') %
396 396 reltarget)
397 397 return
398 398
399 399 if after:
400 400 if not exists:
401 401 return
402 402 elif not dryrun:
403 403 try:
404 404 if exists:
405 405 os.unlink(target)
406 406 targetdir = os.path.dirname(target) or '.'
407 407 if not os.path.isdir(targetdir):
408 408 os.makedirs(targetdir)
409 409 util.copyfile(src, target)
410 410 except IOError, inst:
411 411 if inst.errno == errno.ENOENT:
412 412 ui.warn(_('%s: deleted in working copy\n') % relsrc)
413 413 else:
414 414 ui.warn(_('%s: cannot copy - %s\n') %
415 415 (relsrc, inst.strerror))
416 416 return True # report a failure
417 417
418 418 if ui.verbose or not exact:
419 419 if rename:
420 420 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
421 421 else:
422 422 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
423 423
424 424 targets[abstarget] = abssrc
425 425
426 426 # fix up dirstate
427 427 origsrc = repo.dirstate.copied(abssrc) or abssrc
428 428 if abstarget == origsrc: # copying back a copy?
429 429 if state not in 'mn' and not dryrun:
430 430 repo.dirstate.normallookup(abstarget)
431 431 else:
432 432 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
433 433 if not ui.quiet:
434 434 ui.warn(_("%s has not been committed yet, so no copy "
435 435 "data will be stored for %s.\n")
436 436 % (repo.pathto(origsrc, cwd), reltarget))
437 437 if repo.dirstate[abstarget] in '?r' and not dryrun:
438 438 repo.add([abstarget])
439 439 elif not dryrun:
440 440 repo.copy(origsrc, abstarget)
441 441
442 442 if rename and not dryrun:
443 443 repo.remove([abssrc], not after)
444 444
445 445 # pat: ossep
446 446 # dest ossep
447 447 # srcs: list of (hgsep, hgsep, ossep, bool)
448 448 # return: function that takes hgsep and returns ossep
449 449 def targetpathfn(pat, dest, srcs):
450 450 if os.path.isdir(pat):
451 451 abspfx = util.canonpath(repo.root, cwd, pat)
452 452 abspfx = util.localpath(abspfx)
453 453 if destdirexists:
454 454 striplen = len(os.path.split(abspfx)[0])
455 455 else:
456 456 striplen = len(abspfx)
457 457 if striplen:
458 458 striplen += len(os.sep)
459 459 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
460 460 elif destdirexists:
461 461 res = lambda p: os.path.join(dest,
462 462 os.path.basename(util.localpath(p)))
463 463 else:
464 464 res = lambda p: dest
465 465 return res
466 466
467 467 # pat: ossep
468 468 # dest ossep
469 469 # srcs: list of (hgsep, hgsep, ossep, bool)
470 470 # return: function that takes hgsep and returns ossep
471 471 def targetpathafterfn(pat, dest, srcs):
472 472 if _match.patkind(pat):
473 473 # a mercurial pattern
474 474 res = lambda p: os.path.join(dest,
475 475 os.path.basename(util.localpath(p)))
476 476 else:
477 477 abspfx = util.canonpath(repo.root, cwd, pat)
478 478 if len(abspfx) < len(srcs[0][0]):
479 479 # A directory. Either the target path contains the last
480 480 # component of the source path or it does not.
481 481 def evalpath(striplen):
482 482 score = 0
483 483 for s in srcs:
484 484 t = os.path.join(dest, util.localpath(s[0])[striplen:])
485 485 if os.path.exists(t):
486 486 score += 1
487 487 return score
488 488
489 489 abspfx = util.localpath(abspfx)
490 490 striplen = len(abspfx)
491 491 if striplen:
492 492 striplen += len(os.sep)
493 493 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
494 494 score = evalpath(striplen)
495 495 striplen1 = len(os.path.split(abspfx)[0])
496 496 if striplen1:
497 497 striplen1 += len(os.sep)
498 498 if evalpath(striplen1) > score:
499 499 striplen = striplen1
500 500 res = lambda p: os.path.join(dest,
501 501 util.localpath(p)[striplen:])
502 502 else:
503 503 # a file
504 504 if destdirexists:
505 505 res = lambda p: os.path.join(dest,
506 506 os.path.basename(util.localpath(p)))
507 507 else:
508 508 res = lambda p: dest
509 509 return res
510 510
511 511
512 512 pats = expandpats(pats)
513 513 if not pats:
514 514 raise util.Abort(_('no source or destination specified'))
515 515 if len(pats) == 1:
516 516 raise util.Abort(_('no destination specified'))
517 517 dest = pats.pop()
518 518 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
519 519 if not destdirexists:
520 520 if len(pats) > 1 or _match.patkind(pats[0]):
521 521 raise util.Abort(_('with multiple sources, destination must be an '
522 522 'existing directory'))
523 523 if util.endswithsep(dest):
524 524 raise util.Abort(_('destination %s is not a directory') % dest)
525 525
526 526 tfn = targetpathfn
527 527 if after:
528 528 tfn = targetpathafterfn
529 529 copylist = []
530 530 for pat in pats:
531 531 srcs = walkpat(pat)
532 532 if not srcs:
533 533 continue
534 534 copylist.append((tfn(pat, dest, srcs), srcs))
535 535 if not copylist:
536 536 raise util.Abort(_('no files to copy'))
537 537
538 538 errors = 0
539 539 for targetpath, srcs in copylist:
540 540 for abssrc, relsrc, exact in srcs:
541 541 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
542 542 errors += 1
543 543
544 544 if errors:
545 545 ui.warn(_('(consider using --after)\n'))
546 546
547 547 return errors
548 548
549 549 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
550 550 runargs=None):
551 551 '''Run a command as a service.'''
552 552
553 553 if opts['daemon'] and not opts['daemon_pipefds']:
554 554 rfd, wfd = os.pipe()
555 555 if not runargs:
556 556 runargs = sys.argv[:]
557 557 runargs.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
558 558 # Don't pass --cwd to the child process, because we've already
559 559 # changed directory.
560 560 for i in xrange(1,len(runargs)):
561 561 if runargs[i].startswith('--cwd='):
562 562 del runargs[i]
563 563 break
564 564 elif runargs[i].startswith('--cwd'):
565 565 del runargs[i:i+2]
566 566 break
567 567 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
568 568 runargs[0], runargs)
569 569 os.close(wfd)
570 570 os.read(rfd, 1)
571 571 if parentfn:
572 572 return parentfn(pid)
573 573 else:
574 574 os._exit(0)
575 575
576 576 if initfn:
577 577 initfn()
578 578
579 579 if opts['pid_file']:
580 580 fp = open(opts['pid_file'], 'w')
581 581 fp.write(str(os.getpid()) + '\n')
582 582 fp.close()
583 583
584 584 if opts['daemon_pipefds']:
585 585 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
586 586 os.close(rfd)
587 587 try:
588 588 os.setsid()
589 589 except AttributeError:
590 590 pass
591 591 os.write(wfd, 'y')
592 592 os.close(wfd)
593 593 sys.stdout.flush()
594 594 sys.stderr.flush()
595 595
596 596 nullfd = os.open(util.nulldev, os.O_RDWR)
597 597 logfilefd = nullfd
598 598 if logfile:
599 599 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
600 600 os.dup2(nullfd, 0)
601 601 os.dup2(logfilefd, 1)
602 602 os.dup2(logfilefd, 2)
603 603 if nullfd not in (0, 1, 2):
604 604 os.close(nullfd)
605 605 if logfile and logfilefd not in (0, 1, 2):
606 606 os.close(logfilefd)
607 607
608 608 if runfn:
609 609 return runfn()
610 610
611 611 class changeset_printer(object):
612 612 '''show changeset information when templating not requested.'''
613 613
614 614 def __init__(self, ui, repo, patch, diffopts, buffered):
615 615 self.ui = ui
616 616 self.repo = repo
617 617 self.buffered = buffered
618 618 self.patch = patch
619 619 self.diffopts = diffopts
620 620 self.header = {}
621 621 self.hunk = {}
622 622 self.lastheader = None
623 623
624 624 def flush(self, rev):
625 625 if rev in self.header:
626 626 h = self.header[rev]
627 627 if h != self.lastheader:
628 628 self.lastheader = h
629 629 self.ui.write(h)
630 630 del self.header[rev]
631 631 if rev in self.hunk:
632 632 self.ui.write(self.hunk[rev])
633 633 del self.hunk[rev]
634 634 return 1
635 635 return 0
636 636
637 637 def show(self, ctx, copies=(), **props):
638 638 if self.buffered:
639 639 self.ui.pushbuffer()
640 640 self._show(ctx, copies, props)
641 641 self.hunk[ctx.rev()] = self.ui.popbuffer()
642 642 else:
643 643 self._show(ctx, copies, props)
644 644
645 645 def _show(self, ctx, copies, props):
646 646 '''show a single changeset or file revision'''
647 647 changenode = ctx.node()
648 648 rev = ctx.rev()
649 649
650 650 if self.ui.quiet:
651 651 self.ui.write("%d:%s\n" % (rev, short(changenode)))
652 652 return
653 653
654 654 log = self.repo.changelog
655 changes = log.read(changenode)
656 date = util.datestr(changes[2])
657 extra = changes[5]
655 date = util.datestr(ctx.date())
656 extra = ctx.extra()
658 657 branch = extra.get("branch")
659 658
660 659 hexfunc = self.ui.debugflag and hex or short
661 660
662 661 parents = [(p, hexfunc(log.node(p)))
663 662 for p in self._meaningful_parentrevs(log, rev)]
664 663
665 664 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
666 665
667 666 # don't show the default branch name
668 667 if branch != 'default':
669 668 branch = encoding.tolocal(branch)
670 669 self.ui.write(_("branch: %s\n") % branch)
671 670 for tag in self.repo.nodetags(changenode):
672 671 self.ui.write(_("tag: %s\n") % tag)
673 672 for parent in parents:
674 673 self.ui.write(_("parent: %d:%s\n") % parent)
675 674
676 675 if self.ui.debugflag:
676 mnode = ctx.manifestnode()
677 677 self.ui.write(_("manifest: %d:%s\n") %
678 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
679 self.ui.write(_("user: %s\n") % changes[1])
678 (self.repo.manifest.rev(mnode), hex(mnode)))
679 self.ui.write(_("user: %s\n") % ctx.user())
680 680 self.ui.write(_("date: %s\n") % date)
681 681
682 682 if self.ui.debugflag:
683 683 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
684 684 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
685 685 files):
686 686 if value:
687 687 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
688 elif changes[3] and self.ui.verbose:
689 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
688 elif ctx.files() and self.ui.verbose:
689 self.ui.write(_("files: %s\n") % " ".join(ctx.files()))
690 690 if copies and self.ui.verbose:
691 691 copies = ['%s (%s)' % c for c in copies]
692 692 self.ui.write(_("copies: %s\n") % ' '.join(copies))
693 693
694 694 if extra and self.ui.debugflag:
695 695 for key, value in sorted(extra.items()):
696 696 self.ui.write(_("extra: %s=%s\n")
697 697 % (key, value.encode('string_escape')))
698 698
699 description = changes[4].strip()
699 description = ctx.description().strip()
700 700 if description:
701 701 if self.ui.verbose:
702 702 self.ui.write(_("description:\n"))
703 703 self.ui.write(description)
704 704 self.ui.write("\n\n")
705 705 else:
706 706 self.ui.write(_("summary: %s\n") %
707 707 description.splitlines()[0])
708 708 self.ui.write("\n")
709 709
710 710 self.showpatch(changenode)
711 711
712 712 def showpatch(self, node):
713 713 if self.patch:
714 714 prev = self.repo.changelog.parents(node)[0]
715 715 chunks = patch.diff(self.repo, prev, node, match=self.patch,
716 716 opts=patch.diffopts(self.ui, self.diffopts))
717 717 for chunk in chunks:
718 718 self.ui.write(chunk)
719 719 self.ui.write("\n")
720 720
721 721 def _meaningful_parentrevs(self, log, rev):
722 722 """Return list of meaningful (or all if debug) parentrevs for rev.
723 723
724 724 For merges (two non-nullrev revisions) both parents are meaningful.
725 725 Otherwise the first parent revision is considered meaningful if it
726 726 is not the preceding revision.
727 727 """
728 728 parents = log.parentrevs(rev)
729 729 if not self.ui.debugflag and parents[1] == nullrev:
730 730 if parents[0] >= rev - 1:
731 731 parents = []
732 732 else:
733 733 parents = [parents[0]]
734 734 return parents
735 735
736 736
737 737 class changeset_templater(changeset_printer):
738 738 '''format changeset information.'''
739 739
740 740 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
741 741 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
742 742 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
743 743 self.t = templater.templater(mapfile, {'formatnode': formatnode},
744 744 cache={
745 745 'parent': '{rev}:{node|formatnode} ',
746 746 'manifest': '{rev}:{node|formatnode}',
747 747 'filecopy': '{name} ({source})'})
748 748 # Cache mapping from rev to a tuple with tag date, tag
749 749 # distance and tag name
750 750 self._latesttagcache = {-1: (0, 0, 'null')}
751 751
752 752 def use_template(self, t):
753 753 '''set template string to use'''
754 754 self.t.cache['changeset'] = t
755 755
756 756 def _meaningful_parentrevs(self, ctx):
757 757 """Return list of meaningful (or all if debug) parentrevs for rev.
758 758 """
759 759 parents = ctx.parents()
760 760 if len(parents) > 1:
761 761 return parents
762 762 if self.ui.debugflag:
763 763 return [parents[0], self.repo['null']]
764 764 if parents[0].rev() >= ctx.rev() - 1:
765 765 return []
766 766 return parents
767 767
768 768 def _latesttaginfo(self, rev):
769 769 '''return date, distance and name for the latest tag of rev'''
770 770 todo = [rev]
771 771 while todo:
772 772 rev = todo.pop()
773 773 if rev in self._latesttagcache:
774 774 continue
775 775 ctx = self.repo[rev]
776 776 tags = [t for t in ctx.tags() if self.repo.tagtype(t) == 'global']
777 777 if tags:
778 778 self._latesttagcache[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
779 779 continue
780 780 try:
781 781 # The tuples are laid out so the right one can be found by comparison.
782 782 pdate, pdist, ptag = max(
783 783 self._latesttagcache[p.rev()] for p in ctx.parents())
784 784 except KeyError:
785 785 # Cache miss - recurse
786 786 todo.append(rev)
787 787 todo.extend(p.rev() for p in ctx.parents())
788 788 continue
789 789 self._latesttagcache[rev] = pdate, pdist + 1, ptag
790 790 return self._latesttagcache[rev]
791 791
792 792 def _show(self, ctx, copies, props):
793 793 '''show a single changeset or file revision'''
794 794
795 795 def showlist(name, values, plural=None, **args):
796 796 '''expand set of values.
797 797 name is name of key in template map.
798 798 values is list of strings or dicts.
799 799 plural is plural of name, if not simply name + 's'.
800 800
801 801 expansion works like this, given name 'foo'.
802 802
803 803 if values is empty, expand 'no_foos'.
804 804
805 805 if 'foo' not in template map, return values as a string,
806 806 joined by space.
807 807
808 808 expand 'start_foos'.
809 809
810 810 for each value, expand 'foo'. if 'last_foo' in template
811 811 map, expand it instead of 'foo' for last key.
812 812
813 813 expand 'end_foos'.
814 814 '''
815 815 if plural: names = plural
816 816 else: names = name + 's'
817 817 if not values:
818 818 noname = 'no_' + names
819 819 if noname in self.t:
820 820 yield self.t(noname, **args)
821 821 return
822 822 if name not in self.t:
823 823 if isinstance(values[0], str):
824 824 yield ' '.join(values)
825 825 else:
826 826 for v in values:
827 827 yield dict(v, **args)
828 828 return
829 829 startname = 'start_' + names
830 830 if startname in self.t:
831 831 yield self.t(startname, **args)
832 832 vargs = args.copy()
833 833 def one(v, tag=name):
834 834 try:
835 835 vargs.update(v)
836 836 except (AttributeError, ValueError):
837 837 try:
838 838 for a, b in v:
839 839 vargs[a] = b
840 840 except ValueError:
841 841 vargs[name] = v
842 842 return self.t(tag, **vargs)
843 843 lastname = 'last_' + name
844 844 if lastname in self.t:
845 845 last = values.pop()
846 846 else:
847 847 last = None
848 848 for v in values:
849 849 yield one(v)
850 850 if last is not None:
851 851 yield one(last, tag=lastname)
852 852 endname = 'end_' + names
853 853 if endname in self.t:
854 854 yield self.t(endname, **args)
855 855
856 856 def showbranches(**args):
857 857 branch = ctx.branch()
858 858 if branch != 'default':
859 859 branch = encoding.tolocal(branch)
860 860 return showlist('branch', [branch], plural='branches', **args)
861 861
862 862 def showparents(**args):
863 863 parents = [[('rev', p.rev()), ('node', p.hex())]
864 864 for p in self._meaningful_parentrevs(ctx)]
865 865 return showlist('parent', parents, **args)
866 866
867 867 def showtags(**args):
868 868 return showlist('tag', ctx.tags(), **args)
869 869
870 870 def showextras(**args):
871 871 for key, value in sorted(ctx.extra().items()):
872 872 args = args.copy()
873 873 args.update(dict(key=key, value=value))
874 874 yield self.t('extra', **args)
875 875
876 876 def showcopies(**args):
877 877 c = [{'name': x[0], 'source': x[1]} for x in copies]
878 878 return showlist('file_copy', c, plural='file_copies', **args)
879 879
880 880 files = []
881 881 def getfiles():
882 882 if not files:
883 883 files[:] = self.repo.status(ctx.parents()[0].node(),
884 884 ctx.node())[:3]
885 885 return files
886 886 def showfiles(**args):
887 887 return showlist('file', ctx.files(), **args)
888 888 def showmods(**args):
889 889 return showlist('file_mod', getfiles()[0], **args)
890 890 def showadds(**args):
891 891 return showlist('file_add', getfiles()[1], **args)
892 892 def showdels(**args):
893 893 return showlist('file_del', getfiles()[2], **args)
894 894 def showmanifest(**args):
895 895 args = args.copy()
896 896 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
897 897 node=hex(ctx.changeset()[0])))
898 898 return self.t('manifest', **args)
899 899
900 900 def showdiffstat(**args):
901 901 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
902 902 files, adds, removes = 0, 0, 0
903 903 for i in patch.diffstatdata(util.iterlines(diff)):
904 904 files += 1
905 905 adds += i[1]
906 906 removes += i[2]
907 907 return '%s: +%s/-%s' % (files, adds, removes)
908 908
909 909 def showlatesttag(**args):
910 910 return self._latesttaginfo(ctx.rev())[2]
911 911 def showlatesttagdistance(**args):
912 912 return self._latesttaginfo(ctx.rev())[1]
913 913
914 914 defprops = {
915 915 'author': ctx.user(),
916 916 'branches': showbranches,
917 917 'date': ctx.date(),
918 918 'desc': ctx.description().strip(),
919 919 'file_adds': showadds,
920 920 'file_dels': showdels,
921 921 'file_mods': showmods,
922 922 'files': showfiles,
923 923 'file_copies': showcopies,
924 924 'manifest': showmanifest,
925 925 'node': ctx.hex(),
926 926 'parents': showparents,
927 927 'rev': ctx.rev(),
928 928 'tags': showtags,
929 929 'extras': showextras,
930 930 'diffstat': showdiffstat,
931 931 'latesttag': showlatesttag,
932 932 'latesttagdistance': showlatesttagdistance,
933 933 }
934 934 props = props.copy()
935 935 props.update(defprops)
936 936
937 937 # find correct templates for current mode
938 938
939 939 tmplmodes = [
940 940 (True, None),
941 941 (self.ui.verbose, 'verbose'),
942 942 (self.ui.quiet, 'quiet'),
943 943 (self.ui.debugflag, 'debug'),
944 944 ]
945 945
946 946 types = {'header': '', 'changeset': 'changeset'}
947 947 for mode, postfix in tmplmodes:
948 948 for type in types:
949 949 cur = postfix and ('%s_%s' % (type, postfix)) or type
950 950 if mode and cur in self.t:
951 951 types[type] = cur
952 952
953 953 try:
954 954
955 955 # write header
956 956 if types['header']:
957 957 h = templater.stringify(self.t(types['header'], **props))
958 958 if self.buffered:
959 959 self.header[ctx.rev()] = h
960 960 else:
961 961 self.ui.write(h)
962 962
963 963 # write changeset metadata, then patch if requested
964 964 key = types['changeset']
965 965 self.ui.write(templater.stringify(self.t(key, **props)))
966 966 self.showpatch(ctx.node())
967 967
968 968 except KeyError, inst:
969 969 msg = _("%s: no key named '%s'")
970 970 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
971 971 except SyntaxError, inst:
972 972 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
973 973
974 974 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
975 975 """show one changeset using template or regular display.
976 976
977 977 Display format will be the first non-empty hit of:
978 978 1. option 'template'
979 979 2. option 'style'
980 980 3. [ui] setting 'logtemplate'
981 981 4. [ui] setting 'style'
982 982 If all of these values are either the unset or the empty string,
983 983 regular display via changeset_printer() is done.
984 984 """
985 985 # options
986 986 patch = False
987 987 if opts.get('patch'):
988 988 patch = matchfn or matchall(repo)
989 989
990 990 tmpl = opts.get('template')
991 991 style = None
992 992 if tmpl:
993 993 tmpl = templater.parsestring(tmpl, quoted=False)
994 994 else:
995 995 style = opts.get('style')
996 996
997 997 # ui settings
998 998 if not (tmpl or style):
999 999 tmpl = ui.config('ui', 'logtemplate')
1000 1000 if tmpl:
1001 1001 tmpl = templater.parsestring(tmpl)
1002 1002 else:
1003 1003 style = ui.config('ui', 'style')
1004 1004
1005 1005 if not (tmpl or style):
1006 1006 return changeset_printer(ui, repo, patch, opts, buffered)
1007 1007
1008 1008 mapfile = None
1009 1009 if style and not tmpl:
1010 1010 mapfile = style
1011 1011 if not os.path.split(mapfile)[0]:
1012 1012 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1013 1013 or templater.templatepath(mapfile))
1014 1014 if mapname: mapfile = mapname
1015 1015
1016 1016 try:
1017 1017 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
1018 1018 except SyntaxError, inst:
1019 1019 raise util.Abort(inst.args[0])
1020 1020 if tmpl: t.use_template(tmpl)
1021 1021 return t
1022 1022
1023 1023 def finddate(ui, repo, date):
1024 1024 """Find the tipmost changeset that matches the given date spec"""
1025 1025 df = util.matchdate(date)
1026 1026 get = util.cachefunc(lambda r: repo[r])
1027 1027 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1028 1028 results = {}
1029 1029 for st, rev, fns in changeiter:
1030 1030 if st == 'add':
1031 1031 d = get(rev).date()
1032 1032 if df(d[0]):
1033 1033 results[rev] = d
1034 1034 elif st == 'iter':
1035 1035 if rev in results:
1036 1036 ui.status(_("Found revision %s from %s\n") %
1037 1037 (rev, util.datestr(results[rev])))
1038 1038 return str(rev)
1039 1039
1040 1040 raise util.Abort(_("revision matching date not found"))
1041 1041
1042 1042 def walkchangerevs(ui, repo, pats, change, opts):
1043 1043 '''Iterate over files and the revs in which they changed.
1044 1044
1045 1045 Callers most commonly need to iterate backwards over the history
1046 1046 in which they are interested. Doing so has awful (quadratic-looking)
1047 1047 performance, so we use iterators in a "windowed" way.
1048 1048
1049 1049 We walk a window of revisions in the desired order. Within the
1050 1050 window, we first walk forwards to gather data, then in the desired
1051 1051 order (usually backwards) to display it.
1052 1052
1053 1053 This function returns an (iterator, matchfn) tuple. The iterator
1054 1054 yields 3-tuples. They will be of one of the following forms:
1055 1055
1056 1056 "window", incrementing, lastrev: stepping through a window,
1057 1057 positive if walking forwards through revs, last rev in the
1058 1058 sequence iterated over - use to reset state for the current window
1059 1059
1060 1060 "add", rev, fns: out-of-order traversal of the given filenames
1061 1061 fns, which changed during revision rev - use to gather data for
1062 1062 possible display
1063 1063
1064 1064 "iter", rev, None: in-order traversal of the revs earlier iterated
1065 1065 over with "add" - use to display data'''
1066 1066
1067 1067 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1068 1068 if start < end:
1069 1069 while start < end:
1070 1070 yield start, min(windowsize, end-start)
1071 1071 start += windowsize
1072 1072 if windowsize < sizelimit:
1073 1073 windowsize *= 2
1074 1074 else:
1075 1075 while start > end:
1076 1076 yield start, min(windowsize, start-end-1)
1077 1077 start -= windowsize
1078 1078 if windowsize < sizelimit:
1079 1079 windowsize *= 2
1080 1080
1081 1081 m = match(repo, pats, opts)
1082 1082 follow = opts.get('follow') or opts.get('follow_first')
1083 1083
1084 1084 if not len(repo):
1085 1085 return [], m
1086 1086
1087 1087 if follow:
1088 1088 defrange = '%s:0' % repo['.'].rev()
1089 1089 else:
1090 1090 defrange = '-1:0'
1091 1091 revs = revrange(repo, opts['rev'] or [defrange])
1092 1092 wanted = set()
1093 1093 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1094 1094 fncache = {}
1095 1095
1096 1096 if not slowpath and not m.files():
1097 1097 # No files, no patterns. Display all revs.
1098 1098 wanted = set(revs)
1099 1099 copies = []
1100 1100 if not slowpath:
1101 1101 # Only files, no patterns. Check the history of each file.
1102 1102 def filerevgen(filelog, node):
1103 1103 cl_count = len(repo)
1104 1104 if node is None:
1105 1105 last = len(filelog) - 1
1106 1106 else:
1107 1107 last = filelog.rev(node)
1108 1108 for i, window in increasing_windows(last, nullrev):
1109 1109 revs = []
1110 1110 for j in xrange(i - window, i + 1):
1111 1111 n = filelog.node(j)
1112 1112 revs.append((filelog.linkrev(j),
1113 1113 follow and filelog.renamed(n)))
1114 1114 for rev in reversed(revs):
1115 1115 # only yield rev for which we have the changelog, it can
1116 1116 # happen while doing "hg log" during a pull or commit
1117 1117 if rev[0] < cl_count:
1118 1118 yield rev
1119 1119 def iterfiles():
1120 1120 for filename in m.files():
1121 1121 yield filename, None
1122 1122 for filename_node in copies:
1123 1123 yield filename_node
1124 1124 minrev, maxrev = min(revs), max(revs)
1125 1125 for file_, node in iterfiles():
1126 1126 filelog = repo.file(file_)
1127 1127 if not len(filelog):
1128 1128 if node is None:
1129 1129 # A zero count may be a directory or deleted file, so
1130 1130 # try to find matching entries on the slow path.
1131 1131 if follow:
1132 1132 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1133 1133 slowpath = True
1134 1134 break
1135 1135 else:
1136 1136 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1137 1137 % (file_, short(node)))
1138 1138 continue
1139 1139 for rev, copied in filerevgen(filelog, node):
1140 1140 if rev <= maxrev:
1141 1141 if rev < minrev:
1142 1142 break
1143 1143 fncache.setdefault(rev, [])
1144 1144 fncache[rev].append(file_)
1145 1145 wanted.add(rev)
1146 1146 if follow and copied:
1147 1147 copies.append(copied)
1148 1148 if slowpath:
1149 1149 if follow:
1150 1150 raise util.Abort(_('can only follow copies/renames for explicit '
1151 1151 'filenames'))
1152 1152
1153 1153 # The slow path checks files modified in every changeset.
1154 1154 def changerevgen():
1155 1155 for i, window in increasing_windows(len(repo) - 1, nullrev):
1156 1156 for j in xrange(i - window, i + 1):
1157 1157 yield change(j)
1158 1158
1159 1159 for ctx in changerevgen():
1160 1160 matches = filter(m, ctx.files())
1161 1161 if matches:
1162 1162 fncache[ctx.rev()] = matches
1163 1163 wanted.add(ctx.rev())
1164 1164
1165 1165 class followfilter(object):
1166 1166 def __init__(self, onlyfirst=False):
1167 1167 self.startrev = nullrev
1168 1168 self.roots = []
1169 1169 self.onlyfirst = onlyfirst
1170 1170
1171 1171 def match(self, rev):
1172 1172 def realparents(rev):
1173 1173 if self.onlyfirst:
1174 1174 return repo.changelog.parentrevs(rev)[0:1]
1175 1175 else:
1176 1176 return filter(lambda x: x != nullrev,
1177 1177 repo.changelog.parentrevs(rev))
1178 1178
1179 1179 if self.startrev == nullrev:
1180 1180 self.startrev = rev
1181 1181 return True
1182 1182
1183 1183 if rev > self.startrev:
1184 1184 # forward: all descendants
1185 1185 if not self.roots:
1186 1186 self.roots.append(self.startrev)
1187 1187 for parent in realparents(rev):
1188 1188 if parent in self.roots:
1189 1189 self.roots.append(rev)
1190 1190 return True
1191 1191 else:
1192 1192 # backwards: all parents
1193 1193 if not self.roots:
1194 1194 self.roots.extend(realparents(self.startrev))
1195 1195 if rev in self.roots:
1196 1196 self.roots.remove(rev)
1197 1197 self.roots.extend(realparents(rev))
1198 1198 return True
1199 1199
1200 1200 return False
1201 1201
1202 1202 # it might be worthwhile to do this in the iterator if the rev range
1203 1203 # is descending and the prune args are all within that range
1204 1204 for rev in opts.get('prune', ()):
1205 1205 rev = repo.changelog.rev(repo.lookup(rev))
1206 1206 ff = followfilter()
1207 1207 stop = min(revs[0], revs[-1])
1208 1208 for x in xrange(rev, stop-1, -1):
1209 1209 if ff.match(x):
1210 1210 wanted.discard(x)
1211 1211
1212 1212 def iterate():
1213 1213 if follow and not m.files():
1214 1214 ff = followfilter(onlyfirst=opts.get('follow_first'))
1215 1215 def want(rev):
1216 1216 return ff.match(rev) and rev in wanted
1217 1217 else:
1218 1218 def want(rev):
1219 1219 return rev in wanted
1220 1220
1221 1221 for i, window in increasing_windows(0, len(revs)):
1222 1222 yield 'window', revs[0] < revs[-1], revs[-1]
1223 1223 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1224 1224 for rev in sorted(nrevs):
1225 1225 fns = fncache.get(rev)
1226 1226 if not fns:
1227 1227 def fns_generator():
1228 1228 for f in change(rev).files():
1229 1229 if m(f):
1230 1230 yield f
1231 1231 fns = fns_generator()
1232 1232 yield 'add', rev, fns
1233 1233 for rev in nrevs:
1234 1234 yield 'iter', rev, None
1235 1235 return iterate(), m
1236 1236
1237 1237 def commit(ui, repo, commitfunc, pats, opts):
1238 1238 '''commit the specified files or all outstanding changes'''
1239 1239 date = opts.get('date')
1240 1240 if date:
1241 1241 opts['date'] = util.parsedate(date)
1242 1242 message = logmessage(opts)
1243 1243
1244 1244 # extract addremove carefully -- this function can be called from a command
1245 1245 # that doesn't support addremove
1246 1246 if opts.get('addremove'):
1247 1247 addremove(repo, pats, opts)
1248 1248
1249 1249 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1250 1250
1251 1251 def commiteditor(repo, ctx, subs):
1252 1252 if ctx.description():
1253 1253 return ctx.description()
1254 1254 return commitforceeditor(repo, ctx, subs)
1255 1255
1256 1256 def commitforceeditor(repo, ctx, subs):
1257 1257 edittext = []
1258 1258 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1259 1259 if ctx.description():
1260 1260 edittext.append(ctx.description())
1261 1261 edittext.append("")
1262 1262 edittext.append("") # Empty line between message and comments.
1263 1263 edittext.append(_("HG: Enter commit message."
1264 1264 " Lines beginning with 'HG:' are removed."))
1265 1265 edittext.append(_("HG: Leave message empty to abort commit."))
1266 1266 edittext.append("HG: --")
1267 1267 edittext.append(_("HG: user: %s") % ctx.user())
1268 1268 if ctx.p2():
1269 1269 edittext.append(_("HG: branch merge"))
1270 1270 if ctx.branch():
1271 1271 edittext.append(_("HG: branch '%s'")
1272 1272 % encoding.tolocal(ctx.branch()))
1273 1273 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1274 1274 edittext.extend([_("HG: added %s") % f for f in added])
1275 1275 edittext.extend([_("HG: changed %s") % f for f in modified])
1276 1276 edittext.extend([_("HG: removed %s") % f for f in removed])
1277 1277 if not added and not modified and not removed:
1278 1278 edittext.append(_("HG: no files changed"))
1279 1279 edittext.append("")
1280 1280 # run editor in the repository root
1281 1281 olddir = os.getcwd()
1282 1282 os.chdir(repo.root)
1283 1283 text = repo.ui.edit("\n".join(edittext), ctx.user())
1284 1284 text = re.sub("(?m)^HG:.*\n", "", text)
1285 1285 os.chdir(olddir)
1286 1286
1287 1287 if not text.strip():
1288 1288 raise util.Abort(_("empty commit message"))
1289 1289
1290 1290 return text
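The companion hunk below, in context.py, gives filectx an extra() accessor that delegates to its changectx, alongside the existing user()/date()/files()/description() delegations; presumably this keeps callers working when the printer is handed a file revision rather than a changeset. A hypothetical usage sketch (the repository object and file name are placeholders, not taken from the diff):

# Hypothetical usage -- 'repo' and the file name are placeholders.
fctx = repo['tip'].filectx('cmdutil.py')          # file context for one revision
assert fctx.extra() == fctx.changectx().extra()   # new accessor just delegates upward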
@@ -1,818 +1,819 @@
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import nullid, nullrev, short, hex
9 9 from i18n import _
10 10 import ancestor, bdiff, error, util, subrepo
11 11 import os, errno
12 12
13 13 propertycache = util.propertycache
14 14
15 15 class changectx(object):
16 16 """A changecontext object makes access to data related to a particular
17 17 changeset convenient."""
18 18 def __init__(self, repo, changeid=''):
19 19 """changeid is a revision number, node, or tag"""
20 20 if changeid == '':
21 21 changeid = '.'
22 22 self._repo = repo
23 23 if isinstance(changeid, (long, int)):
24 24 self._rev = changeid
25 25 self._node = self._repo.changelog.node(changeid)
26 26 else:
27 27 self._node = self._repo.lookup(changeid)
28 28 self._rev = self._repo.changelog.rev(self._node)
29 29
30 30 def __str__(self):
31 31 return short(self.node())
32 32
33 33 def __int__(self):
34 34 return self.rev()
35 35
36 36 def __repr__(self):
37 37 return "<changectx %s>" % str(self)
38 38
39 39 def __hash__(self):
40 40 try:
41 41 return hash(self._rev)
42 42 except AttributeError:
43 43 return id(self)
44 44
45 45 def __eq__(self, other):
46 46 try:
47 47 return self._rev == other._rev
48 48 except AttributeError:
49 49 return False
50 50
51 51 def __ne__(self, other):
52 52 return not (self == other)
53 53
54 54 def __nonzero__(self):
55 55 return self._rev != nullrev
56 56
57 57 @propertycache
58 58 def _changeset(self):
59 59 return self._repo.changelog.read(self.node())
60 60
61 61 @propertycache
62 62 def _manifest(self):
63 63 return self._repo.manifest.read(self._changeset[0])
64 64
65 65 @propertycache
66 66 def _manifestdelta(self):
67 67 return self._repo.manifest.readdelta(self._changeset[0])
68 68
69 69 @propertycache
70 70 def _parents(self):
71 71 p = self._repo.changelog.parentrevs(self._rev)
72 72 if p[1] == nullrev:
73 73 p = p[:-1]
74 74 return [changectx(self._repo, x) for x in p]
75 75
76 76 @propertycache
77 77 def substate(self):
78 78 return subrepo.state(self)
79 79
80 80 def __contains__(self, key):
81 81 return key in self._manifest
82 82
83 83 def __getitem__(self, key):
84 84 return self.filectx(key)
85 85
86 86 def __iter__(self):
87 87 for f in sorted(self._manifest):
88 88 yield f
89 89
90 90 def changeset(self): return self._changeset
91 91 def manifest(self): return self._manifest
92 92 def manifestnode(self): return self._changeset[0]
93 93
94 94 def rev(self): return self._rev
95 95 def node(self): return self._node
96 96 def hex(self): return hex(self._node)
97 97 def user(self): return self._changeset[1]
98 98 def date(self): return self._changeset[2]
99 99 def files(self): return self._changeset[3]
100 100 def description(self): return self._changeset[4]
101 101 def branch(self): return self._changeset[5].get("branch")
102 102 def extra(self): return self._changeset[5]
103 103 def tags(self): return self._repo.nodetags(self._node)
104 104
105 105 def parents(self):
106 106 """return contexts for each parent changeset"""
107 107 return self._parents
108 108
109 109 def p1(self):
110 110 return self._parents[0]
111 111
112 112 def p2(self):
113 113 if len(self._parents) == 2:
114 114 return self._parents[1]
115 115 return changectx(self._repo, -1)
116 116
117 117 def children(self):
118 118 """return contexts for each child changeset"""
119 119 c = self._repo.changelog.children(self._node)
120 120 return [changectx(self._repo, x) for x in c]
121 121
122 122 def ancestors(self):
123 123 for a in self._repo.changelog.ancestors(self._rev):
124 124 yield changectx(self._repo, a)
125 125
126 126 def descendants(self):
127 127 for d in self._repo.changelog.descendants(self._rev):
128 128 yield changectx(self._repo, d)
129 129
130 130 def _fileinfo(self, path):
131 131 if '_manifest' in self.__dict__:
132 132 try:
133 133 return self._manifest[path], self._manifest.flags(path)
134 134 except KeyError:
135 135 raise error.LookupError(self._node, path,
136 136 _('not found in manifest'))
137 137 if '_manifestdelta' in self.__dict__ or path in self.files():
138 138 if path in self._manifestdelta:
139 139 return self._manifestdelta[path], self._manifestdelta.flags(path)
140 140 node, flag = self._repo.manifest.find(self._changeset[0], path)
141 141 if not node:
142 142 raise error.LookupError(self._node, path,
143 143 _('not found in manifest'))
144 144
145 145 return node, flag
146 146
147 147 def filenode(self, path):
148 148 return self._fileinfo(path)[0]
149 149
150 150 def flags(self, path):
151 151 try:
152 152 return self._fileinfo(path)[1]
153 153 except error.LookupError:
154 154 return ''
155 155
156 156 def filectx(self, path, fileid=None, filelog=None):
157 157 """get a file context from this changeset"""
158 158 if fileid is None:
159 159 fileid = self.filenode(path)
160 160 return filectx(self._repo, path, fileid=fileid,
161 161 changectx=self, filelog=filelog)
162 162
163 163 def ancestor(self, c2):
164 164 """
165 165 return the ancestor context of self and c2
166 166 """
167 167 n = self._repo.changelog.ancestor(self._node, c2._node)
168 168 return changectx(self._repo, n)
169 169
170 170 def walk(self, match):
171 171 fset = set(match.files())
172 172 # for dirstate.walk, files=['.'] means "walk the whole tree".
173 173 # follow that here, too
174 174 fset.discard('.')
175 175 for fn in self:
176 176 for ffn in fset:
177 177 # match if the file is the exact name or a directory
178 178 if ffn == fn or fn.startswith("%s/" % ffn):
179 179 fset.remove(ffn)
180 180 break
181 181 if match(fn):
182 182 yield fn
183 183 for fn in sorted(fset):
184 184 if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
185 185 yield fn
186 186
187 187 def sub(self, path):
188 188 return subrepo.subrepo(self, path)
189 189
190 190 class filectx(object):
191 191 """A filecontext object makes access to data related to a particular
192 192 filerevision convenient."""
193 193 def __init__(self, repo, path, changeid=None, fileid=None,
194 194 filelog=None, changectx=None):
195 195 """changeid can be a changeset revision, node, or tag.
196 196 fileid can be a file revision or node."""
197 197 self._repo = repo
198 198 self._path = path
199 199
200 200 assert (changeid is not None
201 201 or fileid is not None
202 202 or changectx is not None), \
203 203 ("bad args: changeid=%r, fileid=%r, changectx=%r"
204 204 % (changeid, fileid, changectx))
205 205
206 206 if filelog:
207 207 self._filelog = filelog
208 208
209 209 if changeid is not None:
210 210 self._changeid = changeid
211 211 if changectx is not None:
212 212 self._changectx = changectx
213 213 if fileid is not None:
214 214 self._fileid = fileid
215 215
216 216 @propertycache
217 217 def _changectx(self):
218 218 return changectx(self._repo, self._changeid)
219 219
220 220 @propertycache
221 221 def _filelog(self):
222 222 return self._repo.file(self._path)
223 223
224 224 @propertycache
225 225 def _changeid(self):
226 226 if '_changectx' in self.__dict__:
227 227 return self._changectx.rev()
228 228 else:
229 229 return self._filelog.linkrev(self._filerev)
230 230
231 231 @propertycache
232 232 def _filenode(self):
233 233 if '_fileid' in self.__dict__:
234 234 return self._filelog.lookup(self._fileid)
235 235 else:
236 236 return self._changectx.filenode(self._path)
237 237
238 238 @propertycache
239 239 def _filerev(self):
240 240 return self._filelog.rev(self._filenode)
241 241
242 242 @propertycache
243 243 def _repopath(self):
244 244 return self._path
245 245
246 246 def __nonzero__(self):
247 247 try:
248 248 self._filenode
249 249 return True
250 250 except error.LookupError:
251 251 # file is missing
252 252 return False
253 253
254 254 def __str__(self):
255 255 return "%s@%s" % (self.path(), short(self.node()))
256 256
257 257 def __repr__(self):
258 258 return "<filectx %s>" % str(self)
259 259
260 260 def __hash__(self):
261 261 try:
262 262 return hash((self._path, self._fileid))
263 263 except AttributeError:
264 264 return id(self)
265 265
266 266 def __eq__(self, other):
267 267 try:
268 268 return (self._path == other._path
269 269 and self._fileid == other._fileid)
270 270 except AttributeError:
271 271 return False
272 272
273 273 def __ne__(self, other):
274 274 return not (self == other)
275 275
276 276 def filectx(self, fileid):
277 277 '''opens an arbitrary revision of the file without
278 278 opening a new filelog'''
279 279 return filectx(self._repo, self._path, fileid=fileid,
280 280 filelog=self._filelog)
281 281
282 282 def filerev(self): return self._filerev
283 283 def filenode(self): return self._filenode
284 284 def flags(self): return self._changectx.flags(self._path)
285 285 def filelog(self): return self._filelog
286 286
287 287 def rev(self):
288 288 if '_changectx' in self.__dict__:
289 289 return self._changectx.rev()
290 290 if '_changeid' in self.__dict__:
291 291 return self._changectx.rev()
292 292 return self._filelog.linkrev(self._filerev)
293 293
294 294 def linkrev(self): return self._filelog.linkrev(self._filerev)
295 295 def node(self): return self._changectx.node()
296 296 def hex(self): return hex(self.node())
297 297 def user(self): return self._changectx.user()
298 298 def date(self): return self._changectx.date()
299 299 def files(self): return self._changectx.files()
300 300 def description(self): return self._changectx.description()
301 301 def branch(self): return self._changectx.branch()
302 def extra(self): return self._changectx.extra()
302 303 def manifest(self): return self._changectx.manifest()
303 304 def changectx(self): return self._changectx
304 305
305 306 def data(self): return self._filelog.read(self._filenode)
306 307 def path(self): return self._path
307 308 def size(self): return self._filelog.size(self._filerev)
308 309
309 310 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
310 311
311 312 def renamed(self):
312 313 """check if file was actually renamed in this changeset revision
313 314
314 315 If rename logged in file revision, we report copy for changeset only
315 316 if file revisions linkrev points back to the changeset in question
316 317 or both changeset parents contain different file revisions.
317 318 """
318 319
319 320 renamed = self._filelog.renamed(self._filenode)
320 321 if not renamed:
321 322 return renamed
322 323
323 324 if self.rev() == self.linkrev():
324 325 return renamed
325 326
326 327 name = self.path()
327 328 fnode = self._filenode
328 329 for p in self._changectx.parents():
329 330 try:
330 331 if fnode == p.filenode(name):
331 332 return None
332 333 except error.LookupError:
333 334 pass
334 335 return renamed
335 336
336 337 def parents(self):
337 338 p = self._path
338 339 fl = self._filelog
339 340 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
340 341
341 342 r = self._filelog.renamed(self._filenode)
342 343 if r:
343 344 pl[0] = (r[0], r[1], None)
344 345
345 346 return [filectx(self._repo, p, fileid=n, filelog=l)
346 347 for p,n,l in pl if n != nullid]
347 348
348 349 def children(self):
349 350 # hard for renames
350 351 c = self._filelog.children(self._filenode)
351 352 return [filectx(self._repo, self._path, fileid=x,
352 353 filelog=self._filelog) for x in c]
353 354
354 355 def annotate(self, follow=False, linenumber=None):
355 356 '''returns a list of tuples of (ctx, line) for each line
356 357 in the file, where ctx is the filectx of the node where
357 358 that line was last changed.
358 359 This returns tuples of ((ctx, linenumber), line) for each line,
359 360 if "linenumber" parameter is NOT "None".
360 361 In such tuples, linenumber means one at the first appearance
361 362 in the managed file.
362 363 To reduce annotation cost,
363 364 this returns fixed value(False is used) as linenumber,
364 365 if "linenumber" parameter is "False".'''
365 366
366 367 def decorate_compat(text, rev):
367 368 return ([rev] * len(text.splitlines()), text)
368 369
369 370 def without_linenumber(text, rev):
370 371 return ([(rev, False)] * len(text.splitlines()), text)
371 372
372 373 def with_linenumber(text, rev):
373 374 size = len(text.splitlines())
374 375 return ([(rev, i) for i in xrange(1, size + 1)], text)
375 376
376 377 decorate = (((linenumber is None) and decorate_compat) or
377 378 (linenumber and with_linenumber) or
378 379 without_linenumber)
379 380
380 381 def pair(parent, child):
381 382 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
382 383 child[0][b1:b2] = parent[0][a1:a2]
383 384 return child
384 385
385 386 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
386 387 def getctx(path, fileid):
387 388 log = path == self._path and self._filelog or getlog(path)
388 389 return filectx(self._repo, path, fileid=fileid, filelog=log)
389 390 getctx = util.lrucachefunc(getctx)
390 391
391 392 def parents(f):
392 393 # we want to reuse filectx objects as much as possible
393 394 p = f._path
394 395 if f._filerev is None: # working dir
395 396 pl = [(n.path(), n.filerev()) for n in f.parents()]
396 397 else:
397 398 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
398 399
399 400 if follow:
400 401 r = f.renamed()
401 402 if r:
402 403 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
403 404
404 405 return [getctx(p, n) for p, n in pl if n != nullrev]
405 406
406 407 # use linkrev to find the first changeset where self appeared
407 408 if self.rev() != self.linkrev():
408 409 base = self.filectx(self.filerev())
409 410 else:
410 411 base = self
411 412
412 413 # find all ancestors
413 414 needed = {base: 1}
414 415 visit = [base]
415 416 files = [base._path]
416 417 while visit:
417 418 f = visit.pop(0)
418 419 for p in parents(f):
419 420 if p not in needed:
420 421 needed[p] = 1
421 422 visit.append(p)
422 423 if p._path not in files:
423 424 files.append(p._path)
424 425 else:
425 426 # count how many times we'll use this
426 427 needed[p] += 1
427 428
428 429 # sort by revision (per file) which is a topological order
429 430 visit = []
430 431 for f in files:
431 432 fn = [(n.rev(), n) for n in needed if n._path == f]
432 433 visit.extend(fn)
433 434
434 435 hist = {}
435 436 for r, f in sorted(visit):
436 437 curr = decorate(f.data(), f)
437 438 for p in parents(f):
438 439 if p != nullid:
439 440 curr = pair(hist[p], curr)
440 441 # trim the history of unneeded revs
441 442 needed[p] -= 1
442 443 if not needed[p]:
443 444 del hist[p]
444 445 hist[f] = curr
445 446
446 447 return zip(hist[f][0], hist[f][1].splitlines(True))
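# Illustrative sketch (not part of the original file): consuming annotate()
# output. `repo` and the path 'hello.c' are hypothetical placeholders.
#
#     fctx = repo['tip'].filectx('hello.c')
#     for fc, line in fctx.annotate(follow=True):
#         print '%d: %s' % (fc.rev(), line),      # line keeps its newline
#
#     # with linenumber=True the first element is a (filectx, lineno) pair
#     for (fc, lineno), line in fctx.annotate(linenumber=True):
#         print '%d:%d: %s' % (fc.rev(), lineno, line),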
447 448
448 449 def ancestor(self, fc2):
449 450 """
450 451 find the common ancestor file context, if any, of self and fc2
451 452 """
452 453
453 454 acache = {}
454 455
455 456 # prime the ancestor cache for the working directory
456 457 for c in (self, fc2):
457 458 if c._filerev is None:
458 459 pl = [(n.path(), n.filenode()) for n in c.parents()]
459 460 acache[(c._path, None)] = pl
460 461
461 462 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
462 463 def parents(vertex):
463 464 if vertex in acache:
464 465 return acache[vertex]
465 466 f, n = vertex
466 467 if f not in flcache:
467 468 flcache[f] = self._repo.file(f)
468 469 fl = flcache[f]
469 470 pl = [(f, p) for p in fl.parents(n) if p != nullid]
470 471 re = fl.renamed(n)
471 472 if re:
472 473 pl.append(re)
473 474 acache[vertex] = pl
474 475 return pl
475 476
476 477 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
477 478 v = ancestor.ancestor(a, b, parents)
478 479 if v:
479 480 f, n = v
480 481 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
481 482
482 483 return None
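# Illustrative sketch (not part of the original file): finding the common
# ancestor of one file as seen from two branch heads. `repo`, the branch
# names and 'Makefile' are hypothetical.
#
#     fc1 = repo['default'].filectx('Makefile')
#     fc2 = repo['stable'].filectx('Makefile')
#     fca = fc1.ancestor(fc2)      # a filectx, or None if unrelated
#     if fca:
#         print 'merge base: %s@%s' % (fca.path(), fca.hex())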
483 484
484 485 class workingctx(changectx):
485 486 """A workingctx object makes access to data related to
486 487 the current working directory convenient.
487 488 parents - a pair of parent nodeids, or None to use the dirstate.
488 489 date - any valid date string or (unixtime, offset), or None.
489 490 user - username string, or None.
490 491 extra - a dictionary of extra values, or None.
491 492 changes - a list of file lists as returned by localrepo.status()
492 493 or None to use the repository status.
493 494 """
494 495 def __init__(self, repo, parents=None, text="", user=None, date=None,
495 496 extra=None, changes=None):
496 497 self._repo = repo
497 498 self._rev = None
498 499 self._node = None
499 500 self._text = text
500 501 if date:
501 502 self._date = util.parsedate(date)
502 503 if user:
503 504 self._user = user
504 505 if parents:
505 506 self._parents = [changectx(self._repo, p) for p in parents]
506 507 if changes:
507 508 self._status = list(changes)
508 509
509 510 self._extra = {}
510 511 if extra:
511 512 self._extra = extra.copy()
512 513 if 'branch' not in self._extra:
513 514 branch = self._repo.dirstate.branch()
514 515 try:
515 516 branch = branch.decode('UTF-8').encode('UTF-8')
516 517 except UnicodeDecodeError:
517 518 raise util.Abort(_('branch name not in UTF-8!'))
518 519 self._extra['branch'] = branch
519 520 if self._extra['branch'] == '':
520 521 self._extra['branch'] = 'default'
521 522
522 523 def __str__(self):
523 524 return str(self._parents[0]) + "+"
524 525
525 526 def __nonzero__(self):
526 527 return True
527 528
528 529 def __contains__(self, key):
529 530 return self._repo.dirstate[key] not in "?r"
530 531
531 532 @propertycache
532 533 def _manifest(self):
533 534 """generate a manifest corresponding to the working directory"""
534 535
535 536 man = self._parents[0].manifest().copy()
536 537 copied = self._repo.dirstate.copies()
537 538 cf = lambda x: man.flags(copied.get(x, x))
538 539 ff = self._repo.dirstate.flagfunc(cf)
539 540 modified, added, removed, deleted, unknown = self._status[:5]
540 541 for i, l in (("a", added), ("m", modified), ("u", unknown)):
541 542 for f in l:
542 543 man[f] = man.get(copied.get(f, f), nullid) + i
543 544 try:
544 545 man.set(f, ff(f))
545 546 except OSError:
546 547 pass
547 548
548 549 for f in deleted + removed:
549 550 if f in man:
550 551 del man[f]
551 552
552 553 return man
553 554
554 555 @propertycache
555 556 def _status(self):
556 557 return self._repo.status(unknown=True)
557 558
558 559 @propertycache
559 560 def _user(self):
560 561 return self._repo.ui.username()
561 562
562 563 @propertycache
563 564 def _date(self):
564 565 return util.makedate()
565 566
566 567 @propertycache
567 568 def _parents(self):
568 569 p = self._repo.dirstate.parents()
569 570 if p[1] == nullid:
570 571 p = p[:-1]
571 572 self._parents = [changectx(self._repo, x) for x in p]
572 573 return self._parents
573 574
574 575 def manifest(self): return self._manifest
575 576
576 577 def user(self): return self._user or self._repo.ui.username()
577 578 def date(self): return self._date
578 579 def description(self): return self._text
579 580 def files(self):
580 581 return sorted(self._status[0] + self._status[1] + self._status[2])
581 582
582 583 def modified(self): return self._status[0]
583 584 def added(self): return self._status[1]
584 585 def removed(self): return self._status[2]
585 586 def deleted(self): return self._status[3]
586 587 def unknown(self): return self._status[4]
587 588 def clean(self): return self._status[5]
588 589 def branch(self): return self._extra['branch']
589 590 def extra(self): return self._extra
590 591
591 592 def tags(self):
592 593 t = []
593 594 [t.extend(p.tags()) for p in self.parents()]
594 595 return t
595 596
596 597 def children(self):
597 598 return []
598 599
599 600 def flags(self, path):
600 601 if '_manifest' in self.__dict__:
601 602 try:
602 603 return self._manifest.flags(path)
603 604 except KeyError:
604 605 return ''
605 606
606 607 pnode = self._parents[0].changeset()[0]
607 608 orig = self._repo.dirstate.copies().get(path, path)
608 609 node, flag = self._repo.manifest.find(pnode, orig)
609 610 try:
610 611 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
611 612 return ff(path)
612 613 except OSError:
613 614 pass
614 615
615 616 if not node or path in self.deleted() or path in self.removed():
616 617 return ''
617 618 return flag
618 619
619 620 def filectx(self, path, filelog=None):
620 621 """get a file context from the working directory"""
621 622 return workingfilectx(self._repo, path, workingctx=self,
622 623 filelog=filelog)
623 624
624 625 def ancestor(self, c2):
625 626 """return the ancestor context of self and c2"""
626 627 return self._parents[0].ancestor(c2) # punt on two parents for now
627 628
628 629 def walk(self, match):
629 630 return sorted(self._repo.dirstate.walk(match, True, False))
630 631
631 632 def dirty(self, missing=False):
632 633 "check whether a working directory is modified"
633 634
634 635 return (self.p2() or self.branch() != self.p1().branch() or
635 636 self.modified() or self.added() or self.removed() or
636 637 (missing and self.deleted()))
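# Illustrative sketch (not part of the original file): typical use of
# workingctx via repo[None]. `repo` is a hypothetical localrepo.
#
#     wctx = repo[None]                 # context for the working directory
#     if wctx.dirty(missing=True):
#         print 'uncommitted changes on branch %s' % wctx.branch()
#         print 'modified:', wctx.modified()
#         print 'added:', wctx.added()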
637 638
638 639 class workingfilectx(filectx):
639 640 """A workingfilectx object makes access to data related to a particular
640 641 file in the working directory convenient."""
641 642 def __init__(self, repo, path, filelog=None, workingctx=None):
642 643 """path is the file's path relative to the repository root.
643 644 filelog and workingctx are optional and, if given, are reused."""
644 645 self._repo = repo
645 646 self._path = path
646 647 self._changeid = None
647 648 self._filerev = self._filenode = None
648 649
649 650 if filelog:
650 651 self._filelog = filelog
651 652 if workingctx:
652 653 self._changectx = workingctx
653 654
654 655 @propertycache
655 656 def _changectx(self):
656 657 return workingctx(self._repo)
657 658
658 659 def __nonzero__(self):
659 660 return True
660 661
661 662 def __str__(self):
662 663 return "%s@%s" % (self.path(), self._changectx)
663 664
664 665 def data(self): return self._repo.wread(self._path)
665 666 def renamed(self):
666 667 rp = self._repo.dirstate.copied(self._path)
667 668 if not rp:
668 669 return None
669 670 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
670 671
671 672 def parents(self):
672 673 '''return parent filectxs, following copies if necessary'''
673 674 def filenode(ctx, path):
674 675 return ctx._manifest.get(path, nullid)
675 676
676 677 path = self._path
677 678 fl = self._filelog
678 679 pcl = self._changectx._parents
679 680 renamed = self.renamed()
680 681
681 682 if renamed:
682 683 pl = [renamed + (None,)]
683 684 else:
684 685 pl = [(path, filenode(pcl[0], path), fl)]
685 686
686 687 for pc in pcl[1:]:
687 688 pl.append((path, filenode(pc, path), fl))
688 689
689 690 return [filectx(self._repo, p, fileid=n, filelog=l)
690 691 for p,n,l in pl if n != nullid]
691 692
692 693 def children(self):
693 694 return []
694 695
695 696 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
696 697 def date(self):
697 698 t, tz = self._changectx.date()
698 699 try:
699 700 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
700 701 except OSError, err:
701 702 if err.errno != errno.ENOENT: raise
702 703 return (t, tz)
703 704
704 705 def cmp(self, text): return self._repo.wread(self._path) == text
705 706
706 707 class memctx(object):
707 708 """Use memctx to perform in-memory commits via localrepo.commitctx().
708 709
709 710 Revision information is supplied at initialization time, while
710 711 related file data is made available through a callback
711 712 mechanism. 'repo' is the current localrepo, 'parents' is a
712 713 sequence of two parent revision identifiers (pass None for every
713 714 missing parent), 'text' is the commit message and 'files' lists
714 715 names of files touched by the revision (normalized and relative to
715 716 repository root).
716 717
717 718 filectxfn(repo, memctx, path) is a callable receiving the
718 719 repository, the current memctx object and the normalized path of
719 720 requested file, relative to repository root. It is fired by the
720 721 commit function for every file in 'files', but the order of calls is
721 722 undefined. If the file is available in the revision being
722 723 committed (updated or added), filectxfn returns a memfilectx
723 724 object. If the file was removed, filectxfn raises an
724 725 IOError. Moved files are represented by marking the source file
725 726 removed and the new file added with copy information (see
726 727 memfilectx).
727 728
728 729 user receives the committer name and defaults to the current
729 730 repository username; date is the commit date in any format
730 731 supported by util.parsedate() and defaults to the current date;
731 732 extra is a dictionary of metadata, or is left empty.
732 733 """
733 734 def __init__(self, repo, parents, text, files, filectxfn, user=None,
734 735 date=None, extra=None):
735 736 self._repo = repo
736 737 self._rev = None
737 738 self._node = None
738 739 self._text = text
739 740 self._date = date and util.parsedate(date) or util.makedate()
740 741 self._user = user
741 742 parents = [(p or nullid) for p in parents]
742 743 p1, p2 = parents
743 744 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
744 745 files = sorted(set(files))
745 746 self._status = [files, [], [], [], []]
746 747 self._filectxfn = filectxfn
747 748
748 749 self._extra = extra and extra.copy() or {}
749 750 if 'branch' not in self._extra:
750 751 self._extra['branch'] = 'default'
751 752 elif self._extra.get('branch') == '':
752 753 self._extra['branch'] = 'default'
753 754
754 755 def __str__(self):
755 756 return str(self._parents[0]) + "+"
756 757
757 758 def __int__(self):
758 759 return self._rev
759 760
760 761 def __nonzero__(self):
761 762 return True
762 763
763 764 def __getitem__(self, key):
764 765 return self.filectx(key)
765 766
766 767 def p1(self): return self._parents[0]
767 768 def p2(self): return self._parents[1]
768 769
769 770 def user(self): return self._user or self._repo.ui.username()
770 771 def date(self): return self._date
771 772 def description(self): return self._text
772 773 def files(self): return self.modified()
773 774 def modified(self): return self._status[0]
774 775 def added(self): return self._status[1]
775 776 def removed(self): return self._status[2]
776 777 def deleted(self): return self._status[3]
777 778 def unknown(self): return self._status[4]
778 779 def clean(self): return self._status[5]
779 780 def branch(self): return self._extra['branch']
780 781 def extra(self): return self._extra
781 782 def flags(self, f): return self[f].flags()
782 783
783 784 def parents(self):
784 785 """return contexts for each parent changeset"""
785 786 return self._parents
786 787
787 788 def filectx(self, path, filelog=None):
788 789 """get a file context from the working directory"""
789 790 return self._filectxfn(self._repo, self, path)
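# Illustrative sketch (not part of the original file): committing a single
# file in memory with memctx and the memfilectx class defined below. All
# names (`repo`, 'hello.txt', the message and the user) are hypothetical.
#
#     def getfilectx(repo, mctx, path):
#         return memfilectx(path, 'hello, world\n',
#                           islink=False, isexec=False, copied=None)
#
#     mctx = memctx(repo, parents=(repo['tip'].node(), None),
#                   text='add hello.txt', files=['hello.txt'],
#                   filectxfn=getfilectx, user='me <me@example.com>')
#     node = repo.commitctx(mctx)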
790 791
791 792 class memfilectx(object):
792 793 """memfilectx represents an in-memory file to commit.
793 794
794 795 See memctx for more details.
795 796 """
796 797 def __init__(self, path, data, islink, isexec, copied):
797 798 """
798 799 path is the normalized file path relative to repository root.
799 800 data is the file content as a string.
800 801 islink is True if the file is a symbolic link.
801 802 isexec is True if the file is executable.
802 803 copied is the source file path if current file was copied in the
803 804 revision being committed, or None."""
804 805 self._path = path
805 806 self._data = data
806 807 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
807 808 self._copied = None
808 809 if copied:
809 810 self._copied = (copied, nullid)
810 811
811 812 def __nonzero__(self): return True
812 813 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
813 814 def path(self): return self._path
814 815 def data(self): return self._data
815 816 def flags(self): return self._flags
816 817 def isexec(self): return 'x' in self._flags
817 818 def islink(self): return 'l' in self._flags
818 819 def renamed(self): return self._copied
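# Illustrative sketch (not part of the original file): a memfilectx for a
# copied, executable file. 'new.sh' and 'old.sh' are hypothetical paths.
#
#     mfctx = memfilectx('new.sh', '#!/bin/sh\necho hi\n',
#                        islink=False, isexec=True, copied='old.sh')
#     mfctx.flags()       # 'x'
#     mfctx.renamed()     # ('old.sh', nullid)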