Automatically run "verify" whenever we run "recover"
Matt Mackall
r1516:0b1b029b default
@@ -1,2633 +1,2635 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 14 demandload(globals(), "errno socket version struct atexit sets bz2")
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18
19 19 def filterfiles(filters, files):
20 20 l = [x for x in files if x in filters]
21 21
22 22 for t in filters:
23 23 if t and t[-1] != "/":
24 24 t += "/"
25 25 l += [x for x in files if x.startswith(t)]
26 26 return l
27 27
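# Illustrative note, not part of the original file: filterfiles() keeps the
# entries of `files` that are named in `filters` or that live under one of
# them, e.g.
#     filterfiles(["foo"], ["foo", "foo/bar", "baz"]) -> ["foo", "foo/bar"]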
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
35 35 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
36 36 opts.get('exclude'), head)
37 37
38 38 def makewalk(repo, pats, opts, head=''):
39 39 cwd = repo.getcwd()
40 40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
41 41 exact = dict(zip(files, files))
42 42 def walk():
43 43 for src, fn in repo.walk(files=files, match=matchfn):
44 44 yield src, fn, util.pathto(cwd, fn), fn in exact
45 45 return files, matchfn, walk()
46 46
47 47 def walk(repo, pats, opts, head=''):
48 48 files, matchfn, results = makewalk(repo, pats, opts, head)
49 49 for r in results:
50 50 yield r
51 51
52 52 def walkchangerevs(ui, repo, cwd, pats, opts):
53 53 '''Iterate over files and the revs they changed in.
54 54
55 55 Callers most commonly need to iterate backwards over the history
56 56 they are interested in. Doing so has awful (quadratic-looking)
57 57 performance, so we use iterators in a "windowed" way.
58 58
59 59 We walk a window of revisions in the desired order. Within the
60 60 window, we first walk forwards to gather data, then in the desired
61 61 order (usually backwards) to display it.
62 62
63 63 This function returns an (iterator, getchange) pair. The
64 64 getchange function returns the changelog entry for a numeric
65 65 revision. The iterator yields 3-tuples. They will be of one of
66 66 the following forms:
67 67
68 68 "window", incrementing, lastrev: stepping through a window,
69 69 positive if walking forwards through revs, last rev in the
70 70 sequence iterated over - use to reset state for the current window
71 71
72 72 "add", rev, fns: out-of-order traversal of the given file names
73 73 fns, which changed during revision rev - use to gather data for
74 74 possible display
75 75
76 76 "iter", rev, None: in-order traversal of the revs earlier iterated
77 77 over with "add" - use to display data'''
78 78
79 79 if repo.changelog.count() == 0:
80 80 return [], False
81 81
82 82 cwd = repo.getcwd()
83 83 if not pats and cwd:
84 84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
85 85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
86 86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
87 87 pats, opts)
88 88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
89 89 wanted = {}
90 90 slowpath = anypats
91 91 window = 300
92 92 fncache = {}
93 93
94 94 chcache = {}
95 95 def getchange(rev):
96 96 ch = chcache.get(rev)
97 97 if ch is None:
98 98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
99 99 return ch
100 100
101 101 if not slowpath and not files:
102 102 # No files, no patterns. Display all revs.
103 103 wanted = dict(zip(revs, revs))
104 104 if not slowpath:
105 105 # Only files, no patterns. Check the history of each file.
106 106 def filerevgen(filelog):
107 107 for i in xrange(filelog.count() - 1, -1, -window):
108 108 revs = []
109 109 for j in xrange(max(0, i - window), i + 1):
110 110 revs.append(filelog.linkrev(filelog.node(j)))
111 111 revs.reverse()
112 112 for rev in revs:
113 113 yield rev
114 114
115 115 minrev, maxrev = min(revs), max(revs)
116 116 for file in files:
117 117 filelog = repo.file(file)
118 118 # A zero count may be a directory or deleted file, so
119 119 # try to find matching entries on the slow path.
120 120 if filelog.count() == 0:
121 121 slowpath = True
122 122 break
123 123 for rev in filerevgen(filelog):
124 124 if rev <= maxrev:
125 125 if rev < minrev:
126 126 break
127 127 fncache.setdefault(rev, [])
128 128 fncache[rev].append(file)
129 129 wanted[rev] = 1
130 130 if slowpath:
131 131 # The slow path checks files modified in every changeset.
132 132 def changerevgen():
133 133 for i in xrange(repo.changelog.count() - 1, -1, -window):
134 134 for j in xrange(max(0, i - window), i + 1):
135 135 yield j, getchange(j)[3]
136 136
137 137 for rev, changefiles in changerevgen():
138 138 matches = filter(matchfn, changefiles)
139 139 if matches:
140 140 fncache[rev] = matches
141 141 wanted[rev] = 1
142 142
143 143 def iterate():
144 144 for i in xrange(0, len(revs), window):
145 145 yield 'window', revs[0] < revs[-1], revs[-1]
146 146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
147 147 if rev in wanted]
148 148 srevs = list(nrevs)
149 149 srevs.sort()
150 150 for rev in srevs:
151 151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
152 152 yield 'add', rev, fns
153 153 for rev in nrevs:
154 154 yield 'iter', rev, None
155 155 return iterate(), getchange
156 156
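# Illustrative sketch, not part of the original commands.py: one way a
# caller might consume the (iterator, getchange) pair described in the
# walkchangerevs docstring above.  It mirrors the "window"/"add"/"iter"
# protocol used by grep() and log() further down in this file; the helper
# name and the opts values are assumptions made for the example only.
def _walkchangerevs_example(ui, repo):
    exopts = {'include': [], 'exclude': [], 'rev': []}
    changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(),
                                           [], exopts)
    fns_by_rev = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            # a new window starts: reset per-window state
            fns_by_rev.clear()
        elif st == 'add':
            # out-of-order pass: gather data for later display;
            # getchange(rev) would give the full changelog entry here
            fns_by_rev[rev] = fns
        elif st == 'iter':
            # in-order pass: display what was gathered during 'add'
            ui.write("%d: %s\n" % (rev, " ".join(fns_by_rev[rev])))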
157 157 revrangesep = ':'
158 158
159 159 def revrange(ui, repo, revs, revlog=None):
160 160 """Yield revisions as strings from a list of revision specifications."""
161 161 if revlog is None:
162 162 revlog = repo.changelog
163 163 revcount = revlog.count()
164 164 def fix(val, defval):
165 165 if not val:
166 166 return defval
167 167 try:
168 168 num = int(val)
169 169 if str(num) != val:
170 170 raise ValueError
171 171 if num < 0: num += revcount
172 172 if num < 0: num = 0
173 173 elif num >= revcount:
174 174 raise ValueError
175 175 except ValueError:
176 176 try:
177 177 num = repo.changelog.rev(repo.lookup(val))
178 178 except KeyError:
179 179 try:
180 180 num = revlog.rev(revlog.lookup(val))
181 181 except KeyError:
182 182 raise util.Abort(_('invalid revision identifier %s'), val)
183 183 return num
184 184 seen = {}
185 185 for spec in revs:
186 186 if spec.find(revrangesep) >= 0:
187 187 start, end = spec.split(revrangesep, 1)
188 188 start = fix(start, 0)
189 189 end = fix(end, revcount - 1)
190 190 step = start > end and -1 or 1
191 191 for rev in xrange(start, end+step, step):
192 192 if rev in seen: continue
193 193 seen[rev] = 1
194 194 yield str(rev)
195 195 else:
196 196 rev = fix(spec, None)
197 197 if rev in seen: continue
198 198 seen[rev] = 1
199 199 yield str(rev)
200 200
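# Illustrative sketch, not part of the original file: revrange() accepts
# plain revisions and "start:end" ranges, resolves symbolic names such as
# "tip" through the repository, and yields each revision at most once, as
# a string.  The helper name is made up for the example and it assumes a
# non-empty repository.
def _revrange_example(ui, repo):
    # "0:2" expands to 0, 1, 2; "tip" resolves via repo.lookup and is
    # skipped if it already appeared in the range
    return list(revrange(ui, repo, ["0:2", "tip"]))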
201 201 def make_filename(repo, r, pat, node=None,
202 202 total=None, seqno=None, revwidth=None, pathname=None):
203 203 node_expander = {
204 204 'H': lambda: hex(node),
205 205 'R': lambda: str(r.rev(node)),
206 206 'h': lambda: short(node),
207 207 }
208 208 expander = {
209 209 '%': lambda: '%',
210 210 'b': lambda: os.path.basename(repo.root),
211 211 }
212 212
213 213 try:
214 214 if node:
215 215 expander.update(node_expander)
216 216 if node and revwidth is not None:
217 217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
218 218 if total is not None:
219 219 expander['N'] = lambda: str(total)
220 220 if seqno is not None:
221 221 expander['n'] = lambda: str(seqno)
222 222 if total is not None and seqno is not None:
223 223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
224 224 if pathname is not None:
225 225 expander['s'] = lambda: os.path.basename(pathname)
226 226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
227 227 expander['p'] = lambda: pathname
228 228
229 229 newname = []
230 230 patlen = len(pat)
231 231 i = 0
232 232 while i < patlen:
233 233 c = pat[i]
234 234 if c == '%':
235 235 i += 1
236 236 c = pat[i]
237 237 c = expander[c]()
238 238 newname.append(c)
239 239 i += 1
240 240 return ''.join(newname)
241 241 except KeyError, inst:
242 242 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
243 243 inst.args[0])
244 244
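# Illustrative sketch, not part of the original file: how the export-style
# output patterns expand.  With revwidth=3, the pattern '%b-%r.patch' for
# revision 7 of a repository rooted at /tmp/myrepo would expand to
# 'myrepo-007.patch'.  The helper name is made up for the example.
def _make_filename_example(repo, node):
    return make_filename(repo, repo.changelog, '%b-%r.patch',
                         node=node, revwidth=3)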
245 245 def make_file(repo, r, pat, node=None,
246 246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
247 247 if not pat or pat == '-':
248 248 return 'w' in mode and sys.stdout or sys.stdin
249 249 if hasattr(pat, 'write') and 'w' in mode:
250 250 return pat
251 251 if hasattr(pat, 'read') and 'r' in mode:
252 252 return pat
253 253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
254 254 pathname),
255 255 mode)
256 256
257 257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
258 258 changes=None, text=False):
259 259 if not changes:
260 260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
261 261 else:
262 262 (c, a, d, u) = changes
263 263 if files:
264 264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
265 265
266 266 if not c and not a and not d:
267 267 return
268 268
269 269 if node2:
270 270 change = repo.changelog.read(node2)
271 271 mmap2 = repo.manifest.read(change[0])
272 272 date2 = util.datestr(change[2])
273 273 def read(f):
274 274 return repo.file(f).read(mmap2[f])
275 275 else:
276 276 date2 = util.datestr()
277 277 if not node1:
278 278 node1 = repo.dirstate.parents()[0]
279 279 def read(f):
280 280 return repo.wfile(f).read()
281 281
282 282 if ui.quiet:
283 283 r = None
284 284 else:
285 285 hexfunc = ui.verbose and hex or short
286 286 r = [hexfunc(node) for node in [node1, node2] if node]
287 287
288 288 change = repo.changelog.read(node1)
289 289 mmap = repo.manifest.read(change[0])
290 290 date1 = util.datestr(change[2])
291 291
292 292 for f in c:
293 293 to = None
294 294 if f in mmap:
295 295 to = repo.file(f).read(mmap[f])
296 296 tn = read(f)
297 297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
298 298 for f in a:
299 299 to = None
300 300 tn = read(f)
301 301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
302 302 for f in d:
303 303 to = repo.file(f).read(mmap[f])
304 304 tn = None
305 305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
306 306
307 307 def trimuser(ui, name, rev, revcache):
308 308 """trim the name of the user who committed a change"""
309 309 user = revcache.get(rev)
310 310 if user is None:
311 311 user = revcache[rev] = ui.shortuser(name)
312 312 return user
313 313
314 314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
315 315 """show a single changeset or file revision"""
316 316 log = repo.changelog
317 317 if changenode is None:
318 318 changenode = log.node(rev)
319 319 elif not rev:
320 320 rev = log.rev(changenode)
321 321
322 322 if ui.quiet:
323 323 ui.write("%d:%s\n" % (rev, short(changenode)))
324 324 return
325 325
326 326 changes = log.read(changenode)
327 327 date = util.datestr(changes[2])
328 328
329 329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
330 330 for p in log.parents(changenode)
331 331 if ui.debugflag or p != nullid]
332 332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
333 333 parents = []
334 334
335 335 if ui.verbose:
336 336 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
337 337 else:
338 338 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
339 339
340 340 for tag in repo.nodetags(changenode):
341 341 ui.status(_("tag: %s\n") % tag)
342 342 for parent in parents:
343 343 ui.write(_("parent: %d:%s\n") % parent)
344 344
345 345 if brinfo and changenode in brinfo:
346 346 br = brinfo[changenode]
347 347 ui.write(_("branch: %s\n") % " ".join(br))
348 348
349 349 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
350 350 hex(changes[0])))
351 351 ui.status(_("user: %s\n") % changes[1])
352 352 ui.status(_("date: %s\n") % date)
353 353
354 354 if ui.debugflag:
355 355 files = repo.changes(log.parents(changenode)[0], changenode)
356 356 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
357 357 if value:
358 358 ui.note("%-12s %s\n" % (key, " ".join(value)))
359 359 else:
360 360 ui.note(_("files: %s\n") % " ".join(changes[3]))
361 361
362 362 description = changes[4].strip()
363 363 if description:
364 364 if ui.verbose:
365 365 ui.status(_("description:\n"))
366 366 ui.status(description)
367 367 ui.status("\n\n")
368 368 else:
369 369 ui.status(_("summary: %s\n") % description.splitlines()[0])
370 370 ui.status("\n")
371 371
372 372 def show_version(ui):
373 373 """output version and copyright information"""
374 374 ui.write(_("Mercurial Distributed SCM (version %s)\n")
375 375 % version.get_version())
376 376 ui.status(_(
377 377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
378 378 "This is free software; see the source for copying conditions. "
379 379 "There is NO\nwarranty; "
380 380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
381 381 ))
382 382
383 383 def help_(ui, cmd=None, with_version=False):
384 384 """show help for a given command or all commands"""
385 385 option_lists = []
386 386 if cmd and cmd != 'shortlist':
387 387 if with_version:
388 388 show_version(ui)
389 389 ui.write('\n')
390 390 key, i = find(cmd)
391 391 # synopsis
392 392 ui.write("%s\n\n" % i[2])
393 393
394 394 # description
395 395 doc = i[0].__doc__
396 396 if ui.quiet:
397 397 doc = doc.splitlines(0)[0]
398 398 ui.write("%s\n" % doc.rstrip())
399 399
400 400 if not ui.quiet:
401 401 # aliases
402 402 aliases = ', '.join(key.split('|')[1:])
403 403 if aliases:
404 404 ui.write(_("\naliases: %s\n") % aliases)
405 405
406 406 # options
407 407 if i[1]:
408 408 option_lists.append(("options", i[1]))
409 409
410 410 else:
411 411 # program name
412 412 if ui.verbose or with_version:
413 413 show_version(ui)
414 414 else:
415 415 ui.status(_("Mercurial Distributed SCM\n"))
416 416 ui.status('\n')
417 417
418 418 # list of commands
419 419 if cmd == "shortlist":
420 420 ui.status(_('basic commands (use "hg help" '
421 421 'for the full list or option "-v" for details):\n\n'))
422 422 elif ui.verbose:
423 423 ui.status(_('list of commands:\n\n'))
424 424 else:
425 425 ui.status(_('list of commands (use "hg help -v" '
426 426 'to show aliases and global options):\n\n'))
427 427
428 428 h = {}
429 429 cmds = {}
430 430 for c, e in table.items():
431 431 f = c.split("|")[0]
432 432 if cmd == "shortlist" and not f.startswith("^"):
433 433 continue
434 434 f = f.lstrip("^")
435 435 if not ui.debugflag and f.startswith("debug"):
436 436 continue
437 437 d = ""
438 438 if e[0].__doc__:
439 439 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 440 h[f] = d
441 441 cmds[f]=c.lstrip("^")
442 442
443 443 fns = h.keys()
444 444 fns.sort()
445 445 m = max(map(len, fns))
446 446 for f in fns:
447 447 if ui.verbose:
448 448 commands = cmds[f].replace("|",", ")
449 449 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 450 else:
451 451 ui.write(' %-*s %s\n' % (m, f, h[f]))
452 452
453 453 # global options
454 454 if ui.verbose:
455 455 option_lists.append(("global options", globalopts))
456 456
457 457 # list all option lists
458 458 opt_output = []
459 459 for title, options in option_lists:
460 460 opt_output.append(("\n%s:\n" % title, None))
461 461 for shortopt, longopt, default, desc in options:
462 462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 463 longopt and " --%s" % longopt),
464 464 "%s%s" % (desc,
465 465 default and _(" (default: %s)") % default
466 466 or "")))
467 467
468 468 if opt_output:
469 469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
470 470 for first, second in opt_output:
471 471 if second:
472 472 ui.write(" %-*s %s\n" % (opts_len, first, second))
473 473 else:
474 474 ui.write("%s\n" % first)
475 475
476 476 # Commands start here, listed alphabetically
477 477
478 478 def add(ui, repo, *pats, **opts):
479 479 """add the specified files on the next commit
480 480
481 481 Schedule files to be version controlled and added to the repository.
482 482
483 483 The files will be added to the repository at the next commit.
484 484
485 485 If no names are given, add all files in the current directory and
486 486 its subdirectories.
487 487 """
488 488
489 489 names = []
490 490 for src, abs, rel, exact in walk(repo, pats, opts):
491 491 if exact:
492 492 if ui.verbose: ui.status(_('adding %s\n') % rel)
493 493 names.append(abs)
494 494 elif repo.dirstate.state(abs) == '?':
495 495 ui.status(_('adding %s\n') % rel)
496 496 names.append(abs)
497 497 repo.add(names)
498 498
499 499 def addremove(ui, repo, *pats, **opts):
500 500 """add all new files, delete all missing files
501 501
502 502 Add all new files and remove all missing files from the repository.
503 503
504 504 New files are ignored if they match any of the patterns in .hgignore. As
505 505 with add, these changes take effect at the next commit.
506 506 """
507 507 add, remove = [], []
508 508 for src, abs, rel, exact in walk(repo, pats, opts):
509 509 if src == 'f' and repo.dirstate.state(abs) == '?':
510 510 add.append(abs)
511 511 if ui.verbose or not exact:
512 512 ui.status(_('adding %s\n') % rel)
513 513 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
514 514 remove.append(abs)
515 515 if ui.verbose or not exact:
516 516 ui.status(_('removing %s\n') % rel)
517 517 repo.add(add)
518 518 repo.remove(remove)
519 519
520 520 def annotate(ui, repo, *pats, **opts):
521 521 """show changeset information per file line
522 522
523 523 List changes in files, showing the revision id responsible for each line
524 524
525 525 This command is useful to discover who made a change or when a change took
526 526 place.
527 527
528 528 Without the -a option, annotate will avoid processing files it
529 529 detects as binary. With -a, annotate will generate an annotation
530 530 anyway, probably with undesirable results.
531 531 """
532 532 def getnode(rev):
533 533 return short(repo.changelog.node(rev))
534 534
535 535 ucache = {}
536 536 def getname(rev):
537 537 cl = repo.changelog.read(repo.changelog.node(rev))
538 538 return trimuser(ui, cl[1], rev, ucache)
539 539
540 540 if not pats:
541 541 raise util.Abort(_('at least one file name or pattern required'))
542 542
543 543 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
544 544 if not opts['user'] and not opts['changeset']:
545 545 opts['number'] = 1
546 546
547 547 if opts['rev']:
548 548 node = repo.changelog.lookup(opts['rev'])
549 549 else:
550 550 node = repo.dirstate.parents()[0]
551 551 change = repo.changelog.read(node)
552 552 mmap = repo.manifest.read(change[0])
553 553
554 554 for src, abs, rel, exact in walk(repo, pats, opts):
555 555 if abs not in mmap:
556 556 ui.warn(_("warning: %s is not in the repository!\n") % rel)
557 557 continue
558 558
559 559 f = repo.file(abs)
560 560 if not opts['text'] and util.binary(f.read(mmap[abs])):
561 561 ui.write(_("%s: binary file\n") % rel)
562 562 continue
563 563
564 564 lines = f.annotate(mmap[abs])
565 565 pieces = []
566 566
567 567 for o, f in opmap:
568 568 if opts[o]:
569 569 l = [f(n) for n, dummy in lines]
570 570 if l:
571 571 m = max(map(len, l))
572 572 pieces.append(["%*s" % (m, x) for x in l])
573 573
574 574 if pieces:
575 575 for p, l in zip(zip(*pieces), lines):
576 576 ui.write("%s: %s" % (" ".join(p), l[1]))
577 577
578 578 def bundle(ui, repo, fname, dest="default-push", **opts):
579 579 """create a changegroup file
580 580
581 581 Generate a compressed changegroup file collecting all changesets
582 582 not found in the other repository.
583 583
584 584 This file can then be transferred using conventional means and
585 585 applied to another repository with the unbundle command. This is
586 586 useful when native push and pull are not available or when
587 587 exporting an entire repository is undesirable. The standard file
588 588 extension is ".hg".
589 589
590 590 Unlike import/export, this exactly preserves all changeset
591 591 contents including permissions, rename data, and revision history.
592 592 """
593 593 f = open(fname, "wb")
594 594 dest = ui.expandpath(dest, repo.root)
595 595 other = hg.repository(ui, dest)
596 596 o = repo.findoutgoing(other)
597 597 cg = repo.changegroup(o)
598 598
599 599 try:
600 600 f.write("HG10")
601 601 z = bz2.BZ2Compressor(9)
602 602 while 1:
603 603 chunk = cg.read(4096)
604 604 if not chunk:
605 605 break
606 606 f.write(z.compress(chunk))
607 607 f.write(z.flush())
608 608 except:
609 609 os.unlink(fname)
610 610 raise
611 611
612 612 def cat(ui, repo, file1, *pats, **opts):
613 613 """output the latest or given revisions of files
614 614
615 615 Print the specified files as they were at the given revision.
616 616 If no revision is given then the tip is used.
617 617
618 618 Output may be to a file, in which case the name of the file is
619 619 given using a format string. The formatting rules are the same as
620 620 for the export command, with the following additions:
621 621
622 622 %s basename of file being printed
623 623 %d dirname of file being printed, or '.' if in repo root
624 624 %p root-relative path name of file being printed
625 625 """
626 626 mf = {}
627 627 if opts['rev']:
628 628 change = repo.changelog.read(repo.lookup(opts['rev']))
629 629 mf = repo.manifest.read(change[0])
630 630 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
631 631 r = repo.file(abs)
632 632 if opts['rev']:
633 633 try:
634 634 n = mf[abs]
635 635 except (hg.RepoError, KeyError):
636 636 try:
637 637 n = r.lookup(opts['rev'])
638 638 except KeyError, inst:
639 639 raise util.Abort(_('cannot find file %s in rev %s'), rel, opts['rev'])
640 640 else:
641 641 n = r.tip()
642 642 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
643 643 fp.write(r.read(n))
644 644
645 645 def clone(ui, source, dest=None, **opts):
646 646 """make a copy of an existing repository
647 647
648 648 Create a copy of an existing repository in a new directory.
649 649
650 650 If no destination directory name is specified, it defaults to the
651 651 basename of the source.
652 652
653 653 The location of the source is added to the new repository's
654 654 .hg/hgrc file, as the default to be used for future pulls.
655 655
656 656 For efficiency, hardlinks are used for cloning whenever the source
657 657 and destination are on the same filesystem. Some filesystems,
658 658 such as AFS, implement hardlinking incorrectly, but do not report
659 659 errors. In these cases, use the --pull option to avoid
660 660 hardlinking.
661 661 """
662 662 if dest is None:
663 663 dest = os.path.basename(os.path.normpath(source))
664 664
665 665 if os.path.exists(dest):
666 666 raise util.Abort(_("destination '%s' already exists"), dest)
667 667
668 668 dest = os.path.realpath(dest)
669 669
670 670 class Dircleanup:
671 671 def __init__(self, dir_):
672 672 self.rmtree = shutil.rmtree
673 673 self.dir_ = dir_
674 674 os.mkdir(dir_)
675 675 def close(self):
676 676 self.dir_ = None
677 677 def __del__(self):
678 678 if self.dir_:
679 679 self.rmtree(self.dir_, True)
680 680
681 681 if opts['ssh']:
682 682 ui.setconfig("ui", "ssh", opts['ssh'])
683 683 if opts['remotecmd']:
684 684 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
685 685
686 686 if not os.path.exists(source):
687 687 source = ui.expandpath(source)
688 688
689 689 d = Dircleanup(dest)
690 690 abspath = source
691 691 other = hg.repository(ui, source)
692 692
693 693 copy = False
694 694 if other.dev() != -1:
695 695 abspath = os.path.abspath(source)
696 696 if not opts['pull'] and not opts['rev']:
697 697 copy = True
698 698
699 699 if copy:
700 700 try:
701 701 # we use a lock here because if we race with commit, we
702 702 # can end up with extra data in the cloned revlogs that's
703 703 # not pointed to by changesets, thus causing verify to
704 704 # fail
705 705 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
706 706 except OSError:
707 707 copy = False
708 708
709 709 if copy:
710 710 # we lock here to avoid premature writing to the target
711 711 os.mkdir(os.path.join(dest, ".hg"))
712 712 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
713 713
714 714 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
715 715 for f in files.split():
716 716 src = os.path.join(source, ".hg", f)
717 717 dst = os.path.join(dest, ".hg", f)
718 718 try:
719 719 util.copyfiles(src, dst)
720 720 except OSError, inst:
721 721 if inst.errno != errno.ENOENT: raise
722 722
723 723 repo = hg.repository(ui, dest)
724 724
725 725 else:
726 726 revs = None
727 727 if opts['rev']:
728 728 if not other.local():
729 729 raise util.Abort("clone -r not supported yet for remote repositories.")
730 730 else:
731 731 revs = [other.lookup(rev) for rev in opts['rev']]
732 732 repo = hg.repository(ui, dest, create=1)
733 733 repo.pull(other, heads = revs)
734 734
735 735 f = repo.opener("hgrc", "w", text=True)
736 736 f.write("[paths]\n")
737 737 f.write("default = %s\n" % abspath)
738 738
739 739 if not opts['noupdate']:
740 740 update(ui, repo)
741 741
742 742 d.close()
743 743
744 744 def commit(ui, repo, *pats, **opts):
745 745 """commit the specified files or all outstanding changes
746 746
747 747 Commit changes to the given files into the repository.
748 748
749 749 If a list of files is omitted, all changes reported by "hg status"
750 750 from the root of the repository will be committed.
751 751
752 752 The HGEDITOR or EDITOR environment variables are used to start an
753 753 editor to add a commit comment.
754 754 """
755 755 message = opts['message']
756 756 logfile = opts['logfile']
757 757
758 758 if message and logfile:
759 759 raise util.Abort(_('options --message and --logfile are mutually '
760 760 'exclusive'))
761 761 if not message and logfile:
762 762 try:
763 763 if logfile == '-':
764 764 message = sys.stdin.read()
765 765 else:
766 766 message = open(logfile).read()
767 767 except IOError, inst:
768 768 raise util.Abort(_("can't read commit message '%s': %s") %
769 769 (logfile, inst.strerror))
770 770
771 771 if opts['addremove']:
772 772 addremove(ui, repo, *pats, **opts)
773 773 cwd = repo.getcwd()
774 774 if not pats and cwd:
775 775 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
776 776 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
777 777 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
778 778 pats, opts)
779 779 if pats:
780 780 c, a, d, u = repo.changes(files=fns, match=match)
781 781 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
782 782 else:
783 783 files = []
784 784 try:
785 785 repo.commit(files, message, opts['user'], opts['date'], match)
786 786 except ValueError, inst:
787 787 raise util.Abort(str(inst))
788 788
789 789 def docopy(ui, repo, pats, opts):
790 790 cwd = repo.getcwd()
791 791 errors = 0
792 792 copied = []
793 793 targets = {}
794 794
795 795 def okaytocopy(abs, rel, exact):
796 796 reasons = {'?': _('is not managed'),
797 797 'a': _('has been marked for add')}
798 798 reason = reasons.get(repo.dirstate.state(abs))
799 799 if reason:
800 800 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
801 801 else:
802 802 return True
803 803
804 804 def copy(abssrc, relsrc, target, exact):
805 805 abstarget = util.canonpath(repo.root, cwd, target)
806 806 reltarget = util.pathto(cwd, abstarget)
807 807 prevsrc = targets.get(abstarget)
808 808 if prevsrc is not None:
809 809 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
810 810 (reltarget, abssrc, prevsrc))
811 811 return
812 812 elif os.path.exists(reltarget):
813 813 if opts['force']:
814 814 os.unlink(reltarget)
815 815 else:
816 816 ui.warn(_('%s: not overwriting - file exists\n') %
817 817 reltarget)
818 818 return
819 819 if ui.verbose or not exact:
820 820 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
821 821 if not opts['after']:
822 822 targetdir = os.path.dirname(reltarget) or '.'
823 823 if not os.path.isdir(targetdir):
824 824 os.makedirs(targetdir)
825 825 try:
826 826 shutil.copyfile(relsrc, reltarget)
827 827 shutil.copymode(relsrc, reltarget)
828 828 except shutil.Error, inst:
829 829 raise util.Abort(str(inst))
830 830 except IOError, inst:
831 831 if inst.errno == errno.ENOENT:
832 832 ui.warn(_('%s: deleted in working copy\n') % relsrc)
833 833 else:
834 834 ui.warn(_('%s: cannot copy - %s\n') %
835 835 (relsrc, inst.strerror))
836 836 errors += 1
837 837 return
838 838 targets[abstarget] = abssrc
839 839 repo.copy(abssrc, abstarget)
840 840 copied.append((abssrc, relsrc, exact))
841 841
842 842 pats = list(pats)
843 843 if not pats:
844 844 raise util.Abort(_('no source or destination specified'))
845 845 if len(pats) == 1:
846 846 raise util.Abort(_('no destination specified'))
847 847 dest = pats.pop()
848 848 destdirexists = os.path.isdir(dest)
849 849 if (len(pats) > 1 or not os.path.exists(pats[0])) and not destdirexists:
850 850 raise util.Abort(_('with multiple sources, destination must be an '
851 851 'existing directory'))
852 852
853 853 for pat in pats:
854 854 if os.path.isdir(pat):
855 855 if destdirexists:
856 856 striplen = len(os.path.split(pat)[0])
857 857 else:
858 858 striplen = len(pat)
859 859 if striplen:
860 860 striplen += len(os.sep)
861 861 targetpath = lambda p: os.path.join(dest, p[striplen:])
862 862 elif destdirexists:
863 863 targetpath = lambda p: os.path.join(dest, os.path.basename(p))
864 864 else:
865 865 targetpath = lambda p: dest
866 866 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
867 867 if okaytocopy(abssrc, relsrc, exact):
868 868 copy(abssrc, relsrc, targetpath(abssrc), exact)
869 869
870 870 if errors:
871 871 ui.warn(_('(consider using --after)\n'))
872 872 if len(copied) == 0:
873 873 raise util.Abort(_('no files to copy'))
874 874 return errors, copied
875 875
876 876 def copy(ui, repo, *pats, **opts):
877 877 """mark files as copied for the next commit
878 878
879 879 Mark dest as having copies of source files. If dest is a
880 880 directory, copies are put in that directory. If dest is a file,
881 881 there can only be one source.
882 882
883 883 By default, this command copies the contents of files as they
884 884 stand in the working directory. If invoked with --after, the
885 885 operation is recorded, but no copying is performed.
886 886
887 887 This command takes effect in the next commit.
888 888
889 889 NOTE: This command should be treated as experimental. While it
890 890 should properly record copied files, this information is not yet
891 891 fully used by merge, nor fully reported by log.
892 892 """
893 893 errs, copied = docopy(ui, repo, pats, opts)
894 894 return errs
895 895
896 896 def debugancestor(ui, index, rev1, rev2):
897 897 """find the ancestor revision of two revisions in a given index"""
898 898 r = revlog.revlog(util.opener(os.getcwd()), index, "")
899 899 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
900 900 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
901 901
902 902 def debugcheckstate(ui, repo):
903 903 """validate the correctness of the current dirstate"""
904 904 parent1, parent2 = repo.dirstate.parents()
905 905 repo.dirstate.read()
906 906 dc = repo.dirstate.map
907 907 keys = dc.keys()
908 908 keys.sort()
909 909 m1n = repo.changelog.read(parent1)[0]
910 910 m2n = repo.changelog.read(parent2)[0]
911 911 m1 = repo.manifest.read(m1n)
912 912 m2 = repo.manifest.read(m2n)
913 913 errors = 0
914 914 for f in dc:
915 915 state = repo.dirstate.state(f)
916 916 if state in "nr" and f not in m1:
917 917 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
918 918 errors += 1
919 919 if state in "a" and f in m1:
920 920 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
921 921 errors += 1
922 922 if state in "m" and f not in m1 and f not in m2:
923 923 ui.warn(_("%s in state %s, but not in either manifest\n") %
924 924 (f, state))
925 925 errors += 1
926 926 for f in m1:
927 927 state = repo.dirstate.state(f)
928 928 if state not in "nrm":
929 929 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
930 930 errors += 1
931 931 if errors:
932 932 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
933 933
934 934 def debugconfig(ui):
935 935 """show combined config settings from all hgrc files"""
936 936 try:
937 937 repo = hg.repository(ui)
938 938 except hg.RepoError:
939 939 pass
940 940 for section, name, value in ui.walkconfig():
941 941 ui.write('%s.%s=%s\n' % (section, name, value))
942 942
943 943 def debugsetparents(ui, repo, rev1, rev2=None):
944 944 """manually set the parents of the current working directory
945 945
946 946 This is useful for writing repository conversion tools, but should
947 947 be used with care.
948 948 """
949 949
950 950 if not rev2:
951 951 rev2 = hex(nullid)
952 952
953 953 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
954 954
955 955 def debugstate(ui, repo):
956 956 """show the contents of the current dirstate"""
957 957 repo.dirstate.read()
958 958 dc = repo.dirstate.map
959 959 keys = dc.keys()
960 960 keys.sort()
961 961 for file_ in keys:
962 962 ui.write("%c %3o %10d %s %s\n"
963 963 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
964 964 time.strftime("%x %X",
965 965 time.localtime(dc[file_][3])), file_))
966 966 for f in repo.dirstate.copies:
967 967 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
968 968
969 969 def debugdata(ui, file_, rev):
970 970 """dump the contents of a data file revision"""
971 971 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
972 972 try:
973 973 ui.write(r.revision(r.lookup(rev)))
974 974 except KeyError:
975 975 raise util.Abort(_('invalid revision identifier %s'), rev)
976 976
977 977 def debugindex(ui, file_):
978 978 """dump the contents of an index file"""
979 979 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
980 980 ui.write(" rev offset length base linkrev" +
981 981 " nodeid p1 p2\n")
982 982 for i in range(r.count()):
983 983 e = r.index[i]
984 984 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
985 985 i, e[0], e[1], e[2], e[3],
986 986 short(e[6]), short(e[4]), short(e[5])))
987 987
988 988 def debugindexdot(ui, file_):
989 989 """dump an index DAG as a .dot file"""
990 990 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
991 991 ui.write("digraph G {\n")
992 992 for i in range(r.count()):
993 993 e = r.index[i]
994 994 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
995 995 if e[5] != nullid:
996 996 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
997 997 ui.write("}\n")
998 998
999 999 def debugrename(ui, repo, file, rev=None):
1000 1000 """dump rename information"""
1001 1001 r = repo.file(relpath(repo, [file])[0])
1002 1002 if rev:
1003 1003 try:
1004 1004 # assume all revision numbers are for changesets
1005 1005 n = repo.lookup(rev)
1006 1006 change = repo.changelog.read(n)
1007 1007 m = repo.manifest.read(change[0])
1008 1008 n = m[relpath(repo, [file])[0]]
1009 1009 except (hg.RepoError, KeyError):
1010 1010 n = r.lookup(rev)
1011 1011 else:
1012 1012 n = r.tip()
1013 1013 m = r.renamed(n)
1014 1014 if m:
1015 1015 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1016 1016 else:
1017 1017 ui.write(_("not renamed\n"))
1018 1018
1019 1019 def debugwalk(ui, repo, *pats, **opts):
1020 1020 """show how files match on given patterns"""
1021 1021 items = list(walk(repo, pats, opts))
1022 1022 if not items:
1023 1023 return
1024 1024 fmt = '%%s %%-%ds %%-%ds %%s' % (
1025 1025 max([len(abs) for (src, abs, rel, exact) in items]),
1026 1026 max([len(rel) for (src, abs, rel, exact) in items]))
1027 1027 for src, abs, rel, exact in items:
1028 1028 line = fmt % (src, abs, rel, exact and 'exact' or '')
1029 1029 ui.write("%s\n" % line.rstrip())
1030 1030
1031 1031 def diff(ui, repo, *pats, **opts):
1032 1032 """diff working directory (or selected files)
1033 1033
1034 1034 Show differences between revisions for the specified files.
1035 1035
1036 1036 Differences between files are shown using the unified diff format.
1037 1037
1038 1038 When two revision arguments are given, then changes are shown
1039 1039 between those revisions. If only one revision is specified then
1040 1040 that revision is compared to the working directory, and, when no
1041 1041 revisions are specified, the working directory files are compared
1042 1042 to its parent.
1043 1043
1044 1044 Without the -a option, diff will avoid generating diffs of files
1045 1045 it detects as binary. With -a, diff will generate a diff anyway,
1046 1046 probably with undesirable results.
1047 1047 """
1048 1048 node1, node2 = None, None
1049 1049 revs = [repo.lookup(x) for x in opts['rev']]
1050 1050
1051 1051 if len(revs) > 0:
1052 1052 node1 = revs[0]
1053 1053 if len(revs) > 1:
1054 1054 node2 = revs[1]
1055 1055 if len(revs) > 2:
1056 1056 raise util.Abort(_("too many revisions to diff"))
1057 1057
1058 1058 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1059 1059
1060 1060 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1061 1061 text=opts['text'])
1062 1062
1063 1063 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1064 1064 node = repo.lookup(changeset)
1065 1065 prev, other = repo.changelog.parents(node)
1066 1066 change = repo.changelog.read(node)
1067 1067
1068 1068 fp = make_file(repo, repo.changelog, opts['output'],
1069 1069 node=node, total=total, seqno=seqno,
1070 1070 revwidth=revwidth)
1071 1071 if fp != sys.stdout:
1072 1072 ui.note("%s\n" % fp.name)
1073 1073
1074 1074 fp.write("# HG changeset patch\n")
1075 1075 fp.write("# User %s\n" % change[1])
1076 1076 fp.write("# Node ID %s\n" % hex(node))
1077 1077 fp.write("# Parent %s\n" % hex(prev))
1078 1078 if other != nullid:
1079 1079 fp.write("# Parent %s\n" % hex(other))
1080 1080 fp.write(change[4].rstrip())
1081 1081 fp.write("\n\n")
1082 1082
1083 1083 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1084 1084 if fp != sys.stdout:
1085 1085 fp.close()
1086 1086
1087 1087 def export(ui, repo, *changesets, **opts):
1088 1088 """dump the header and diffs for one or more changesets
1089 1089
1090 1090 Print the changeset header and diffs for one or more revisions.
1091 1091
1092 1092 The information shown in the changeset header is: author,
1093 1093 changeset hash, parent and commit comment.
1094 1094
1095 1095 Output may be to a file, in which case the name of the file is
1096 1096 given using a format string. The formatting rules are as follows:
1097 1097
1098 1098 %% literal "%" character
1099 1099 %H changeset hash (40 bytes of hexadecimal)
1100 1100 %N number of patches being generated
1101 1101 %R changeset revision number
1102 1102 %b basename of the exporting repository
1103 1103 %h short-form changeset hash (12 bytes of hexadecimal)
1104 1104 %n zero-padded sequence number, starting at 1
1105 1105 %r zero-padded changeset revision number
1106 1106
1107 1107 Without the -a option, export will avoid generating diffs of files
1108 1108 it detects as binary. With -a, export will generate a diff anyway,
1109 1109 probably with undesirable results.
1110 1110 """
1111 1111 if not changesets:
1112 1112 raise util.Abort(_("export requires at least one changeset"))
1113 1113 seqno = 0
1114 1114 revs = list(revrange(ui, repo, changesets))
1115 1115 total = len(revs)
1116 1116 revwidth = max(map(len, revs))
1117 1117 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1118 1118 for cset in revs:
1119 1119 seqno += 1
1120 1120 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1121 1121
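# Illustrative usage, not part of the original file: invocations matching
# the format specs documented in the export docstring above.
#
#   hg export REV                          # patch for one revision on stdout
#   hg export -o '%R-%h.patch' REV         # writes <rev>-<shorthash>.patch
#   hg export -o '%n-of-%N.patch' 100:102  # 1-of-3.patch .. 3-of-3.patch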
1122 1122 def forget(ui, repo, *pats, **opts):
1123 1123 """don't add the specified files on the next commit
1124 1124
1125 1125 Undo an 'hg add' scheduled for the next commit.
1126 1126 """
1127 1127 forget = []
1128 1128 for src, abs, rel, exact in walk(repo, pats, opts):
1129 1129 if repo.dirstate.state(abs) == 'a':
1130 1130 forget.append(abs)
1131 1131 if ui.verbose or not exact:
1132 1132 ui.status(_('forgetting %s\n') % rel)
1133 1133 repo.forget(forget)
1134 1134
1135 1135 def grep(ui, repo, pattern, *pats, **opts):
1136 1136 """search for a pattern in specified files and revisions
1137 1137
1138 1138 Search revisions of files for a regular expression.
1139 1139
1140 1140 This command behaves differently than Unix grep. It only accepts
1141 1141 Python/Perl regexps. It searches repository history, not the
1142 1142 working directory. It always prints the revision number in which
1143 1143 a match appears.
1144 1144
1145 1145 By default, grep only prints output for the first revision of a
1146 1146 file in which it finds a match. To get it to print every revision
1147 1147 that contains a change in match status ("-" for a match that
1148 1148 becomes a non-match, or "+" for a non-match that becomes a match),
1149 1149 use the --all flag.
1150 1150 """
1151 1151 reflags = 0
1152 1152 if opts['ignore_case']:
1153 1153 reflags |= re.I
1154 1154 regexp = re.compile(pattern, reflags)
1155 1155 sep, eol = ':', '\n'
1156 1156 if opts['print0']:
1157 1157 sep = eol = '\0'
1158 1158
1159 1159 fcache = {}
1160 1160 def getfile(fn):
1161 1161 if fn not in fcache:
1162 1162 fcache[fn] = repo.file(fn)
1163 1163 return fcache[fn]
1164 1164
1165 1165 def matchlines(body):
1166 1166 begin = 0
1167 1167 linenum = 0
1168 1168 while True:
1169 1169 match = regexp.search(body, begin)
1170 1170 if not match:
1171 1171 break
1172 1172 mstart, mend = match.span()
1173 1173 linenum += body.count('\n', begin, mstart) + 1
1174 1174 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1175 1175 lend = body.find('\n', mend)
1176 1176 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1177 1177 begin = lend + 1
1178 1178
1179 1179 class linestate:
1180 1180 def __init__(self, line, linenum, colstart, colend):
1181 1181 self.line = line
1182 1182 self.linenum = linenum
1183 1183 self.colstart = colstart
1184 1184 self.colend = colend
1185 1185 def __eq__(self, other):
1186 1186 return self.line == other.line
1187 1187 def __hash__(self):
1188 1188 return hash(self.line)
1189 1189
1190 1190 matches = {}
1191 1191 def grepbody(fn, rev, body):
1192 1192 matches[rev].setdefault(fn, {})
1193 1193 m = matches[rev][fn]
1194 1194 for lnum, cstart, cend, line in matchlines(body):
1195 1195 s = linestate(line, lnum, cstart, cend)
1196 1196 m[s] = s
1197 1197
1198 1198 prev = {}
1199 1199 ucache = {}
1200 1200 def display(fn, rev, states, prevstates):
1201 1201 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1202 1202 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1203 1203 counts = {'-': 0, '+': 0}
1204 1204 filerevmatches = {}
1205 1205 for l in diff:
1206 1206 if incrementing or not opts['all']:
1207 1207 change = ((l in prevstates) and '-') or '+'
1208 1208 r = rev
1209 1209 else:
1210 1210 change = ((l in states) and '-') or '+'
1211 1211 r = prev[fn]
1212 1212 cols = [fn, str(r)]
1213 1213 if opts['line_number']: cols.append(str(l.linenum))
1214 1214 if opts['all']: cols.append(change)
1215 1215 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1216 1216 ucache))
1217 1217 if opts['files_with_matches']:
1218 1218 c = (fn, rev)
1219 1219 if c in filerevmatches: continue
1220 1220 filerevmatches[c] = 1
1221 1221 else:
1222 1222 cols.append(l.line)
1223 1223 ui.write(sep.join(cols), eol)
1224 1224 counts[change] += 1
1225 1225 return counts['+'], counts['-']
1226 1226
1227 1227 fstate = {}
1228 1228 skip = {}
1229 1229 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1230 1230 count = 0
1231 1231 incrementing = False
1232 1232 for st, rev, fns in changeiter:
1233 1233 if st == 'window':
1234 1234 incrementing = rev
1235 1235 matches.clear()
1236 1236 elif st == 'add':
1237 1237 change = repo.changelog.read(repo.lookup(str(rev)))
1238 1238 mf = repo.manifest.read(change[0])
1239 1239 matches[rev] = {}
1240 1240 for fn in fns:
1241 1241 if fn in skip: continue
1242 1242 fstate.setdefault(fn, {})
1243 1243 try:
1244 1244 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1245 1245 except KeyError:
1246 1246 pass
1247 1247 elif st == 'iter':
1248 1248 states = matches[rev].items()
1249 1249 states.sort()
1250 1250 for fn, m in states:
1251 1251 if fn in skip: continue
1252 1252 if incrementing or not opts['all'] or fstate[fn]:
1253 1253 pos, neg = display(fn, rev, m, fstate[fn])
1254 1254 count += pos + neg
1255 1255 if pos and not opts['all']:
1256 1256 skip[fn] = True
1257 1257 fstate[fn] = m
1258 1258 prev[fn] = rev
1259 1259
1260 1260 if not incrementing:
1261 1261 fstate = fstate.items()
1262 1262 fstate.sort()
1263 1263 for fn, state in fstate:
1264 1264 if fn in skip: continue
1265 1265 display(fn, rev, {}, state)
1266 1266 return (count == 0 and 1) or 0
1267 1267
1268 1268 def heads(ui, repo, **opts):
1269 1269 """show current repository heads
1270 1270
1271 1271 Show all repository head changesets.
1272 1272
1273 1273 Repository "heads" are changesets that don't have children
1274 1274 changesets. They are where development generally takes place and
1275 1275 are the usual targets for update and merge operations.
1276 1276 """
1277 1277 heads = repo.changelog.heads()
1278 1278 br = None
1279 1279 if opts['branches']:
1280 1280 br = repo.branchlookup(heads)
1281 1281 for n in heads:
1282 1282 show_changeset(ui, repo, changenode=n, brinfo=br)
1283 1283
1284 1284 def identify(ui, repo):
1285 1285 """print information about the working copy
1286 1286
1287 1287 Print a short summary of the current state of the repo.
1288 1288
1289 1289 This summary identifies the repository state using one or two parent
1290 1290 hash identifiers, followed by a "+" if there are uncommitted changes
1291 1291 in the working directory, followed by a list of tags for this revision.
1292 1292 """
1293 1293 parents = [p for p in repo.dirstate.parents() if p != nullid]
1294 1294 if not parents:
1295 1295 ui.write(_("unknown\n"))
1296 1296 return
1297 1297
1298 1298 hexfunc = ui.verbose and hex or short
1299 1299 (c, a, d, u) = repo.changes()
1300 1300 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1301 1301 (c or a or d) and "+" or "")]
1302 1302
1303 1303 if not ui.quiet:
1304 1304 # multiple tags for a single parent separated by '/'
1305 1305 parenttags = ['/'.join(tags)
1306 1306 for tags in map(repo.nodetags, parents) if tags]
1307 1307 # tags for multiple parents separated by ' + '
1308 1308 if parenttags:
1309 1309 output.append(' + '.join(parenttags))
1310 1310
1311 1311 ui.write("%s\n" % ' '.join(output))
1312 1312
1313 1313 def import_(ui, repo, patch1, *patches, **opts):
1314 1314 """import an ordered set of patches
1315 1315
1316 1316 Import a list of patches and commit them individually.
1317 1317
1318 1318 If there are outstanding changes in the working directory, import
1319 1319 will abort unless given the -f flag.
1320 1320
1321 1321 If a patch looks like a mail message (its first line starts with
1322 1322 "From " or looks like an RFC822 header), it will not be applied
1323 1323 unless the -f option is used. The importer neither parses nor
1324 1324 discards mail headers, so use -f only to override the "mailness"
1325 1325 safety check, not to import a real mail message.
1326 1326 """
1327 1327 patches = (patch1,) + patches
1328 1328
1329 1329 if not opts['force']:
1330 1330 (c, a, d, u) = repo.changes()
1331 1331 if c or a or d:
1332 1332 raise util.Abort(_("outstanding uncommitted changes"))
1333 1333
1334 1334 d = opts["base"]
1335 1335 strip = opts["strip"]
1336 1336
1337 1337 mailre = re.compile(r'(?:From |[\w-]+:)')
1338 1338
1339 1339 # attempt to detect the start of a patch
1340 1340 # (this heuristic is borrowed from quilt)
1341 1341 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1342 1342 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1343 1343 '(---|\*\*\*)[ \t])')
1344 1344
1345 1345 for patch in patches:
1346 1346 ui.status(_("applying %s\n") % patch)
1347 1347 pf = os.path.join(d, patch)
1348 1348
1349 1349 message = []
1350 1350 user = None
1351 1351 hgpatch = False
1352 1352 for line in file(pf):
1353 1353 line = line.rstrip()
1354 1354 if (not message and not hgpatch and
1355 1355 mailre.match(line) and not opts['force']):
1356 1356 if len(line) > 35: line = line[:32] + '...'
1357 1357 raise util.Abort(_('first line looks like a '
1358 1358 'mail header: ') + line)
1359 1359 if diffre.match(line):
1360 1360 break
1361 1361 elif hgpatch:
1362 1362 # parse values when importing the result of an hg export
1363 1363 if line.startswith("# User "):
1364 1364 user = line[7:]
1365 1365 ui.debug(_('User: %s\n') % user)
1366 1366 elif not line.startswith("# ") and line:
1367 1367 message.append(line)
1368 1368 hgpatch = False
1369 1369 elif line == '# HG changeset patch':
1370 1370 hgpatch = True
1371 1371 message = [] # We may have collected garbage
1372 1372 else:
1373 1373 message.append(line)
1374 1374
1375 1375 # make sure message isn't empty
1376 1376 if not message:
1377 1377 message = _("imported patch %s\n") % patch
1378 1378 else:
1379 1379 message = "%s\n" % '\n'.join(message)
1380 1380 ui.debug(_('message:\n%s\n') % message)
1381 1381
1382 1382 files = util.patch(strip, pf, ui)
1383 1383
1384 1384 if len(files) > 0:
1385 1385 addremove(ui, repo, *files)
1386 1386 repo.commit(files, message, user)
1387 1387
1388 1388 def incoming(ui, repo, source="default", **opts):
1389 1389 """show new changesets found in source
1390 1390
1391 1391 Show new changesets found in the specified repo or the default
1392 1392 pull repo. These are the changesets that would be pulled if a pull
1393 1393 was requested.
1394 1394
1395 1395 Currently only local repositories are supported.
1396 1396 """
1397 1397 source = ui.expandpath(source, repo.root)
1398 1398 other = hg.repository(ui, source)
1399 1399 if not other.local():
1400 1400 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1401 1401 o = repo.findincoming(other)
1402 1402 if not o:
1403 1403 return
1404 1404 o = other.changelog.nodesbetween(o)[0]
1405 1405 if opts['newest_first']:
1406 1406 o.reverse()
1407 1407 for n in o:
1408 1408 parents = [p for p in other.changelog.parents(n) if p != nullid]
1409 1409 if opts['no_merges'] and len(parents) == 2:
1410 1410 continue
1411 1411 show_changeset(ui, other, changenode=n)
1412 1412 if opts['patch']:
1413 1413 prev = (parents and parents[0]) or nullid
1414 1414 dodiff(ui, ui, other, prev, n)
1415 1415 ui.write("\n")
1416 1416
1417 1417 def init(ui, dest="."):
1418 1418 """create a new repository in the given directory
1419 1419
1420 1420 Initialize a new repository in the given directory. If the given
1421 1421 directory does not exist, it is created.
1422 1422
1423 1423 If no directory is given, the current directory is used.
1424 1424 """
1425 1425 if not os.path.exists(dest):
1426 1426 os.mkdir(dest)
1427 1427 hg.repository(ui, dest, create=1)
1428 1428
1429 1429 def locate(ui, repo, *pats, **opts):
1430 1430 """locate files matching specific patterns
1431 1431
1432 1432 Print all files under Mercurial control whose names match the
1433 1433 given patterns.
1434 1434
1435 1435 This command searches the current directory and its
1436 1436 subdirectories. To search an entire repository, move to the root
1437 1437 of the repository.
1438 1438
1439 1439 If no patterns are given to match, this command prints all file
1440 1440 names.
1441 1441
1442 1442 If you want to feed the output of this command into the "xargs"
1443 1443 command, use the "-0" option to both this command and "xargs".
1444 1444 This will avoid the problem of "xargs" treating single filenames
1445 1445 that contain white space as multiple filenames.
1446 1446 """
1447 1447 end = opts['print0'] and '\0' or '\n'
1448 1448
1449 1449 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1450 1450 if repo.dirstate.state(abs) == '?':
1451 1451 continue
1452 1452 if opts['fullpath']:
1453 1453 ui.write(os.path.join(repo.root, abs), end)
1454 1454 else:
1455 1455 ui.write(rel, end)
1456 1456
1457 1457 def log(ui, repo, *pats, **opts):
1458 1458 """show revision history of entire repository or files
1459 1459
1460 1460 Print the revision history of the specified files or the entire project.
1461 1461
1462 1462 By default this command outputs: changeset id and hash, tags,
1463 1463 parents, user, date and time, and a summary for each commit. The
1464 1464 -v switch adds some more detail, such as changed files, manifest
1465 1465 hashes or message signatures.
1466 1466 """
1467 1467 class dui:
1468 1468 # Implement and delegate some ui protocol. Save hunks of
1469 1469 # output for later display in the desired order.
1470 1470 def __init__(self, ui):
1471 1471 self.ui = ui
1472 1472 self.hunk = {}
1473 1473 def bump(self, rev):
1474 1474 self.rev = rev
1475 1475 self.hunk[rev] = []
1476 1476 def note(self, *args):
1477 1477 if self.verbose:
1478 1478 self.write(*args)
1479 1479 def status(self, *args):
1480 1480 if not self.quiet:
1481 1481 self.write(*args)
1482 1482 def write(self, *args):
1483 1483 self.hunk[self.rev].append(args)
1484 1484 def debug(self, *args):
1485 1485 if self.debugflag:
1486 1486 self.write(*args)
1487 1487 def __getattr__(self, key):
1488 1488 return getattr(self.ui, key)
1489 1489 cwd = repo.getcwd()
1490 1490 if not pats and cwd:
1491 1491 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1492 1492 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1493 1493 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1494 1494 pats, opts)
1495 1495 for st, rev, fns in changeiter:
1496 1496 if st == 'window':
1497 1497 du = dui(ui)
1498 1498 elif st == 'add':
1499 1499 du.bump(rev)
1500 1500 changenode = repo.changelog.node(rev)
1501 1501 parents = [p for p in repo.changelog.parents(changenode)
1502 1502 if p != nullid]
1503 1503 if opts['no_merges'] and len(parents) == 2:
1504 1504 continue
1505 1505 if opts['only_merges'] and len(parents) != 2:
1506 1506 continue
1507 1507
1508 1508 br = None
1509 1509 if opts['keyword']:
1510 1510 changes = repo.changelog.read(repo.changelog.node(rev))
1511 1511 miss = 0
1512 1512 for k in [kw.lower() for kw in opts['keyword']]:
1513 1513 if not (k in changes[1].lower() or
1514 1514 k in changes[4].lower() or
1515 1515 k in " ".join(changes[3][:20]).lower()):
1516 1516 miss = 1
1517 1517 break
1518 1518 if miss:
1519 1519 continue
1520 1520
1521 1521 if opts['branch']:
1522 1522 br = repo.branchlookup([repo.changelog.node(rev)])
1523 1523
1524 1524 show_changeset(du, repo, rev, brinfo=br)
1525 1525 if opts['patch']:
1526 1526 prev = (parents and parents[0]) or nullid
1527 1527 dodiff(du, du, repo, prev, changenode, fns)
1528 1528 du.write("\n\n")
1529 1529 elif st == 'iter':
1530 1530 for args in du.hunk[rev]:
1531 1531 ui.write(*args)
1532 1532
1533 1533 def manifest(ui, repo, rev=None):
1534 1534 """output the latest or given revision of the project manifest
1535 1535
1536 1536 Print a list of version controlled files for the given revision.
1537 1537
1538 1538 The manifest is the list of files being version controlled. If no revision
1539 1539 is given then the tip is used.
1540 1540 """
1541 1541 if rev:
1542 1542 try:
1543 1543 # assume all revision numbers are for changesets
1544 1544 n = repo.lookup(rev)
1545 1545 change = repo.changelog.read(n)
1546 1546 n = change[0]
1547 1547 except hg.RepoError:
1548 1548 n = repo.manifest.lookup(rev)
1549 1549 else:
1550 1550 n = repo.manifest.tip()
1551 1551 m = repo.manifest.read(n)
1552 1552 mf = repo.manifest.readflags(n)
1553 1553 files = m.keys()
1554 1554 files.sort()
1555 1555
1556 1556 for f in files:
1557 1557 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1558 1558
1559 1559 def outgoing(ui, repo, dest="default-push", **opts):
1560 1560 """show changesets not found in destination
1561 1561
1562 1562 Show changesets not found in the specified destination repo or the
1563 1563 default push repo. These are the changesets that would be pushed
1564 1564 if a push was requested.
1565 1565 """
1566 1566 dest = ui.expandpath(dest, repo.root)
1567 1567 other = hg.repository(ui, dest)
1568 1568 o = repo.findoutgoing(other)
1569 1569 o = repo.changelog.nodesbetween(o)[0]
1570 1570 if opts['newest_first']:
1571 1571 o.reverse()
1572 1572 for n in o:
1573 1573 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1574 1574 if opts['no_merges'] and len(parents) == 2:
1575 1575 continue
1576 1576 show_changeset(ui, repo, changenode=n)
1577 1577 if opts['patch']:
1578 1578 prev = (parents and parents[0]) or nullid
1579 1579 dodiff(ui, ui, repo, prev, n)
1580 1580 ui.write("\n")
1581 1581
1582 1582 def parents(ui, repo, rev=None):
1583 1583 """show the parents of the working dir or revision
1584 1584
1585 1585 Print the working directory's parent revisions.
1586 1586 """
1587 1587 if rev:
1588 1588 p = repo.changelog.parents(repo.lookup(rev))
1589 1589 else:
1590 1590 p = repo.dirstate.parents()
1591 1591
1592 1592 for n in p:
1593 1593 if n != nullid:
1594 1594 show_changeset(ui, repo, changenode=n)
1595 1595
1596 1596 def paths(ui, search=None):
1597 1597 """show definition of symbolic path names
1598 1598
1599 1599 Show definition of symbolic path name NAME. If no name is given, show
1600 1600 definitions of all available names.
1601 1601
1602 1602 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1603 1603 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1604 1604 """
1605 1605 try:
1606 1606 repo = hg.repository(ui=ui)
1607 1607 except hg.RepoError:
1608 1608 pass
1609 1609
1610 1610 if search:
1611 1611 for name, path in ui.configitems("paths"):
1612 1612 if name == search:
1613 1613 ui.write("%s\n" % path)
1614 1614 return
1615 1615 ui.warn(_("not found!\n"))
1616 1616 return 1
1617 1617 else:
1618 1618 for name, path in ui.configitems("paths"):
1619 1619 ui.write("%s = %s\n" % (name, path))
1620 1620
1621 1621 def pull(ui, repo, source="default", **opts):
1622 1622 """pull changes from the specified source
1623 1623
1624 1624 Pull changes from a remote repository to a local one.
1625 1625
1626 1626 This finds all changes from the repository at the specified path
1627 1627 or URL and adds them to the local repository. By default, this
1628 1628 does not update the copy of the project in the working directory.
1629 1629
1630 1630 Valid URLs are of the form:
1631 1631
1632 1632 local/filesystem/path
1633 1633 http://[user@]host[:port][/path]
1634 1634 https://[user@]host[:port][/path]
1635 1635 ssh://[user@]host[:port][/path]
1636 1636
1637 1637 SSH requires an accessible shell account on the destination machine
1638 1638 and a copy of hg in the remote path. With SSH, paths are relative
1639 1639 to the remote user's home directory by default; use two slashes at
1640 1640 the start of a path to specify it as relative to the filesystem root.
1641 1641 """
1642 1642 source = ui.expandpath(source, repo.root)
1643 1643 ui.status(_('pulling from %s\n') % (source))
1644 1644
1645 1645 if opts['ssh']:
1646 1646 ui.setconfig("ui", "ssh", opts['ssh'])
1647 1647 if opts['remotecmd']:
1648 1648 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1649 1649
1650 1650 other = hg.repository(ui, source)
1651 1651 revs = None
1652 1652 if opts['rev'] and not other.local():
1653 1653 raise util.Abort("pull -r doesn't work for remote repositories yet")
1654 1654 elif opts['rev']:
1655 1655 revs = [other.lookup(rev) for rev in opts['rev']]
1656 1656 r = repo.pull(other, heads=revs)
1657 1657 if not r:
1658 1658 if opts['update']:
1659 1659 return update(ui, repo)
1660 1660 else:
1661 1661 ui.status(_("(run 'hg update' to get a working copy)\n"))
1662 1662
1663 1663 return r
1664 1664
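A quick illustration of the SSH path rule described in the pull help text above (hypothetical host and paths, not taken from the source):

    ssh://user@example.com/projects/repo      relative to the remote user's home directory
    ssh://user@example.com//srv/hg/repo       leading double slash: absolute path /srv/hg/repo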
1665 1665 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1666 1666 """push changes to the specified destination
1667 1667
1668 1668 Push changes from the local repository to the given destination.
1669 1669
1670 1670 This is the symmetrical operation to pull. It moves
1671 1671 changes from the current repository to a different one. If the
1672 1672 destination is local this is identical to a pull in that directory
1673 1673 from the current one.
1674 1674
1675 1675 By default, push will refuse to run if it detects the result would
1676 1676 increase the number of remote heads. This generally indicates that
1677 1677 the client has forgotten to sync and merge before pushing.
1678 1678
1679 1679 Valid URLs are of the form:
1680 1680
1681 1681 local/filesystem/path
1682 1682 ssh://[user@]host[:port][/path]
1683 1683
1684 1684 SSH requires an accessible shell account on the destination
1685 1685 machine and a copy of hg in the remote path.
1686 1686 """
1687 1687 dest = ui.expandpath(dest, repo.root)
1688 1688 ui.status('pushing to %s\n' % (dest))
1689 1689
1690 1690 if ssh:
1691 1691 ui.setconfig("ui", "ssh", ssh)
1692 1692 if remotecmd:
1693 1693 ui.setconfig("ui", "remotecmd", remotecmd)
1694 1694
1695 1695 other = hg.repository(ui, dest)
1696 1696 r = repo.push(other, force)
1697 1697 return r
1698 1698
1699 1699 def rawcommit(ui, repo, *flist, **rc):
1700 1700 """raw commit interface
1701 1701
1702 1702 Lowlevel commit, for use in helper scripts.
1703 1703
1704 1704 This command is not intended to be used by normal users, as it is
1705 1705 primarily useful for importing from other SCMs.
1706 1706 """
1707 1707 message = rc['message']
1708 1708 if not message and rc['logfile']:
1709 1709 try:
1710 1710 message = open(rc['logfile']).read()
1711 1711 except IOError:
1712 1712 pass
1713 1713 if not message and not rc['logfile']:
1714 1714 raise util.Abort(_("missing commit message"))
1715 1715
1716 1716 files = relpath(repo, list(flist))
1717 1717 if rc['files']:
1718 1718 files += open(rc['files']).read().splitlines()
1719 1719
1720 1720 rc['parent'] = map(repo.lookup, rc['parent'])
1721 1721
1722 1722 try:
1723 1723 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1724 1724 except ValueError, inst:
1725 1725 raise util.Abort(str(inst))
1726 1726
1727 1727 def recover(ui, repo):
1728 1728 """roll back an interrupted transaction
1729 1729
1730 1730 Recover from an interrupted commit or pull.
1731 1731
1732 1732 This command tries to fix the repository status after an interrupted
1733 1733 operation. It should only be necessary when Mercurial suggests it.
1734 1734 """
1735 repo.recover()
1735 if repo.recover():
1736 return repo.verify()
1737 return False
1736 1738
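This hunk is the heart of the changeset: recover now reports whether a journal was actually rolled back and, only in that case, chains straight into a full verify. A minimal sketch of the resulting control flow (hypothetical wrapper name; the boolean contract of repo.recover() comes from the localrepo.py hunk later in this diff):

    def recover_and_verify(repo):
        # repo.recover() returns True when an interrupted journal was rolled
        # back, and False when there was nothing to recover.
        if repo.recover():
            # only a real rollback warrants the expensive integrity check
            return repo.verify()
        return False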
1737 1739 def remove(ui, repo, pat, *pats, **opts):
1738 1740 """remove the specified files on the next commit
1739 1741
1740 1742 Schedule the indicated files for removal from the repository.
1741 1743
1742 1744 This command schedules the files to be removed at the next commit.
1743 1745 This only removes files from the current branch, not from the
1744 1746 entire project history. If the files still exist in the working
1745 1747 directory, they will be deleted from it.
1746 1748 """
1747 1749 names = []
1748 1750 def okaytoremove(abs, rel, exact):
1749 1751 c, a, d, u = repo.changes(files = [abs])
1750 1752 reason = None
1751 1753 if c: reason = _('is modified')
1752 1754 elif a: reason = _('has been marked for add')
1753 1755 elif u: reason = _('is not managed')
1754 1756 if reason:
1755 1757 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1756 1758 else:
1757 1759 return True
1758 1760 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1759 1761 if okaytoremove(abs, rel, exact):
1760 1762 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1761 1763 names.append(abs)
1762 1764 repo.remove(names, unlink=True)
1763 1765
1764 1766 def rename(ui, repo, *pats, **opts):
1765 1767 """rename files; equivalent of copy + remove
1766 1768
1767 1769 Mark dest as copies of sources; mark sources for deletion. If
1768 1770 dest is a directory, copies are put in that directory. If dest is
1769 1771 a file, there can only be one source.
1770 1772
1771 1773 By default, this command copies the contents of files as they
1772 1774 stand in the working directory. If invoked with --after, the
1773 1775 operation is recorded, but no copying is performed.
1774 1776
1775 1777 This command takes effect in the next commit.
1776 1778
1777 1779 NOTE: This command should be treated as experimental. While it
1778 1780 should properly record renamed files, this information is not yet
1779 1781 fully used by merge, nor fully reported by log.
1780 1782 """
1781 1783 errs, copied = docopy(ui, repo, pats, opts)
1782 1784 names = []
1783 1785 for abs, rel, exact in copied:
1784 1786 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1785 1787 names.append(abs)
1786 1788 repo.remove(names, unlink=True)
1787 1789 return errs
1788 1790
1789 1791 def revert(ui, repo, *pats, **opts):
1790 1792 """revert modified files or dirs back to their unmodified states
1791 1793
1792 1794 Revert any uncommitted modifications made to the named files or
1793 1795 directories. This restores the contents of the affected files to
1794 1796 an unmodified state.
1795 1797
1796 1798 If a file has been deleted, it is recreated. If the executable
1797 1799 mode of a file was changed, it is reset.
1798 1800
1799 1801 If names are given, all files matching the names are reverted.
1800 1802
1801 1803 If no names are given, all files in the current directory and
1802 1804 its subdirectories are reverted.
1803 1805 """
1804 1806 node = opts['rev'] and repo.lookup(opts['rev']) or \
1805 1807 repo.dirstate.parents()[0]
1806 1808
1807 1809 files, choose, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1808 1810 (c, a, d, u) = repo.changes(match=choose)
1809 1811 repo.forget(a)
1810 1812 repo.undelete(d)
1811 1813
1812 1814 return repo.update(node, False, True, choose, False)
1813 1815
1814 1816 def root(ui, repo):
1815 1817 """print the root (top) of the current working dir
1816 1818
1817 1819 Print the root directory of the current repository.
1818 1820 """
1819 1821 ui.write(repo.root + "\n")
1820 1822
1821 1823 def serve(ui, repo, **opts):
1822 1824 """export the repository via HTTP
1823 1825
1824 1826 Start a local HTTP repository browser and pull server.
1825 1827
1826 1828 By default, the server logs accesses to stdout and errors to
1827 1829 stderr. Use the "-A" and "-E" options to log to files.
1828 1830 """
1829 1831
1830 1832 if opts["stdio"]:
1831 1833 fin, fout = sys.stdin, sys.stdout
1832 1834 sys.stdout = sys.stderr
1833 1835
1834 1836 # Prevent insertion/deletion of CRs
1835 1837 util.set_binary(fin)
1836 1838 util.set_binary(fout)
1837 1839
1838 1840 def getarg():
1839 1841 argline = fin.readline()[:-1]
1840 1842 arg, l = argline.split()
1841 1843 val = fin.read(int(l))
1842 1844 return arg, val
1843 1845 def respond(v):
1844 1846 fout.write("%d\n" % len(v))
1845 1847 fout.write(v)
1846 1848 fout.flush()
1847 1849
1848 1850 lock = None
1849 1851
1850 1852 while 1:
1851 1853 cmd = fin.readline()[:-1]
1852 1854 if cmd == '':
1853 1855 return
1854 1856 if cmd == "heads":
1855 1857 h = repo.heads()
1856 1858 respond(" ".join(map(hex, h)) + "\n")
1857 1859 if cmd == "lock":
1858 1860 lock = repo.lock()
1859 1861 respond("")
1860 1862 if cmd == "unlock":
1861 1863 if lock:
1862 1864 lock.release()
1863 1865 lock = None
1864 1866 respond("")
1865 1867 elif cmd == "branches":
1866 1868 arg, nodes = getarg()
1867 1869 nodes = map(bin, nodes.split(" "))
1868 1870 r = []
1869 1871 for b in repo.branches(nodes):
1870 1872 r.append(" ".join(map(hex, b)) + "\n")
1871 1873 respond("".join(r))
1872 1874 elif cmd == "between":
1873 1875 arg, pairs = getarg()
1874 1876 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1875 1877 r = []
1876 1878 for b in repo.between(pairs):
1877 1879 r.append(" ".join(map(hex, b)) + "\n")
1878 1880 respond("".join(r))
1879 1881 elif cmd == "changegroup":
1880 1882 nodes = []
1881 1883 arg, roots = getarg()
1882 1884 nodes = map(bin, roots.split(" "))
1883 1885
1884 1886 cg = repo.changegroup(nodes)
1885 1887 while 1:
1886 1888 d = cg.read(4096)
1887 1889 if not d:
1888 1890 break
1889 1891 fout.write(d)
1890 1892
1891 1893 fout.flush()
1892 1894
1893 1895 elif cmd == "addchangegroup":
1894 1896 if not lock:
1895 1897 respond("not locked")
1896 1898 continue
1897 1899 respond("")
1898 1900
1899 1901 r = repo.addchangegroup(fin)
1900 1902 respond("")
1901 1903
1902 1904 optlist = "name templates style address port ipv6 accesslog errorlog"
1903 1905 for o in optlist.split():
1904 1906 if opts[o]:
1905 1907 ui.setconfig("web", o, opts[o])
1906 1908
1907 1909 try:
1908 1910 httpd = hgweb.create_server(repo)
1909 1911 except socket.error, inst:
1910 1912 raise util.Abort('cannot start server: ' + inst.args[1])
1911 1913
1912 1914 if ui.verbose:
1913 1915 addr, port = httpd.socket.getsockname()
1914 1916 if addr == '0.0.0.0':
1915 1917 addr = socket.gethostname()
1916 1918 else:
1917 1919 try:
1918 1920 addr = socket.gethostbyaddr(addr)[0]
1919 1921 except socket.error:
1920 1922 pass
1921 1923 if port != 80:
1922 1924 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1923 1925 else:
1924 1926 ui.status(_('listening at http://%s/\n') % addr)
1925 1927 httpd.serve_forever()
1926 1928
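The --stdio branch above implements a simple line-oriented wire protocol: the client sends a command name on its own line (plus, for some commands, a length-prefixed argument), and the server answers with a decimal byte count followed by that many bytes of payload. A rough client-side sketch of a single "heads" exchange, assuming a local hg that still speaks this early protocol (illustrative only; the ssh transport normally sits in between):

    import subprocess

    proc = subprocess.Popen(["hg", "serve", "--stdio"],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    proc.stdin.write(b"heads\n")              # command name, newline-terminated
    proc.stdin.flush()
    length = int(proc.stdout.readline())      # reply is "<byte count>\n<payload>"
    heads = proc.stdout.read(length).split()  # space-separated hex node ids
    proc.stdin.close()                        # empty command line ends the session
    proc.wait()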
1927 1929 def status(ui, repo, *pats, **opts):
1928 1930 """show changed files in the working directory
1929 1931
1930 1932 Show changed files in the working directory. If no names are
1931 1933 given, all files are shown. Otherwise, only files matching the
1932 1934 given names are shown.
1933 1935
1934 1936 The codes used to show the status of files are:
1935 1937 M = modified
1936 1938 A = added
1937 1939 R = removed
1938 1940 ? = not tracked
1939 1941 """
1940 1942
1941 1943 cwd = repo.getcwd()
1942 1944 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1943 1945 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1944 1946 for n in repo.changes(files=files, match=matchfn)]
1945 1947
1946 1948 changetypes = [(_('modified'), 'M', c),
1947 1949 (_('added'), 'A', a),
1948 1950 (_('removed'), 'R', d),
1949 1951 (_('unknown'), '?', u)]
1950 1952
1951 1953 end = opts['print0'] and '\0' or '\n'
1952 1954
1953 1955 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1954 1956 or changetypes):
1955 1957 if opts['no_status']:
1956 1958 format = "%%s%s" % end
1957 1959 else:
1958 1960 format = "%s %%s%s" % (char, end)
1959 1961
1960 1962 for f in changes:
1961 1963 ui.write(format % f)
1962 1964
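The escaped %%s in the two format strings above is easy to misread: with status codes shown, each output line is "<code> <filename><end>"; with -n/--no-status it is just "<filename><end>", where <end> is a NUL byte under --print0 and a newline otherwise. A tiny illustration with hypothetical values:

    char, end = 'M', '\n'
    with_status = "%s %%s%s" % (char, end)   # -> "M %s\n"
    no_status = "%%s%s" % end                # -> "%s\n"
    line = with_status % "commands.py"       # -> "M commands.py\n"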
1963 1965 def tag(ui, repo, name, rev=None, **opts):
1964 1966 """add a tag for the current tip or a given revision
1965 1967
1966 1968 Name a particular revision using <name>.
1967 1969
1968 1970 Tags are used to name particular revisions of the repository and are
1969 1971 very useful to compare different revisions, to go back to significant
1970 1972 earlier versions or to mark branch points as releases, etc.
1971 1973
1972 1974 If no revision is given, the tip is used.
1973 1975
1974 1976 To facilitate version control, distribution, and merging of tags,
1975 1977 they are stored as a file named ".hgtags" which is managed
1976 1978 similarly to other project files and can be hand-edited if
1977 1979 necessary.
1978 1980 """
1979 1981 if name == "tip":
1980 1982 raise util.Abort(_("the name 'tip' is reserved"))
1981 1983 if 'rev' in opts:
1982 1984 rev = opts['rev']
1983 1985 if rev:
1984 1986 r = hex(repo.lookup(rev))
1985 1987 else:
1986 1988 r = hex(repo.changelog.tip())
1987 1989
1988 1990 if name.find(revrangesep) >= 0:
1989 1991 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
1990 1992
1991 1993 if opts['local']:
1992 1994 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1993 1995 return
1994 1996
1995 1997 (c, a, d, u) = repo.changes()
1996 1998 for x in (c, a, d, u):
1997 1999 if ".hgtags" in x:
1998 2000 raise util.Abort(_("working copy of .hgtags is changed "
1999 2001 "(please commit .hgtags manually)"))
2000 2002
2001 2003 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2002 2004 if repo.dirstate.state(".hgtags") == '?':
2003 2005 repo.add([".hgtags"])
2004 2006
2005 2007 message = (opts['message'] or
2006 2008 _("Added tag %s for changeset %s") % (name, r))
2007 2009 try:
2008 2010 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2009 2011 except ValueError, inst:
2010 2012 raise util.Abort(str(inst))
2011 2013
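As the docstring says, regular tags are stored in a tracked .hgtags file (local tags go to .hg/localtags instead), one "<40-character hex changeset id> <tag name>" pair per line, with later lines taking precedence. A hedged sketch of appending such an entry by hand, mirroring the repo.wfile(".hgtags", "ab") call above (hypothetical node and tag name):

    node_hex = "0123456789abcdef0123456789abcdef01234567"   # 40 hex digits
    tag_name = "release-0.7"
    open(".hgtags", "a").write("%s %s\n" % (node_hex, tag_name))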
2012 2014 def tags(ui, repo):
2013 2015 """list repository tags
2014 2016
2015 2017 List the repository tags.
2016 2018
2017 2019 This lists both regular and local tags.
2018 2020 """
2019 2021
2020 2022 l = repo.tagslist()
2021 2023 l.reverse()
2022 2024 for t, n in l:
2023 2025 try:
2024 2026 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2025 2027 except KeyError:
2026 2028 r = " ?:?"
2027 2029 ui.write("%-30s %s\n" % (t, r))
2028 2030
2029 2031 def tip(ui, repo):
2030 2032 """show the tip revision
2031 2033
2032 2034 Show the tip revision.
2033 2035 """
2034 2036 n = repo.changelog.tip()
2035 2037 show_changeset(ui, repo, changenode=n)
2036 2038
2037 2039 def unbundle(ui, repo, fname):
2038 2040 """apply a changegroup file
2039 2041
2040 2042 Apply a compressed changegroup file generated by the bundle
2041 2043 command.
2042 2044 """
2043 2045 f = urllib.urlopen(fname)
2044 2046
2045 2047 if f.read(4) != "HG10":
2046 2048 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2047 2049
2048 2050 def bzgenerator(f):
2049 2051 zd = bz2.BZ2Decompressor()
2050 2052 for chunk in f:
2051 2053 yield zd.decompress(chunk)
2052 2054
2053 2055 bzgen = bzgenerator(util.filechunkiter(f, 4096))
2054 2056 repo.addchangegroup(util.chunkbuffer(bzgen))
2055 2057
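unbundle expects a four-byte "HG10" magic header followed by a bzip2-compressed changegroup, and decompresses it incrementally rather than reading the whole file into memory. A stripped-down reader following the same pattern (hypothetical helper, error handling omitted):

    import bz2

    def read_bundle(path, chunksize=4096):
        # return the decompressed changegroup from a bzip2 'HG10' bundle file
        f = open(path, "rb")
        if f.read(4) != b"HG10":
            raise ValueError("%s: not a Mercurial bundle file" % path)
        zd = bz2.BZ2Decompressor()
        out = []
        while True:
            chunk = f.read(chunksize)
            if not chunk:
                break
            out.append(zd.decompress(chunk))
        f.close()
        return b"".join(out)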
2056 2058 def undo(ui, repo):
2057 2059 """undo the last commit or pull
2058 2060
2059 2061 Roll back the last pull or commit transaction on the
2060 2062 repository, restoring the project to its earlier state.
2061 2063
2062 2064 This command should be used with care. There is only one level of
2063 2065 undo and there is no redo.
2064 2066
2065 2067 This command is not intended for use on public repositories. Once
2066 2068 a change is visible for pull by other users, undoing it locally is
2067 2069 ineffective.
2068 2070 """
2069 2071 repo.undo()
2070 2072
2071 2073 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
2072 2074 """update or merge working directory
2073 2075
2074 2076 Update the working directory to the specified revision.
2075 2077
2076 2078 If there are no outstanding changes in the working directory and
2077 2079 there is a linear relationship between the current version and the
2078 2080 requested version, the result is the requested version.
2079 2081
2080 2082 Otherwise the result is a merge between the contents of the
2081 2083 current working directory and the requested version. Files that
2082 2084 changed between either parent are marked as changed for the next
2083 2085 commit and a commit must be performed before any further updates
2084 2086 are allowed.
2085 2087
2086 2088 By default, update will refuse to run if doing so would require
2087 2089 merging or discarding local changes.
2088 2090 """
2089 2091 if branch:
2090 2092 br = repo.branchlookup(branch=branch)
2091 2093 found = []
2092 2094 for x in br:
2093 2095 if branch in br[x]:
2094 2096 found.append(x)
2095 2097 if len(found) > 1:
2096 2098 ui.warn(_("Found multiple heads for %s\n") % branch)
2097 2099 for x in found:
2098 2100 show_changeset(ui, repo, changenode=x, brinfo=br)
2099 2101 return 1
2100 2102 if len(found) == 1:
2101 2103 node = found[0]
2102 2104 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2103 2105 else:
2104 2106 ui.warn(_("branch %s not found\n") % (branch))
2105 2107 return 1
2106 2108 else:
2107 2109 node = node and repo.lookup(node) or repo.changelog.tip()
2108 2110 return repo.update(node, allow=merge, force=clean)
2109 2111
2110 2112 def verify(ui, repo):
2111 2113 """verify the integrity of the repository
2112 2114
2113 2115 Verify the integrity of the current repository.
2114 2116
2115 2117 This will perform an extensive check of the repository's
2116 2118 integrity, validating the hashes and checksums of each entry in
2117 2119 the changelog, manifest, and tracked files, as well as the
2118 2120 integrity of their crosslinks and indices.
2119 2121 """
2120 2122 return repo.verify()
2121 2123
2122 2124 # Command options and aliases are listed here, alphabetically
2123 2125
2124 2126 table = {
2125 2127 "^add":
2126 2128 (add,
2127 2129 [('I', 'include', [], _('include names matching the given patterns')),
2128 2130 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2129 2131 "hg add [OPTION]... [FILE]..."),
2130 2132 "addremove":
2131 2133 (addremove,
2132 2134 [('I', 'include', [], _('include names matching the given patterns')),
2133 2135 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2134 2136 "hg addremove [OPTION]... [FILE]..."),
2135 2137 "^annotate":
2136 2138 (annotate,
2137 2139 [('r', 'rev', '', _('annotate the specified revision')),
2138 2140 ('a', 'text', None, _('treat all files as text')),
2139 2141 ('u', 'user', None, _('list the author')),
2140 2142 ('n', 'number', None, _('list the revision number (default)')),
2141 2143 ('c', 'changeset', None, _('list the changeset')),
2142 2144 ('I', 'include', [], _('include names matching the given patterns')),
2143 2145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2144 2146 _('hg annotate [OPTION]... FILE...')),
2145 2147 "bundle":
2146 2148 (bundle,
2147 2149 [],
2148 2150 _('hg bundle FILE DEST')),
2149 2151 "cat":
2150 2152 (cat,
2151 2153 [('I', 'include', [], _('include names matching the given patterns')),
2152 2154 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2153 2155 ('o', 'output', "", _('print output to file with formatted name')),
2154 2156 ('r', 'rev', '', _('print the given revision'))],
2155 2157 _('hg cat [OPTION]... FILE...')),
2156 2158 "^clone":
2157 2159 (clone,
2158 2160 [('U', 'noupdate', None, _('do not update the new working directory')),
2159 2161 ('e', 'ssh', "", _('specify ssh command to use')),
2160 2162 ('', 'pull', None, _('use pull protocol to copy metadata')),
2161 2163 ('r', 'rev', [], _('a changeset you would like to have after cloning')),
2162 2164 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2163 2165 _('hg clone [OPTION]... SOURCE [DEST]')),
2164 2166 "^commit|ci":
2165 2167 (commit,
2166 2168 [('A', 'addremove', None, _('run addremove during commit')),
2167 2169 ('I', 'include', [], _('include names matching the given patterns')),
2168 2170 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2169 2171 ('m', 'message', "", _('use <text> as commit message')),
2170 2172 ('l', 'logfile', "", _('read the commit message from <file>')),
2171 2173 ('d', 'date', "", _('record datecode as commit date')),
2172 2174 ('u', 'user', "", _('record user as committer'))],
2173 2175 _('hg commit [OPTION]... [FILE]...')),
2174 2176 "copy|cp": (copy,
2175 2177 [('I', 'include', [], _('include names matching the given patterns')),
2176 2178 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2177 2179 ('A', 'after', None, _('record a copy that has already occurred')),
2178 2180 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2179 2181 _('hg copy [OPTION]... [SOURCE]... DEST')),
2180 2182 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2181 2183 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2182 2184 "debugconfig": (debugconfig, [], _('debugconfig')),
2183 2185 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2184 2186 "debugstate": (debugstate, [], _('debugstate')),
2185 2187 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2186 2188 "debugindex": (debugindex, [], _('debugindex FILE')),
2187 2189 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2188 2190 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2189 2191 "debugwalk":
2190 2192 (debugwalk,
2191 2193 [('I', 'include', [], _('include names matching the given patterns')),
2192 2194 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2193 2195 _('debugwalk [OPTION]... [FILE]...')),
2194 2196 "^diff":
2195 2197 (diff,
2196 2198 [('r', 'rev', [], _('revision')),
2197 2199 ('a', 'text', None, _('treat all files as text')),
2198 2200 ('I', 'include', [], _('include names matching the given patterns')),
2199 2201 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2200 2202 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2201 2203 "^export":
2202 2204 (export,
2203 2205 [('o', 'output', "", _('print output to file with formatted name')),
2204 2206 ('a', 'text', None, _('treat all files as text'))],
2205 2207 "hg export [-a] [-o OUTFILE] REV..."),
2206 2208 "forget":
2207 2209 (forget,
2208 2210 [('I', 'include', [], _('include names matching the given patterns')),
2209 2211 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2210 2212 "hg forget [OPTION]... FILE..."),
2211 2213 "grep":
2212 2214 (grep,
2213 2215 [('0', 'print0', None, _('end fields with NUL')),
2214 2216 ('I', 'include', [], _('include names matching the given patterns')),
2215 2217 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2216 2218 ('', 'all', None, _('print all revisions that match')),
2217 2219 ('i', 'ignore-case', None, _('ignore case when matching')),
2218 2220 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2219 2221 ('n', 'line-number', None, _('print matching line numbers')),
2220 2222 ('r', 'rev', [], _('search in given revision range')),
2221 2223 ('u', 'user', None, _('print user who committed change'))],
2222 2224 "hg grep [OPTION]... PATTERN [FILE]..."),
2223 2225 "heads":
2224 2226 (heads,
2225 2227 [('b', 'branches', None, _('find branch info'))],
2226 2228 _('hg heads [-b]')),
2227 2229 "help": (help_, [], _('hg help [COMMAND]')),
2228 2230 "identify|id": (identify, [], _('hg identify')),
2229 2231 "import|patch":
2230 2232 (import_,
2231 2233 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2232 2234 _('meaning as the corresponding patch option')),
2233 2235 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2234 2236 ('b', 'base', "", _('base path'))],
2235 2237 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2236 2238 "incoming|in": (incoming,
2237 2239 [('M', 'no-merges', None, _("do not show merges")),
2238 2240 ('p', 'patch', None, _('show patch')),
2239 2241 ('n', 'newest-first', None, _('show newest record first'))],
2240 2242 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2241 2243 "^init": (init, [], _('hg init [DEST]')),
2242 2244 "locate":
2243 2245 (locate,
2244 2246 [('r', 'rev', '', _('search the repository as it stood at rev')),
2245 2247 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2246 2248 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2247 2249 ('I', 'include', [], _('include names matching the given patterns')),
2248 2250 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2249 2251 _('hg locate [OPTION]... [PATTERN]...')),
2250 2252 "^log|history":
2251 2253 (log,
2252 2254 [('I', 'include', [], _('include names matching the given patterns')),
2253 2255 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2254 2256 ('b', 'branch', None, _('show branches')),
2255 2257 ('k', 'keyword', [], _('search for a keyword')),
2256 2258 ('r', 'rev', [], _('show the specified revision or range')),
2257 2259 ('M', 'no-merges', None, _("do not show merges")),
2258 2260 ('m', 'only-merges', None, _("show only merges")),
2259 2261 ('p', 'patch', None, _('show patch'))],
2260 2262 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2261 2263 "manifest": (manifest, [], _('hg manifest [REV]')),
2262 2264 "outgoing|out": (outgoing,
2263 2265 [('M', 'no-merges', None, _("do not show merges")),
2264 2266 ('p', 'patch', None, _('show patch')),
2265 2267 ('n', 'newest-first', None, _('show newest record first'))],
2266 2268 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2267 2269 "^parents": (parents, [], _('hg parents [REV]')),
2268 2270 "paths": (paths, [], _('hg paths [NAME]')),
2269 2271 "^pull":
2270 2272 (pull,
2271 2273 [('u', 'update', None, _('update the working directory to tip after pull')),
2272 2274 ('e', 'ssh', "", _('specify ssh command to use')),
2273 2275 ('r', 'rev', [], _('a specific revision you would like to pull')),
2274 2276 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2275 2277 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2276 2278 "^push":
2277 2279 (push,
2278 2280 [('f', 'force', None, _('force push')),
2279 2281 ('e', 'ssh', "", _('specify ssh command to use')),
2280 2282 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2281 2283 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2282 2284 "rawcommit":
2283 2285 (rawcommit,
2284 2286 [('p', 'parent', [], _('parent')),
2285 2287 ('d', 'date', "", _('date code')),
2286 2288 ('u', 'user', "", _('user')),
2287 2289 ('F', 'files', "", _('file list')),
2288 2290 ('m', 'message', "", _('commit message')),
2289 2291 ('l', 'logfile', "", _('commit message file'))],
2290 2292 _('hg rawcommit [OPTION]... [FILE]...')),
2291 2293 "recover": (recover, [], _("hg recover")),
2292 2294 "^remove|rm": (remove,
2293 2295 [('I', 'include', [], _('include names matching the given patterns')),
2294 2296 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2295 2297 _("hg remove [OPTION]... FILE...")),
2296 2298 "rename|mv": (rename,
2297 2299 [('I', 'include', [], _('include names matching the given patterns')),
2298 2300 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2299 2301 ('A', 'after', None, _('record a rename that has already occurred')),
2300 2302 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2301 2303 _('hg rename [OPTION]... [SOURCE]... DEST')),
2302 2304 "^revert":
2303 2305 (revert,
2304 2306 [('I', 'include', [], _('include names matching the given patterns')),
2305 2307 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2306 2308 ("r", "rev", "", _("revision to revert to"))],
2307 2309 _("hg revert [-n] [-r REV] [NAME]...")),
2308 2310 "root": (root, [], _("hg root")),
2309 2311 "^serve":
2310 2312 (serve,
2311 2313 [('A', 'accesslog', '', _('name of access log file to write to')),
2312 2314 ('E', 'errorlog', '', _('name of error log file to write to')),
2313 2315 ('p', 'port', 0, _('port to use (default: 8000)')),
2314 2316 ('a', 'address', '', _('address to use')),
2315 2317 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2316 2318 ('', 'stdio', None, _('for remote clients')),
2317 2319 ('t', 'templates', "", _('web templates to use')),
2318 2320 ('', 'style', "", _('template style to use')),
2319 2321 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2320 2322 _("hg serve [OPTION]...")),
2321 2323 "^status|st":
2322 2324 (status,
2323 2325 [('m', 'modified', None, _('show only modified files')),
2324 2326 ('a', 'added', None, _('show only added files')),
2325 2327 ('r', 'removed', None, _('show only removed files')),
2326 2328 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2327 2329 ('n', 'no-status', None, _('hide status prefix')),
2328 2330 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2329 2331 ('I', 'include', [], _('include names matching the given patterns')),
2330 2332 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2331 2333 _("hg status [OPTION]... [FILE]...")),
2332 2334 "tag":
2333 2335 (tag,
2334 2336 [('l', 'local', None, _('make the tag local')),
2335 2337 ('m', 'message', "", _('message for tag commit log entry')),
2336 2338 ('d', 'date', "", _('record datecode as commit date')),
2337 2339 ('u', 'user', "", _('record user as committer')),
2338 2340 ('r', 'rev', "", _('revision to tag'))],
2339 2341 _('hg tag [OPTION]... NAME [REV]')),
2340 2342 "tags": (tags, [], _('hg tags')),
2341 2343 "tip": (tip, [], _('hg tip')),
2342 2344 "unbundle":
2343 2345 (unbundle,
2344 2346 [],
2345 2347 _('hg unbundle FILE')),
2346 2348 "undo": (undo, [], _('hg undo')),
2347 2349 "^update|up|checkout|co":
2348 2350 (update,
2349 2351 [('b', 'branch', "", _('checkout the head of a specific branch')),
2350 2352 ('m', 'merge', None, _('allow merging of branches')),
2351 2353 ('C', 'clean', None, _('overwrite locally modified files'))],
2352 2354 _('hg update [-b TAG] [-m] [-C] [REV]')),
2353 2355 "verify": (verify, [], _('hg verify')),
2354 2356 "version": (show_version, [], _('hg version')),
2355 2357 }
2356 2358
2357 2359 globalopts = [
2358 2360 ('R', 'repository', "", _("repository root directory")),
2359 2361 ('', 'cwd', '', _("change working directory")),
2360 2362 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2361 2363 ('q', 'quiet', None, _("suppress output")),
2362 2364 ('v', 'verbose', None, _("enable additional output")),
2363 2365 ('', 'debug', None, _("enable debugging output")),
2364 2366 ('', 'debugger', None, _("start debugger")),
2365 2367 ('', 'traceback', None, _("print traceback on exception")),
2366 2368 ('', 'time', None, _("time how long the command takes")),
2367 2369 ('', 'profile', None, _("print command execution profile")),
2368 2370 ('', 'version', None, _("output version information and exit")),
2369 2371 ('h', 'help', None, _("display help and exit")),
2370 2372 ]
2371 2373
2372 2374 norepo = ("clone init version help debugancestor debugconfig debugdata"
2373 2375 " debugindex debugindexdot paths")
2374 2376
2375 2377 def find(cmd):
2376 2378 choice = []
2377 2379 for e in table.keys():
2378 2380 aliases = e.lstrip("^").split("|")
2379 2381 if cmd in aliases:
2380 2382 return e, table[e]
2381 2383 for a in aliases:
2382 2384 if a.startswith(cmd):
2383 2385 choice.append(e)
2384 2386 if len(choice) == 1:
2385 2387 e = choice[0]
2386 2388 return e, table[e]
2387 2389
2388 2390 raise UnknownCommand(cmd)
2389 2391
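Each key in the table encodes the canonical command name plus its aliases, separated by "|", with a leading "^" marking the commands listed in the short help; find() strips the "^", tries exact alias matches first, and then accepts any unambiguous prefix. Based on the table above, for example:

    find("ci")    # -> ("^commit|ci", ...): exact alias match
    find("rem")   # -> ("^remove|rm", ...): unambiguous prefix of "remove"
    find("re")    # raises UnknownCommand: prefix of recover, remove, rename, revert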
2390 2392 class SignalInterrupt(Exception):
2391 2393 """Exception raised on SIGTERM and SIGHUP."""
2392 2394
2393 2395 def catchterm(*args):
2394 2396 raise SignalInterrupt
2395 2397
2396 2398 def run():
2397 2399 sys.exit(dispatch(sys.argv[1:]))
2398 2400
2399 2401 class ParseError(Exception):
2400 2402 """Exception raised on errors in parsing the command line."""
2401 2403
2402 2404 def parse(ui, args):
2403 2405 options = {}
2404 2406 cmdoptions = {}
2405 2407
2406 2408 try:
2407 2409 args = fancyopts.fancyopts(args, globalopts, options)
2408 2410 except fancyopts.getopt.GetoptError, inst:
2409 2411 raise ParseError(None, inst)
2410 2412
2411 2413 if args:
2412 2414 cmd, args = args[0], args[1:]
2413 2415 defaults = ui.config("defaults", cmd)
2414 2416 if defaults:
2415 2417 # reparse with command defaults added
2416 2418 args = [cmd] + defaults.split() + args
2417 2419 try:
2418 2420 args = fancyopts.fancyopts(args, globalopts, options)
2419 2421 except fancyopts.getopt.GetoptError, inst:
2420 2422 raise ParseError(None, inst)
2421 2423
2422 2424 cmd, args = args[0], args[1:]
2423 2425
2424 2426 i = find(cmd)[1]
2425 2427 c = list(i[1])
2426 2428 else:
2427 2429 cmd = None
2428 2430 c = []
2429 2431
2430 2432 # combine global options into local
2431 2433 for o in globalopts:
2432 2434 c.append((o[0], o[1], options[o[1]], o[3]))
2433 2435
2434 2436 try:
2435 2437 args = fancyopts.fancyopts(args, c, cmdoptions)
2436 2438 except fancyopts.getopt.GetoptError, inst:
2437 2439 raise ParseError(cmd, inst)
2438 2440
2439 2441 # separate global options back out
2440 2442 for o in globalopts:
2441 2443 n = o[1]
2442 2444 options[n] = cmdoptions[n]
2443 2445 del cmdoptions[n]
2444 2446
2445 2447 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2446 2448
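Note how parse() re-runs option parsing after splicing in any [defaults] entry for the command, so per-command defaults behave exactly as if they had been typed on the command line. With a hypothetical hgrc entry such as:

    [defaults]
    log = -v -M

'hg log commands.py' is parsed as if the user had typed 'hg log -v -M commands.py'.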
2447 2449 def dispatch(args):
2448 2450 signal.signal(signal.SIGTERM, catchterm)
2449 2451 try:
2450 2452 signal.signal(signal.SIGHUP, catchterm)
2451 2453 except AttributeError:
2452 2454 pass
2453 2455
2454 2456 try:
2455 2457 u = ui.ui()
2456 2458 except util.Abort, inst:
2457 2459 sys.stderr.write(_("abort: %s\n") % inst)
2458 2460 sys.exit(1)
2459 2461
2460 2462 external = []
2461 2463 for x in u.extensions():
2462 2464 def on_exception(Exception, inst):
2463 2465 u.warn(_("*** failed to import extension %s\n") % x[1])
2464 2466 u.warn("%s\n" % inst)
2465 2467 if "--traceback" in sys.argv[1:]:
2466 2468 traceback.print_exc()
2467 2469 if x[1]:
2468 2470 try:
2469 2471 mod = imp.load_source(x[0], x[1])
2470 2472 except Exception, inst:
2471 2473 on_exception(Exception, inst)
2472 2474 continue
2473 2475 else:
2474 2476 def importh(name):
2475 2477 mod = __import__(name)
2476 2478 components = name.split('.')
2477 2479 for comp in components[1:]:
2478 2480 mod = getattr(mod, comp)
2479 2481 return mod
2480 2482 try:
2481 2483 mod = importh(x[0])
2482 2484 except Exception, inst:
2483 2485 on_exception(Exception, inst)
2484 2486 continue
2485 2487
2486 2488 external.append(mod)
2487 2489 for x in external:
2488 2490 cmdtable = getattr(x, 'cmdtable', {})
2489 2491 for t in cmdtable:
2490 2492 if t in table:
2491 2493 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2492 2494 table.update(cmdtable)
2493 2495
2494 2496 try:
2495 2497 cmd, func, args, options, cmdoptions = parse(u, args)
2496 2498 except ParseError, inst:
2497 2499 if inst.args[0]:
2498 2500 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2499 2501 help_(u, inst.args[0])
2500 2502 else:
2501 2503 u.warn(_("hg: %s\n") % inst.args[1])
2502 2504 help_(u, 'shortlist')
2503 2505 sys.exit(-1)
2504 2506 except UnknownCommand, inst:
2505 2507 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2506 2508 help_(u, 'shortlist')
2507 2509 sys.exit(1)
2508 2510
2509 2511 if options["time"]:
2510 2512 def get_times():
2511 2513 t = os.times()
2512 2514 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2513 2515 t = (t[0], t[1], t[2], t[3], time.clock())
2514 2516 return t
2515 2517 s = get_times()
2516 2518 def print_time():
2517 2519 t = get_times()
2518 2520 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2519 2521 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2520 2522 atexit.register(print_time)
2521 2523
2522 2524 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2523 2525 not options["noninteractive"])
2524 2526
2525 2527 # enter the debugger before command execution
2526 2528 if options['debugger']:
2527 2529 pdb.set_trace()
2528 2530
2529 2531 try:
2530 2532 try:
2531 2533 if options['help']:
2532 2534 help_(u, cmd, options['version'])
2533 2535 sys.exit(0)
2534 2536 elif options['version']:
2535 2537 show_version(u)
2536 2538 sys.exit(0)
2537 2539 elif not cmd:
2538 2540 help_(u, 'shortlist')
2539 2541 sys.exit(0)
2540 2542
2541 2543 if options['cwd']:
2542 2544 try:
2543 2545 os.chdir(options['cwd'])
2544 2546 except OSError, inst:
2545 2547 raise util.Abort('%s: %s' %
2546 2548 (options['cwd'], inst.strerror))
2547 2549
2548 2550 if cmd not in norepo.split():
2549 2551 path = options["repository"] or ""
2550 2552 repo = hg.repository(ui=u, path=path)
2551 2553 for x in external:
2552 2554 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2553 2555 d = lambda: func(u, repo, *args, **cmdoptions)
2554 2556 else:
2555 2557 d = lambda: func(u, *args, **cmdoptions)
2556 2558
2557 2559 if options['profile']:
2558 2560 import hotshot, hotshot.stats
2559 2561 prof = hotshot.Profile("hg.prof")
2560 2562 r = prof.runcall(d)
2561 2563 prof.close()
2562 2564 stats = hotshot.stats.load("hg.prof")
2563 2565 stats.strip_dirs()
2564 2566 stats.sort_stats('time', 'calls')
2565 2567 stats.print_stats(40)
2566 2568 return r
2567 2569 else:
2568 2570 return d()
2569 2571 except:
2570 2572 # enter the debugger when we hit an exception
2571 2573 if options['debugger']:
2572 2574 pdb.post_mortem(sys.exc_info()[2])
2573 2575 if options['traceback']:
2574 2576 traceback.print_exc()
2575 2577 raise
2576 2578 except hg.RepoError, inst:
2577 2579 u.warn(_("abort: "), inst, "!\n")
2578 2580 except revlog.RevlogError, inst:
2579 2581 u.warn(_("abort: "), inst, "!\n")
2580 2582 except SignalInterrupt:
2581 2583 u.warn(_("killed!\n"))
2582 2584 except KeyboardInterrupt:
2583 2585 try:
2584 2586 u.warn(_("interrupted!\n"))
2585 2587 except IOError, inst:
2586 2588 if inst.errno == errno.EPIPE:
2587 2589 if u.debugflag:
2588 2590 u.warn(_("\nbroken pipe\n"))
2589 2591 else:
2590 2592 raise
2591 2593 except IOError, inst:
2592 2594 if hasattr(inst, "code"):
2593 2595 u.warn(_("abort: %s\n") % inst)
2594 2596 elif hasattr(inst, "reason"):
2595 2597 u.warn(_("abort: error: %s\n") % inst.reason[1])
2596 2598 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2597 2599 if u.debugflag:
2598 2600 u.warn(_("broken pipe\n"))
2599 2601 elif getattr(inst, "strerror", None):
2600 2602 if getattr(inst, "filename", None):
2601 2603 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2602 2604 else:
2603 2605 u.warn(_("abort: %s\n") % inst.strerror)
2604 2606 else:
2605 2607 raise
2606 2608 except OSError, inst:
2607 2609 if hasattr(inst, "filename"):
2608 2610 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2609 2611 else:
2610 2612 u.warn(_("abort: %s\n") % inst.strerror)
2611 2613 except util.Abort, inst:
2612 2614 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2613 2615 sys.exit(1)
2614 2616 except TypeError, inst:
2615 2617 # was this an argument error?
2616 2618 tb = traceback.extract_tb(sys.exc_info()[2])
2617 2619 if len(tb) > 2: # no
2618 2620 raise
2619 2621 u.debug(inst, "\n")
2620 2622 u.warn(_("%s: invalid arguments\n") % cmd)
2621 2623 help_(u, cmd)
2622 2624 except UnknownCommand, inst:
2623 2625 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2624 2626 help_(u, 'shortlist')
2625 2627 except SystemExit:
2626 2628 # don't catch this in the catch-all below
2627 2629 raise
2628 2630 except:
2629 2631 u.warn(_("** unknown exception encountered, details follow\n"))
2630 2632 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2631 2633 raise
2632 2634
2633 2635 sys.exit(-1)
@@ -1,1746 +1,1748 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14 14
15 15 class localrepository:
16 16 def __init__(self, ui, path=None, create=0):
17 17 if not path:
18 18 p = os.getcwd()
19 19 while not os.path.isdir(os.path.join(p, ".hg")):
20 20 oldp = p
21 21 p = os.path.dirname(p)
22 22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 23 path = p
24 24 self.path = os.path.join(path, ".hg")
25 25
26 26 if not create and not os.path.isdir(self.path):
27 27 raise repo.RepoError(_("repository %s not found") % self.path)
28 28
29 29 self.root = os.path.abspath(path)
30 30 self.ui = ui
31 31 self.opener = util.opener(self.path)
32 32 self.wopener = util.opener(self.root)
33 33 self.manifest = manifest.manifest(self.opener)
34 34 self.changelog = changelog.changelog(self.opener)
35 35 self.tagscache = None
36 36 self.nodetagscache = None
37 37 self.encodepats = None
38 38 self.decodepats = None
39 39
40 40 if create:
41 41 os.mkdir(self.path)
42 42 os.mkdir(self.join("data"))
43 43
44 44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 45 try:
46 46 self.ui.readconfig(self.join("hgrc"))
47 47 except IOError: pass
48 48
49 49 def hook(self, name, **args):
50 50 def runhook(name, cmd):
51 51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
52 52 old = {}
53 53 for k, v in args.items():
54 54 k = k.upper()
55 55 old[k] = os.environ.get(k, None)
56 56 os.environ[k] = v
57 57
58 58 # Hooks run in the repository root
59 59 olddir = os.getcwd()
60 60 os.chdir(self.root)
61 61 r = os.system(cmd)
62 62 os.chdir(olddir)
63 63
64 64 for k, v in old.items():
65 65 if v != None:
66 66 os.environ[k] = v
67 67 else:
68 68 del os.environ[k]
69 69
70 70 if r:
71 71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 72 (name, r))
73 73 return False
74 74 return True
75 75
76 76 r = True
77 77 for hname, cmd in self.ui.configitems("hooks"):
78 78 s = hname.split(".")
79 79 if s[0] == name and cmd:
80 80 r = runhook(hname, cmd) and r
81 81 return r
82 82
83 83 def tags(self):
84 84 '''return a mapping of tag to node'''
85 85 if not self.tagscache:
86 86 self.tagscache = {}
87 87 def addtag(self, k, n):
88 88 try:
89 89 bin_n = bin(n)
90 90 except TypeError:
91 91 bin_n = ''
92 92 self.tagscache[k.strip()] = bin_n
93 93
94 94 try:
95 95 # read each head of the tags file, ending with the tip
96 96 # and add each tag found to the map, with "newer" ones
97 97 # taking precedence
98 98 fl = self.file(".hgtags")
99 99 h = fl.heads()
100 100 h.reverse()
101 101 for r in h:
102 102 for l in fl.read(r).splitlines():
103 103 if l:
104 104 n, k = l.split(" ", 1)
105 105 addtag(self, k, n)
106 106 except KeyError:
107 107 pass
108 108
109 109 try:
110 110 f = self.opener("localtags")
111 111 for l in f:
112 112 n, k = l.split(" ", 1)
113 113 addtag(self, k, n)
114 114 except IOError:
115 115 pass
116 116
117 117 self.tagscache['tip'] = self.changelog.tip()
118 118
119 119 return self.tagscache
120 120
121 121 def tagslist(self):
122 122 '''return a list of tags ordered by revision'''
123 123 l = []
124 124 for t, n in self.tags().items():
125 125 try:
126 126 r = self.changelog.rev(n)
127 127 except:
128 128 r = -2 # sort to the beginning of the list if unknown
129 129 l.append((r,t,n))
130 130 l.sort()
131 131 return [(t,n) for r,t,n in l]
132 132
133 133 def nodetags(self, node):
134 134 '''return the tags associated with a node'''
135 135 if not self.nodetagscache:
136 136 self.nodetagscache = {}
137 137 for t,n in self.tags().items():
138 138 self.nodetagscache.setdefault(n,[]).append(t)
139 139 return self.nodetagscache.get(node, [])
140 140
141 141 def lookup(self, key):
142 142 try:
143 143 return self.tags()[key]
144 144 except KeyError:
145 145 try:
146 146 return self.changelog.lookup(key)
147 147 except:
148 148 raise repo.RepoError(_("unknown revision '%s'") % key)
149 149
150 150 def dev(self):
151 151 return os.stat(self.path).st_dev
152 152
153 153 def local(self):
154 154 return True
155 155
156 156 def join(self, f):
157 157 return os.path.join(self.path, f)
158 158
159 159 def wjoin(self, f):
160 160 return os.path.join(self.root, f)
161 161
162 162 def file(self, f):
163 163 if f[0] == '/': f = f[1:]
164 164 return filelog.filelog(self.opener, f)
165 165
166 166 def getcwd(self):
167 167 return self.dirstate.getcwd()
168 168
169 169 def wfile(self, f, mode='r'):
170 170 return self.wopener(f, mode)
171 171
172 172 def wread(self, filename):
173 173 if self.encodepats == None:
174 174 l = []
175 175 for pat, cmd in self.ui.configitems("encode"):
176 176 mf = util.matcher("", "/", [pat], [], [])[1]
177 177 l.append((mf, cmd))
178 178 self.encodepats = l
179 179
180 180 data = self.wopener(filename, 'r').read()
181 181
182 182 for mf, cmd in self.encodepats:
183 183 if mf(filename):
184 184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
185 185 data = util.filter(data, cmd)
186 186 break
187 187
188 188 return data
189 189
190 190 def wwrite(self, filename, data, fd=None):
191 191 if self.decodepats == None:
192 192 l = []
193 193 for pat, cmd in self.ui.configitems("decode"):
194 194 mf = util.matcher("", "/", [pat], [], [])[1]
195 195 l.append((mf, cmd))
196 196 self.decodepats = l
197 197
198 198 for mf, cmd in self.decodepats:
199 199 if mf(filename):
200 200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
201 201 data = util.filter(data, cmd)
202 202 break
203 203
204 204 if fd:
205 205 return fd.write(data)
206 206 return self.wopener(filename, 'w').write(data)
207 207
208 208 def transaction(self):
209 209 # save dirstate for undo
210 210 try:
211 211 ds = self.opener("dirstate").read()
212 212 except IOError:
213 213 ds = ""
214 214 self.opener("journal.dirstate", "w").write(ds)
215 215
216 216 def after():
217 217 util.rename(self.join("journal"), self.join("undo"))
218 218 util.rename(self.join("journal.dirstate"),
219 219 self.join("undo.dirstate"))
220 220
221 221 return transaction.transaction(self.ui.warn, self.opener,
222 222 self.join("journal"), after)
223 223
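transaction() snapshots the dirstate to journal.dirstate and, via the after callback, renames journal to undo (and journal.dirstate to undo.dirstate) once the transaction completes; recover() below only has work to do while a journal file is still present, and undo() relies on the renamed undo files. A small sketch of checking which rollback data exists, assuming the .hg layout used here (hypothetical helper):

    import os

    def transaction_state(repopath):
        # which rollback files are present in this repository's .hg directory?
        hgdir = os.path.join(repopath, ".hg")
        return {
            "interrupted": os.path.exists(os.path.join(hgdir, "journal")),  # 'hg recover' applies
            "undoable": os.path.exists(os.path.join(hgdir, "undo")),        # 'hg undo' applies
        }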
224 224 def recover(self):
225 225 lock = self.lock()
226 226 if os.path.exists(self.join("journal")):
227 227 self.ui.status(_("rolling back interrupted transaction\n"))
228 return transaction.rollback(self.opener, self.join("journal"))
228 transaction.rollback(self.opener, self.join("journal"))
229 return True
229 230 else:
230 231 self.ui.warn(_("no interrupted transaction available\n"))
232 return False
231 233
232 234 def undo(self):
233 235 lock = self.lock()
234 236 if os.path.exists(self.join("undo")):
235 237 self.ui.status(_("rolling back last transaction\n"))
236 238 transaction.rollback(self.opener, self.join("undo"))
237 239 self.dirstate = None
238 240 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
239 241 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
240 242 else:
241 243 self.ui.warn(_("no undo information available\n"))
242 244
243 245 def lock(self, wait=1):
244 246 try:
245 247 return lock.lock(self.join("lock"), 0)
246 248 except lock.LockHeld, inst:
247 249 if wait:
248 250 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
249 251 return lock.lock(self.join("lock"), wait)
250 252 raise inst
251 253
252 254 def rawcommit(self, files, text, user, date, p1=None, p2=None):
253 255 orig_parent = self.dirstate.parents()[0] or nullid
254 256 p1 = p1 or self.dirstate.parents()[0] or nullid
255 257 p2 = p2 or self.dirstate.parents()[1] or nullid
256 258 c1 = self.changelog.read(p1)
257 259 c2 = self.changelog.read(p2)
258 260 m1 = self.manifest.read(c1[0])
259 261 mf1 = self.manifest.readflags(c1[0])
260 262 m2 = self.manifest.read(c2[0])
261 263 changed = []
262 264
263 265 if orig_parent == p1:
264 266 update_dirstate = 1
265 267 else:
266 268 update_dirstate = 0
267 269
268 270 tr = self.transaction()
269 271 mm = m1.copy()
270 272 mfm = mf1.copy()
271 273 linkrev = self.changelog.count()
272 274 for f in files:
273 275 try:
274 276 t = self.wread(f)
275 277 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
276 278 r = self.file(f)
277 279 mfm[f] = tm
278 280
279 281 fp1 = m1.get(f, nullid)
280 282 fp2 = m2.get(f, nullid)
281 283
282 284 # is the same revision on two branches of a merge?
283 285 if fp2 == fp1:
284 286 fp2 = nullid
285 287
286 288 if fp2 != nullid:
287 289 # is one parent an ancestor of the other?
288 290 fpa = r.ancestor(fp1, fp2)
289 291 if fpa == fp1:
290 292 fp1, fp2 = fp2, nullid
291 293 elif fpa == fp2:
292 294 fp2 = nullid
293 295
294 296 # is the file unmodified from the parent?
295 297 if t == r.read(fp1):
296 298 # record the proper existing parent in manifest
297 299 # no need to add a revision
298 300 mm[f] = fp1
299 301 continue
300 302
301 303 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
302 304 changed.append(f)
303 305 if update_dirstate:
304 306 self.dirstate.update([f], "n")
305 307 except IOError:
306 308 try:
307 309 del mm[f]
308 310 del mfm[f]
309 311 if update_dirstate:
310 312 self.dirstate.forget([f])
311 313 except:
312 314 # deleted from p2?
313 315 pass
314 316
315 317 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
316 318 user = user or self.ui.username()
317 319 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
318 320 tr.close()
319 321 if update_dirstate:
320 322 self.dirstate.setparents(n, nullid)
321 323
322 324 def commit(self, files = None, text = "", user = None, date = None,
323 325 match = util.always, force=False):
324 326 commit = []
325 327 remove = []
326 328 changed = []
327 329
328 330 if files:
329 331 for f in files:
330 332 s = self.dirstate.state(f)
331 333 if s in 'nmai':
332 334 commit.append(f)
333 335 elif s == 'r':
334 336 remove.append(f)
335 337 else:
336 338 self.ui.warn(_("%s not tracked!\n") % f)
337 339 else:
338 340 (c, a, d, u) = self.changes(match=match)
339 341 commit = c + a
340 342 remove = d
341 343
342 344 p1, p2 = self.dirstate.parents()
343 345 c1 = self.changelog.read(p1)
344 346 c2 = self.changelog.read(p2)
345 347 m1 = self.manifest.read(c1[0])
346 348 mf1 = self.manifest.readflags(c1[0])
347 349 m2 = self.manifest.read(c2[0])
348 350
349 351 if not commit and not remove and not force and p2 == nullid:
350 352 self.ui.status(_("nothing changed\n"))
351 353 return None
352 354
353 355 if not self.hook("precommit"):
354 356 return None
355 357
356 358 lock = self.lock()
357 359 tr = self.transaction()
358 360
359 361 # check in files
360 362 new = {}
361 363 linkrev = self.changelog.count()
362 364 commit.sort()
363 365 for f in commit:
364 366 self.ui.note(f + "\n")
365 367 try:
366 368 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
367 369 t = self.wread(f)
368 370 except IOError:
369 371 self.ui.warn(_("trouble committing %s!\n") % f)
370 372 raise
371 373
372 374 r = self.file(f)
373 375
374 376 meta = {}
375 377 cp = self.dirstate.copied(f)
376 378 if cp:
377 379 meta["copy"] = cp
378 380 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
379 381 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
380 382 fp1, fp2 = nullid, nullid
381 383 else:
382 384 fp1 = m1.get(f, nullid)
383 385 fp2 = m2.get(f, nullid)
384 386
385 387 # is the same revision on two branches of a merge?
386 388 if fp2 == fp1:
387 389 fp2 = nullid
388 390
389 391 if fp2 != nullid:
390 392 # is one parent an ancestor of the other?
391 393 fpa = r.ancestor(fp1, fp2)
392 394 if fpa == fp1:
393 395 fp1, fp2 = fp2, nullid
394 396 elif fpa == fp2:
395 397 fp2 = nullid
396 398
397 399 # is the file unmodified from the parent?
398 400 if not meta and t == r.read(fp1):
399 401 # record the proper existing parent in manifest
400 402 # no need to add a revision
401 403 new[f] = fp1
402 404 continue
403 405
404 406 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
405 407 # remember what we've added so that we can later calculate
406 408 # the files to pull from a set of changesets
407 409 changed.append(f)
408 410
409 411 # update manifest
410 412 m1.update(new)
411 413 for f in remove:
412 414 if f in m1:
413 415 del m1[f]
414 416 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
415 417 (new, remove))
416 418
417 419 # add changeset
418 420 new = new.keys()
419 421 new.sort()
420 422
421 423 if not text:
422 424 edittext = ""
423 425 if p2 != nullid:
424 426 edittext += "HG: branch merge\n"
425 427 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
426 428 edittext += "".join(["HG: changed %s\n" % f for f in changed])
427 429 edittext += "".join(["HG: removed %s\n" % f for f in remove])
428 430 if not changed and not remove:
429 431 edittext += "HG: no files changed\n"
430 432 edittext = self.ui.edit(edittext)
431 433 if not edittext.rstrip():
432 434 return None
433 435 text = edittext
434 436
435 437 user = user or self.ui.username()
436 438 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
437 439 tr.close()
438 440
439 441 self.dirstate.setparents(n)
440 442 self.dirstate.update(new, "n")
441 443 self.dirstate.forget(remove)
442 444
443 445 if not self.hook("commit", node=hex(n)):
444 446 return None
445 447 return n
446 448
447 449 def walk(self, node=None, files=[], match=util.always):
448 450 if node:
449 451 for fn in self.manifest.read(self.changelog.read(node)[0]):
450 452 if match(fn): yield 'm', fn
451 453 else:
452 454 for src, fn in self.dirstate.walk(files, match):
453 455 yield src, fn
454 456
455 457 def changes(self, node1 = None, node2 = None, files = [],
456 458 match = util.always):
457 459 mf2, u = None, []
458 460
459 461 def fcmp(fn, mf):
460 462 t1 = self.wread(fn)
461 463 t2 = self.file(fn).read(mf.get(fn, nullid))
462 464 return cmp(t1, t2)
463 465
464 466 def mfmatches(node):
465 467 mf = dict(self.manifest.read(node))
466 468 for fn in mf.keys():
467 469 if not match(fn):
468 470 del mf[fn]
469 471 return mf
470 472
471 473 # are we comparing the working directory?
472 474 if not node2:
473 475 l, c, a, d, u = self.dirstate.changes(files, match)
474 476
475 477 # are we comparing working dir against its parent?
476 478 if not node1:
477 479 if l:
478 480 # do a full compare of any files that might have changed
479 481 change = self.changelog.read(self.dirstate.parents()[0])
480 482 mf2 = mfmatches(change[0])
481 483 for f in l:
482 484 if fcmp(f, mf2):
483 485 c.append(f)
484 486
485 487 for l in c, a, d, u:
486 488 l.sort()
487 489
488 490 return (c, a, d, u)
489 491
490 492 # are we comparing working dir against non-tip?
491 493 # generate a pseudo-manifest for the working dir
492 494 if not node2:
493 495 if not mf2:
494 496 change = self.changelog.read(self.dirstate.parents()[0])
495 497 mf2 = mfmatches(change[0])
496 498 for f in a + c + l:
497 499 mf2[f] = ""
498 500 for f in d:
499 501 if f in mf2: del mf2[f]
500 502 else:
501 503 change = self.changelog.read(node2)
502 504 mf2 = mfmatches(change[0])
503 505
504 506 # flush lists from dirstate before comparing manifests
505 507 c, a = [], []
506 508
507 509 change = self.changelog.read(node1)
508 510 mf1 = mfmatches(change[0])
509 511
510 512 for fn in mf2:
511 513 if mf1.has_key(fn):
512 514 if mf1[fn] != mf2[fn]:
513 515 if mf2[fn] != "" or fcmp(fn, mf1):
514 516 c.append(fn)
515 517 del mf1[fn]
516 518 else:
517 519 a.append(fn)
518 520
519 521 d = mf1.keys()
520 522
521 523 for l in c, a, d, u:
522 524 l.sort()
523 525
524 526 return (c, a, d, u)
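# Example of the resulting tuple, assuming "repo" is this repository
# object, X an older changeset, and a working directory that modified
# "foo", added "bar" and deleted "baz" relative to X:
#
#   (c, a, d, u) = repo.changes(node1=X)
#   # c == ['foo']  changed, a == ['bar']  added,
#   # d == ['baz']  deleted, u == [...]    unknown to the dirstate
#
# Working-directory entries are stored in the pseudo-manifest as "",
# which is why the mf2[fn] != "" test above falls back to fcmp() and
# compares file contents instead of filenode hashes.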
525 527
526 528 def add(self, list):
527 529 for f in list:
528 530 p = self.wjoin(f)
529 531 if not os.path.exists(p):
530 532 self.ui.warn(_("%s does not exist!\n") % f)
531 533 elif not os.path.isfile(p):
532 534 self.ui.warn(_("%s not added: only files supported currently\n") % f)
533 535 elif self.dirstate.state(f) in 'an':
534 536 self.ui.warn(_("%s already tracked!\n") % f)
535 537 else:
536 538 self.dirstate.update([f], "a")
537 539
538 540 def forget(self, list):
539 541 for f in list:
540 542 if self.dirstate.state(f) not in 'ai':
541 543 self.ui.warn(_("%s not added!\n") % f)
542 544 else:
543 545 self.dirstate.forget([f])
544 546
545 547 def remove(self, list, unlink=False):
546 548 if unlink:
547 549 for f in list:
548 550 try:
549 551 util.unlink(self.wjoin(f))
550 552 except OSError, inst:
551 553 if inst.errno != errno.ENOENT: raise
552 554 for f in list:
553 555 p = self.wjoin(f)
554 556 if os.path.exists(p):
555 557 self.ui.warn(_("%s still exists!\n") % f)
556 558 elif self.dirstate.state(f) == 'a':
557 559 self.ui.warn(_("%s never committed!\n") % f)
558 560 self.dirstate.forget([f])
559 561 elif f not in self.dirstate:
560 562 self.ui.warn(_("%s not tracked!\n") % f)
561 563 else:
562 564 self.dirstate.update([f], "r")
563 565
564 566 def undelete(self, list):
565 567 p = self.dirstate.parents()[0]
566 568 mn = self.changelog.read(p)[0]
567 569 mf = self.manifest.readflags(mn)
568 570 m = self.manifest.read(mn)
569 571 for f in list:
570 572 if self.dirstate.state(f) not in "r":
571 573 self.ui.warn(_("%s not removed!\n") % f)
572 574 else:
573 575 t = self.file(f).read(m[f])
574 576 self.wwrite(f, t)
575 577 util.set_exec(self.wjoin(f), mf[f])
576 578 self.dirstate.update([f], "n")
577 579
578 580 def copy(self, source, dest):
579 581 p = self.wjoin(dest)
580 582 if not os.path.exists(p):
581 583 self.ui.warn(_("%s does not exist!\n") % dest)
582 584 elif not os.path.isfile(p):
583 585 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
584 586 else:
585 587 if self.dirstate.state(dest) == '?':
586 588 self.dirstate.update([dest], "a")
587 589 self.dirstate.copy(source, dest)
588 590
589 591 def heads(self):
590 592 return self.changelog.heads()
591 593
592 594 # branchlookup returns a dict giving a list of branches for
593 595 # each head. A branch is defined as the tag of a node or
594 596 # the branch of the node's parents. If a node has multiple
595 597 # branch tags, tags are eliminated if they are visible from other
596 598 # branch tags.
597 599 #
598 600 # So, for this graph:  a->b->c->d->e
599 601 #                       \         /
600 602 #                        aa -----/
601 603 # a has tag 2.6.12
602 604 # d has tag 2.6.13
603 605 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
604 606 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
605 607 # from the list.
606 608 #
607 609 # It is possible that more than one head will have the same branch tag.
608 610 # callers need to check the result for multiple heads under the same
609 611 # branch tag if that is a problem for them (ie checkout of a specific
610 612 # branch).
611 613 #
612 614 # passing in a specific branch will limit the depth of the search
613 615 # through the parents. It won't limit the branches returned in the
614 616 # result though.
615 617 def branchlookup(self, heads=None, branch=None):
616 618 if not heads:
617 619 heads = self.heads()
618 620 headt = [ h for h in heads ]
619 621 chlog = self.changelog
620 622 branches = {}
621 623 merges = []
622 624 seenmerge = {}
623 625
624 626 # traverse the tree once for each head, recording in the branches
625 627 # dict which tags are visible from this head. The branches
626 628 # dict also records which tags are visible from each tag
627 629 # while we traverse.
628 630 while headt or merges:
629 631 if merges:
630 632 n, found = merges.pop()
631 633 visit = [n]
632 634 else:
633 635 h = headt.pop()
634 636 visit = [h]
635 637 found = [h]
636 638 seen = {}
637 639 while visit:
638 640 n = visit.pop()
639 641 if n in seen:
640 642 continue
641 643 pp = chlog.parents(n)
642 644 tags = self.nodetags(n)
643 645 if tags:
644 646 for x in tags:
645 647 if x == 'tip':
646 648 continue
647 649 for f in found:
648 650 branches.setdefault(f, {})[n] = 1
649 651 branches.setdefault(n, {})[n] = 1
650 652 break
651 653 if n not in found:
652 654 found.append(n)
653 655 if branch in tags:
654 656 continue
655 657 seen[n] = 1
656 658 if pp[1] != nullid and n not in seenmerge:
657 659 merges.append((pp[1], [x for x in found]))
658 660 seenmerge[n] = 1
659 661 if pp[0] != nullid:
660 662 visit.append(pp[0])
661 663 # traverse the branches dict, eliminating branch tags from each
662 664 # head that are visible from another branch tag for that head.
663 665 out = {}
664 666 viscache = {}
665 667 for h in heads:
666 668 def visible(node):
667 669 if node in viscache:
668 670 return viscache[node]
669 671 ret = {}
670 672 visit = [node]
671 673 while visit:
672 674 x = visit.pop()
673 675 if x in viscache:
674 676 ret.update(viscache[x])
675 677 elif x not in ret:
676 678 ret[x] = 1
677 679 if x in branches:
678 680 visit[len(visit):] = branches[x].keys()
679 681 viscache[node] = ret
680 682 return ret
681 683 if h not in branches:
682 684 continue
683 685 # O(n^2), but somewhat limited. This only searches the
684 686 # tags visible from a specific head, not all the tags in the
685 687 # whole repo.
686 688 for b in branches[h]:
687 689 vis = False
688 690 for bb in branches[h].keys():
689 691 if b != bb:
690 692 if b in visible(bb):
691 693 vis = True
692 694 break
693 695 if not vis:
694 696 l = out.setdefault(h, [])
695 697 l[len(l):] = self.nodetags(b)
696 698 return out
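# Worked example for the graph in the comment above (tag 2.6.12 on a,
# tag 2.6.13 on d, single head e; node names are illustrative):
#
#   repo.branchlookup()   # => { e: ['2.6.13'] }
#
# branches[e] records both tag nodes a and d, but a shows up in
# visible(d), so its tag is eliminated and only the tags of d are
# copied into out[e].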
697 699
698 700 def branches(self, nodes):
699 701 if not nodes: nodes = [self.changelog.tip()]
700 702 b = []
701 703 for n in nodes:
702 704 t = n
703 705 while n:
704 706 p = self.changelog.parents(n)
705 707 if p[1] != nullid or p[0] == nullid:
706 708 b.append((t, n, p[0], p[1]))
707 709 break
708 710 n = p[0]
709 711 return b
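# Each tuple describes one linear segment of history walked down from a
# requested node: (requested node, bottom of the segment, parent 1,
# parent 2 of the bottom).  Example, assuming a simple chain
# root -> x -> y with no merges:
#
#   repo.branches([y])   # => [(y, root, nullid, nullid)]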
710 712
711 713 def between(self, pairs):
712 714 r = []
713 715
714 716 for top, bottom in pairs:
715 717 n, l, i = top, [], 0
716 718 f = 1
717 719
718 720 while n != bottom:
719 721 p = self.changelog.parents(n)[0]
720 722 if i == f:
721 723 l.append(n)
722 724 f = f * 2
723 725 n = p
724 726 i += 1
725 727
726 728 r.append(l)
727 729
728 730 return r
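# A small trace of the sampling above, assuming top is ten linear
# ancestors above bottom and d<k> denotes the node k steps below top:
# the current node is appended whenever i == f, with f doubling, so
#
#   repo.between([(top, bottom)])   # => [[d1, d2, d4, d8]]
#
# an exponentially spaced sample of the segment, which findincoming()
# below then bisects to locate the first unknown changeset.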
729 731
730 732 def findincoming(self, remote, base=None, heads=None):
731 733 m = self.changelog.nodemap
732 734 search = []
733 735 fetch = {}
734 736 seen = {}
735 737 seenbranch = {}
736 738 if base == None:
737 739 base = {}
738 740
739 741 # assume we're closer to the tip than the root
740 742 # and start by examining the heads
741 743 self.ui.status(_("searching for changes\n"))
742 744
743 745 if not heads:
744 746 heads = remote.heads()
745 747
746 748 unknown = []
747 749 for h in heads:
748 750 if h not in m:
749 751 unknown.append(h)
750 752 else:
751 753 base[h] = 1
752 754
753 755 if not unknown:
754 756 return None
755 757
756 758 rep = {}
757 759 reqcnt = 0
758 760
759 761 # search through remote branches
760 762 # a 'branch' here is a linear segment of history, with four parts:
761 763 # head, root, first parent, second parent
762 764 # (a branch always has two parents (or none) by definition)
763 765 unknown = remote.branches(unknown)
764 766 while unknown:
765 767 r = []
766 768 while unknown:
767 769 n = unknown.pop(0)
768 770 if n[0] in seen:
769 771 continue
770 772
771 773 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
772 774 if n[0] == nullid:
773 775 break
774 776 if n in seenbranch:
775 777 self.ui.debug(_("branch already found\n"))
776 778 continue
777 779 if n[1] and n[1] in m: # do we know the base?
778 780 self.ui.debug(_("found incomplete branch %s:%s\n")
779 781 % (short(n[0]), short(n[1])))
780 782 search.append(n) # schedule branch range for scanning
781 783 seenbranch[n] = 1
782 784 else:
783 785 if n[1] not in seen and n[1] not in fetch:
784 786 if n[2] in m and n[3] in m:
785 787 self.ui.debug(_("found new changeset %s\n") %
786 788 short(n[1]))
787 789 fetch[n[1]] = 1 # earliest unknown
788 790 base[n[2]] = 1 # latest known
789 791 continue
790 792
791 793 for a in n[2:4]:
792 794 if a not in rep:
793 795 r.append(a)
794 796 rep[a] = 1
795 797
796 798 seen[n[0]] = 1
797 799
798 800 if r:
799 801 reqcnt += 1
800 802 self.ui.debug(_("request %d: %s\n") %
801 803 (reqcnt, " ".join(map(short, r))))
802 804 for p in range(0, len(r), 10):
803 805 for b in remote.branches(r[p:p+10]):
804 806 self.ui.debug(_("received %s:%s\n") %
805 807 (short(b[0]), short(b[1])))
806 808 if b[0] in m:
807 809 self.ui.debug(_("found base node %s\n") % short(b[0]))
808 810 base[b[0]] = 1
809 811 elif b[0] not in seen:
810 812 unknown.append(b)
811 813
812 814 # do binary search on the branches we found
813 815 while search:
814 816 n = search.pop(0)
815 817 reqcnt += 1
816 818 l = remote.between([(n[0], n[1])])[0]
817 819 l.append(n[1])
818 820 p = n[0]
819 821 f = 1
820 822 for i in l:
821 823 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
822 824 if i in m:
823 825 if f <= 2:
824 826 self.ui.debug(_("found new branch changeset %s\n") %
825 827 short(p))
826 828 fetch[p] = 1
827 829 base[i] = 1
828 830 else:
829 831 self.ui.debug(_("narrowed branch search to %s:%s\n")
830 832 % (short(p), short(i)))
831 833 search.append((p, i))
832 834 break
833 835 p, f = i, f * 2
834 836
835 837 # sanity check our fetch list
836 838 for f in fetch.keys():
837 839 if f in m:
838 840 raise repo.RepoError(_("already have changeset ") + short(f))
839 841
840 842 if base.keys() == [nullid]:
841 843 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
842 844
843 845 self.ui.note(_("found new changesets starting at ") +
844 846 " ".join([short(f) for f in fetch]) + "\n")
845 847
846 848 self.ui.debug(_("%d total queries\n") % reqcnt)
847 849
848 850 return fetch.keys()
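# Sketch of the discovery conversation driven by the code above, using
# only the remote calls that appear in it:
#
#   remote.heads()                 # what does the other side have?
#   remote.branches(unknown)       # linear segments for unknown heads
#   remote.between([(head, base)]) # bisect a partially known segment
#
# The returned fetch list holds the earliest changesets we are missing,
# while "base" has been filled in with the latest changesets both
# repositories already share.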
849 851
850 852 def findoutgoing(self, remote, base=None, heads=None):
851 853 if base == None:
852 854 base = {}
853 855 self.findincoming(remote, base, heads)
854 856
855 857 self.ui.debug(_("common changesets up to ")
856 858 + " ".join(map(short, base.keys())) + "\n")
857 859
858 860 remain = dict.fromkeys(self.changelog.nodemap)
859 861
860 862 # prune everything remote has from the tree
861 863 del remain[nullid]
862 864 remove = base.keys()
863 865 while remove:
864 866 n = remove.pop(0)
865 867 if n in remain:
866 868 del remain[n]
867 869 for p in self.changelog.parents(n):
868 870 remove.append(p)
869 871
870 872 # find every node whose parents have been pruned
871 873 subset = []
872 874 for n in remain:
873 875 p1, p2 = self.changelog.parents(n)
874 876 if p1 not in remain and p2 not in remain:
875 877 subset.append(n)
876 878
877 879 # this is the set of all roots we have to push
878 880 return subset
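# Example, assuming the remote side already has everything up to node B
# and we committed B -> C -> D locally on top of it:
#
#   repo.findoutgoing(remote)   # => [C]
#
# B and its ancestors are pruned from "remain", leaving C as the only
# node whose parents are all pruned, i.e. the root of what has to be
# pushed.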
879 881
880 882 def pull(self, remote, heads = None):
881 883 lock = self.lock()
882 884
883 885 # if we have an empty repo, fetch everything
884 886 if self.changelog.tip() == nullid:
885 887 self.ui.status(_("requesting all changes\n"))
886 888 fetch = [nullid]
887 889 else:
888 890 fetch = self.findincoming(remote)
889 891
890 892 if not fetch:
891 893 self.ui.status(_("no changes found\n"))
892 894 return 1
893 895
894 896 if heads is None:
895 897 cg = remote.changegroup(fetch)
896 898 else:
897 899 cg = remote.changegroupsubset(fetch, heads)
898 900 return self.addchangegroup(cg)
899 901
900 902 def push(self, remote, force=False):
901 903 lock = remote.lock()
902 904
903 905 base = {}
904 906 heads = remote.heads()
905 907 inc = self.findincoming(remote, base, heads)
906 908 if not force and inc:
907 909 self.ui.warn(_("abort: unsynced remote changes!\n"))
908 910 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
909 911 return 1
910 912
911 913 update = self.findoutgoing(remote, base)
912 914 if not update:
913 915 self.ui.status(_("no changes found\n"))
914 916 return 1
915 917 elif not force:
916 918 if len(heads) < len(self.changelog.heads()):
917 919 self.ui.warn(_("abort: push creates new remote branches!\n"))
918 920 self.ui.status(_("(did you forget to merge?"
919 921 " use push -f to force)\n"))
920 922 return 1
921 923
922 924 cg = self.changegroup(update)
923 925 return remote.addchangegroup(cg)
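# Example of the safety checks above, assuming the remote repository
# has a head we have not pulled yet:
#
#   repo.push(remote)              # "abort: unsynced remote changes!"
#   repo.push(remote, force=True)  # pushes anyway
#
# The second check similarly refuses any push that would create new
# heads on the remote, again unless force is given.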
924 926
925 927 def changegroupsubset(self, bases, heads):
926 928 """This function generates a changegroup consisting of all the nodes
927 929 that are descendants of any of the bases, and ancestors of any of
928 930 the heads.
929 931
930 932 It is fairly complex as determining which filenodes and which
931 933 manifest nodes need to be included for the changeset to be complete
932 934 is non-trivial.
933 935
934 936 Another wrinkle is doing the reverse, figuring out which changeset in
935 937 the changegroup a particular filenode or manifestnode belongs to."""
936 938
937 939 # Set up some initial variables
938 940 # Make it easy to refer to self.changelog
939 941 cl = self.changelog
940 942 # msng is short for missing - compute the list of changesets in this
941 943 # changegroup.
942 944 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
943 945 # Some bases may turn out to be superfluous, and some heads may be
944 946 # too. nodesbetween will return the minimal set of bases and heads
945 947 # necessary to re-create the changegroup.
946 948
947 949 # Known heads are the list of heads that it is assumed the recipient
948 950 # of this changegroup will know about.
949 951 knownheads = {}
950 952 # We assume that all parents of bases are known heads.
951 953 for n in bases:
952 954 for p in cl.parents(n):
953 955 if p != nullid:
954 956 knownheads[p] = 1
955 957 knownheads = knownheads.keys()
956 958 if knownheads:
957 959 # Now that we know what heads are known, we can compute which
958 960 # changesets are known. The recipient must know about all
959 961 # changesets required to reach the known heads from the null
960 962 # changeset.
961 963 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
962 964 junk = None
963 965 # Transform the list into an ersatz set.
964 966 has_cl_set = dict.fromkeys(has_cl_set)
965 967 else:
966 968 # If there were no known heads, the recipient cannot be assumed to
967 969 # know about any changesets.
968 970 has_cl_set = {}
969 971
970 972 # Make it easy to refer to self.manifest
971 973 mnfst = self.manifest
972 974 # We don't know which manifests are missing yet
973 975 msng_mnfst_set = {}
974 976 # Nor do we know which filenodes are missing.
975 977 msng_filenode_set = {}
976 978
977 979 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
978 980 junk = None
979 981
980 982 # A changeset always belongs to itself, so the changenode lookup
981 983 # function for a changenode is identity.
982 984 def identity(x):
983 985 return x
984 986
985 987 # A function generating function. Sets up an environment for the
986 988 # inner function.
987 989 def cmp_by_rev_func(revlog):
988 990 # Compare two nodes by their revision number in the environment's
989 991 # revision history. Since the revision number both represents the
990 992 # most efficient order to read the nodes in, and represents a
991 993 # topological sorting of the nodes, this function is often useful.
992 994 def cmp_by_rev(a, b):
993 995 return cmp(revlog.rev(a), revlog.rev(b))
994 996 return cmp_by_rev
995 997
996 998 # If we determine that a particular file or manifest node must be a
997 999 # node that the recipient of the changegroup will already have, we can
998 1000 # also assume the recipient will have all the parents. This function
999 1001 # prunes them from the set of missing nodes.
1000 1002 def prune_parents(revlog, hasset, msngset):
1001 1003 haslst = hasset.keys()
1002 1004 haslst.sort(cmp_by_rev_func(revlog))
1003 1005 for node in haslst:
1004 1006 parentlst = [p for p in revlog.parents(node) if p != nullid]
1005 1007 while parentlst:
1006 1008 n = parentlst.pop()
1007 1009 if n not in hasset:
1008 1010 hasset[n] = 1
1009 1011 p = [p for p in revlog.parents(n) if p != nullid]
1010 1012 parentlst.extend(p)
1011 1013 for n in hasset:
1012 1014 msngset.pop(n, None)
1013 1015
1014 1016 # This is a function generating function used to set up an environment
1015 1017 # for the inner function to execute in.
1016 1018 def manifest_and_file_collector(changedfileset):
1017 1019 # This is an information gathering function that gathers
1018 1020 # information from each changeset node that goes out as part of
1019 1021 # the changegroup. The information gathered is a list of which
1020 1022 # manifest nodes are potentially required (the recipient may
1021 1023 # already have them) and total list of all files which were
1022 1024 # changed in any changeset in the changegroup.
1023 1025 #
1024 1026 # We also remember the first changenode we saw any manifest
1025 1027 # referenced by so we can later determine which changenode 'owns'
1026 1028 # the manifest.
1027 1029 def collect_manifests_and_files(clnode):
1028 1030 c = cl.read(clnode)
1029 1031 for f in c[3]:
1030 1032 # This is to make sure we only have one instance of each
1031 1033 # filename string for each filename.
1032 1034 changedfileset.setdefault(f, f)
1033 1035 msng_mnfst_set.setdefault(c[0], clnode)
1034 1036 return collect_manifests_and_files
1035 1037
1036 1038 # Figure out which manifest nodes (of the ones we think might be part
1037 1039 # of the changegroup) the recipient must know about and remove them
1038 1040 # from the changegroup.
1039 1041 def prune_manifests():
1040 1042 has_mnfst_set = {}
1041 1043 for n in msng_mnfst_set:
1042 1044 # If a 'missing' manifest thinks it belongs to a changenode
1043 1045 # the recipient is assumed to have, obviously the recipient
1044 1046 # must have that manifest.
1045 1047 linknode = cl.node(mnfst.linkrev(n))
1046 1048 if linknode in has_cl_set:
1047 1049 has_mnfst_set[n] = 1
1048 1050 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1049 1051
1050 1052 # Use the information collected in collect_manifests_and_files to say
1051 1053 # which changenode any manifestnode belongs to.
1052 1054 def lookup_manifest_link(mnfstnode):
1053 1055 return msng_mnfst_set[mnfstnode]
1054 1056
1055 1057 # A function generating function that sets up the initial environment
1056 1058 # for the inner function.
1057 1059 def filenode_collector(changedfiles):
1058 1060 next_rev = [0]
1059 1061 # This gathers information from each manifestnode included in the
1060 1062 # changegroup about which filenodes the manifest node references
1061 1063 # so we can include those in the changegroup too.
1062 1064 #
1063 1065 # It also remembers which changenode each filenode belongs to. It
1064 1066 # does this by assuming that a filenode belongs to the changenode
1065 1067 # that the first manifest referencing it belongs to.
1066 1068 def collect_msng_filenodes(mnfstnode):
1067 1069 r = mnfst.rev(mnfstnode)
1068 1070 if r == next_rev[0]:
1069 1071 # If the last rev we looked at was the one just previous,
1070 1072 # we only need to see a diff.
1071 1073 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1072 1074 # For each line in the delta
1073 1075 for dline in delta.splitlines():
1074 1076 # get the filename and filenode for that line
1075 1077 f, fnode = dline.split('\0')
1076 1078 fnode = bin(fnode[:40])
1077 1079 f = changedfiles.get(f, None)
1078 1080 # And if the file is in the list of files we care
1079 1081 # about.
1080 1082 if f is not None:
1081 1083 # Get the changenode this manifest belongs to
1082 1084 clnode = msng_mnfst_set[mnfstnode]
1083 1085 # Create the set of filenodes for the file if
1084 1086 # there isn't one already.
1085 1087 ndset = msng_filenode_set.setdefault(f, {})
1086 1088 # And set the filenode's changelog node to the
1087 1089 # manifest's if it hasn't been set already.
1088 1090 ndset.setdefault(fnode, clnode)
1089 1091 else:
1090 1092 # Otherwise we need a full manifest.
1091 1093 m = mnfst.read(mnfstnode)
1092 1094 # For every file we care about.
1093 1095 for f in changedfiles:
1094 1096 fnode = m.get(f, None)
1095 1097 # If it's in the manifest
1096 1098 if fnode is not None:
1097 1099 # See comments above.
1098 1100 clnode = msng_mnfst_set[mnfstnode]
1099 1101 ndset = msng_filenode_set.setdefault(f, {})
1100 1102 ndset.setdefault(fnode, clnode)
1101 1103 # Remember the revision we hope to see next.
1102 1104 next_rev[0] = r + 1
1103 1105 return collect_msng_filenodes
1104 1106
1105 1107 # We have a list of filenodes we think we need for a file, let's remove
1106 1108 # all those we know the recipient must have.
1107 1109 def prune_filenodes(f, filerevlog):
1108 1110 msngset = msng_filenode_set[f]
1109 1111 hasset = {}
1110 1112 # If a 'missing' filenode thinks it belongs to a changenode we
1111 1113 # assume the recipient must have, then the recipient must have
1112 1114 # that filenode.
1113 1115 for n in msngset:
1114 1116 clnode = cl.node(filerevlog.linkrev(n))
1115 1117 if clnode in has_cl_set:
1116 1118 hasset[n] = 1
1117 1119 prune_parents(filerevlog, hasset, msngset)
1118 1120
1119 1121 # A function generating function that sets up a context for the
1120 1122 # inner function.
1121 1123 def lookup_filenode_link_func(fname):
1122 1124 msngset = msng_filenode_set[fname]
1123 1125 # Lookup the changenode the filenode belongs to.
1124 1126 def lookup_filenode_link(fnode):
1125 1127 return msngset[fnode]
1126 1128 return lookup_filenode_link
1127 1129
1128 1130 # Now that we have all these utility functions to help out and
1129 1131 # logically divide up the task, generate the group.
1130 1132 def gengroup():
1131 1133 # The set of changed files starts empty.
1132 1134 changedfiles = {}
1133 1135 # Create a changenode group generator that will call our functions
1134 1136 # back to lookup the owning changenode and collect information.
1135 1137 group = cl.group(msng_cl_lst, identity,
1136 1138 manifest_and_file_collector(changedfiles))
1137 1139 for chnk in group:
1138 1140 yield chnk
1139 1141
1140 1142 # The list of manifests has been collected by the generator
1141 1143 # calling our functions back.
1142 1144 prune_manifests()
1143 1145 msng_mnfst_lst = msng_mnfst_set.keys()
1144 1146 # Sort the manifestnodes by revision number.
1145 1147 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1146 1148 # Create a generator for the manifestnodes that calls our lookup
1147 1149 # and data collection functions back.
1148 1150 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1149 1151 filenode_collector(changedfiles))
1150 1152 for chnk in group:
1151 1153 yield chnk
1152 1154
1153 1155 # These are no longer needed, dereference and toss the memory for
1154 1156 # them.
1155 1157 msng_mnfst_lst = None
1156 1158 msng_mnfst_set.clear()
1157 1159
1158 1160 changedfiles = changedfiles.keys()
1159 1161 changedfiles.sort()
1160 1162 # Go through all our files in order sorted by name.
1161 1163 for fname in changedfiles:
1162 1164 filerevlog = self.file(fname)
1163 1165 # Toss out the filenodes that the recipient isn't really
1164 1166 # missing.
1165 1167 prune_filenodes(fname, filerevlog)
1166 1168 msng_filenode_lst = msng_filenode_set[fname].keys()
1167 1169 # If any filenodes are left, generate the group for them,
1168 1170 # otherwise don't bother.
1169 1171 if len(msng_filenode_lst) > 0:
1170 1172 yield struct.pack(">l", len(fname) + 4) + fname
1171 1173 # Sort the filenodes by their revision #
1172 1174 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1173 1175 # Create a group generator and only pass in a changenode
1174 1176 # lookup function as we need to collect no information
1175 1177 # from filenodes.
1176 1178 group = filerevlog.group(msng_filenode_lst,
1177 1179 lookup_filenode_link_func(fname))
1178 1180 for chnk in group:
1179 1181 yield chnk
1180 1182 # Don't need this anymore, toss it to free memory.
1181 1183 del msng_filenode_set[fname]
1182 1184 # Signal that no more groups are left.
1183 1185 yield struct.pack(">l", 0)
1184 1186
1185 1187 return util.chunkbuffer(gengroup())
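# Overall shape of the stream produced by gengroup() above, as consumed
# by addchangegroup() further down:
#
#   changelog group | manifest group |
#   per changed file: a chunk carrying the file name
#                     (struct.pack(">l", len(fname) + 4) + fname)
#                     followed by that file's group |
#   struct.pack(">l", 0) to signal that no more groups follow
#
# The lookup callbacks handed to each group() call map every manifest
# node and filenode back to the changeset that introduced it, so the
# receiver can reconstruct the linkrevs.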
1186 1188
1187 1189 def changegroup(self, basenodes):
1188 1190 """Generate a changegroup of all nodes that we have that a recipient
1189 1191 doesn't.
1190 1192
1191 1193 This is much easier than the previous function as we can assume that
1192 1194 the recipient has any changenode we aren't sending them."""
1193 1195 cl = self.changelog
1194 1196 nodes = cl.nodesbetween(basenodes, None)[0]
1195 1197 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1196 1198
1197 1199 def identity(x):
1198 1200 return x
1199 1201
1200 1202 def gennodelst(revlog):
1201 1203 for r in xrange(0, revlog.count()):
1202 1204 n = revlog.node(r)
1203 1205 if revlog.linkrev(n) in revset:
1204 1206 yield n
1205 1207
1206 1208 def changed_file_collector(changedfileset):
1207 1209 def collect_changed_files(clnode):
1208 1210 c = cl.read(clnode)
1209 1211 for fname in c[3]:
1210 1212 changedfileset[fname] = 1
1211 1213 return collect_changed_files
1212 1214
1213 1215 def lookuprevlink_func(revlog):
1214 1216 def lookuprevlink(n):
1215 1217 return cl.node(revlog.linkrev(n))
1216 1218 return lookuprevlink
1217 1219
1218 1220 def gengroup():
1219 1221 # construct a list of all changed files
1220 1222 changedfiles = {}
1221 1223
1222 1224 for chnk in cl.group(nodes, identity,
1223 1225 changed_file_collector(changedfiles)):
1224 1226 yield chnk
1225 1227 changedfiles = changedfiles.keys()
1226 1228 changedfiles.sort()
1227 1229
1228 1230 mnfst = self.manifest
1229 1231 nodeiter = gennodelst(mnfst)
1230 1232 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1231 1233 yield chnk
1232 1234
1233 1235 for fname in changedfiles:
1234 1236 filerevlog = self.file(fname)
1235 1237 nodeiter = gennodelst(filerevlog)
1236 1238 nodeiter = list(nodeiter)
1237 1239 if nodeiter:
1238 1240 yield struct.pack(">l", len(fname) + 4) + fname
1239 1241 lookup = lookuprevlink_func(filerevlog)
1240 1242 for chnk in filerevlog.group(nodeiter, lookup):
1241 1243 yield chnk
1242 1244
1243 1245 yield struct.pack(">l", 0)
1244 1246
1245 1247 return util.chunkbuffer(gengroup())
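# Minimal usage sketch, assuming "other" is another repository object
# and "node" a changeset both sides already have:
#
#   cg = repo.changegroup([node])   # everything we have above node
#   other.addchangegroup(cg)
#
# This is the simple path used by pull()/push(); changegroupsubset()
# above handles the case where the receiver's heads must be honoured
# as well.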
1246 1248
1247 1249 def addchangegroup(self, source):
1248 1250
1249 1251 def getchunk():
1250 1252 d = source.read(4)
1251 1253 if not d: return ""
1252 1254 l = struct.unpack(">l", d)[0]
1253 1255 if l <= 4: return ""
1254 1256 d = source.read(l - 4)
1255 1257 if len(d) < l - 4:
1256 1258 raise repo.RepoError(_("premature EOF reading chunk"
1257 1259 " (got %d bytes, expected %d)")
1258 1260 % (len(d), l - 4))
1259 1261 return d
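# Framing example for getchunk(): every chunk starts with a 4-byte
# big-endian length that counts itself, e.g.
#
#   struct.pack(">l", 9) + "hello"   # one chunk with a 5-byte payload
#   struct.pack(">l", 0)             # l <= 4, i.e. an end-of-group marker
#
# so the empty string returned for such a marker is what the loops
# below use to detect the end of each group.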
1260 1262
1261 1263 def getgroup():
1262 1264 while 1:
1263 1265 c = getchunk()
1264 1266 if not c: break
1265 1267 yield c
1266 1268
1267 1269 def csmap(x):
1268 1270 self.ui.debug(_("add changeset %s\n") % short(x))
1269 1271 return self.changelog.count()
1270 1272
1271 1273 def revmap(x):
1272 1274 return self.changelog.rev(x)
1273 1275
1274 1276 if not source: return
1275 1277 changesets = files = revisions = 0
1276 1278
1277 1279 tr = self.transaction()
1278 1280
1279 1281 oldheads = len(self.changelog.heads())
1280 1282
1281 1283 # pull off the changeset group
1282 1284 self.ui.status(_("adding changesets\n"))
1283 1285 co = self.changelog.tip()
1284 1286 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1285 1287 cnr, cor = map(self.changelog.rev, (cn, co))
1286 1288 if cn == nullid:
1287 1289 cnr = cor
1288 1290 changesets = cnr - cor
1289 1291
1290 1292 # pull off the manifest group
1291 1293 self.ui.status(_("adding manifests\n"))
1292 1294 mm = self.manifest.tip()
1293 1295 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1294 1296
1295 1297 # process the files
1296 1298 self.ui.status(_("adding file changes\n"))
1297 1299 while 1:
1298 1300 f = getchunk()
1299 1301 if not f: break
1300 1302 self.ui.debug(_("adding %s revisions\n") % f)
1301 1303 fl = self.file(f)
1302 1304 o = fl.count()
1303 1305 n = fl.addgroup(getgroup(), revmap, tr)
1304 1306 revisions += fl.count() - o
1305 1307 files += 1
1306 1308
1307 1309 newheads = len(self.changelog.heads())
1308 1310 heads = ""
1309 1311 if oldheads and newheads > oldheads:
1310 1312 heads = _(" (+%d heads)") % (newheads - oldheads)
1311 1313
1312 1314 self.ui.status(_("added %d changesets"
1313 1315 " with %d changes to %d files%s\n")
1314 1316 % (changesets, revisions, files, heads))
1315 1317
1316 1318 tr.close()
1317 1319
1318 1320 if changesets > 0:
1319 1321 if not self.hook("changegroup",
1320 1322 node=hex(self.changelog.node(cor+1))):
1321 1323 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1322 1324 return 1
1323 1325
1324 1326 for i in range(cor + 1, cnr + 1):
1325 1327 self.hook("commit", node=hex(self.changelog.node(i)))
1326 1328
1327 1329 return
1328 1330
1329 1331 def update(self, node, allow=False, force=False, choose=None,
1330 1332 moddirstate=True):
1331 1333 pl = self.dirstate.parents()
1332 1334 if not force and pl[1] != nullid:
1333 1335 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1334 1336 return 1
1335 1337
1336 1338 p1, p2 = pl[0], node
1337 1339 pa = self.changelog.ancestor(p1, p2)
1338 1340 m1n = self.changelog.read(p1)[0]
1339 1341 m2n = self.changelog.read(p2)[0]
1340 1342 man = self.manifest.ancestor(m1n, m2n)
1341 1343 m1 = self.manifest.read(m1n)
1342 1344 mf1 = self.manifest.readflags(m1n)
1343 1345 m2 = self.manifest.read(m2n)
1344 1346 mf2 = self.manifest.readflags(m2n)
1345 1347 ma = self.manifest.read(man)
1346 1348 mfa = self.manifest.readflags(man)
1347 1349
1348 1350 (c, a, d, u) = self.changes()
1349 1351
1350 1352 # is this a jump, or a merge? i.e. is there a linear path
1351 1353 # from p1 to p2?
1352 1354 linear_path = (pa == p1 or pa == p2)
1353 1355
1354 1356 # resolve the manifest to determine which files
1355 1357 # we care about merging
1356 1358 self.ui.note(_("resolving manifests\n"))
1357 1359 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1358 1360 (force, allow, moddirstate, linear_path))
1359 1361 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1360 1362 (short(man), short(m1n), short(m2n)))
1361 1363
1362 1364 merge = {}
1363 1365 get = {}
1364 1366 remove = []
1365 1367
1366 1368 # construct a working dir manifest
1367 1369 mw = m1.copy()
1368 1370 mfw = mf1.copy()
1369 1371 umap = dict.fromkeys(u)
1370 1372
1371 1373 for f in a + c + u:
1372 1374 mw[f] = ""
1373 1375 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1374 1376
1375 1377 for f in d:
1376 1378 if f in mw: del mw[f]
1377 1379
1378 1380 # If we're jumping between revisions (as opposed to merging),
1379 1381 # and if neither the working directory nor the target rev has
1380 1382 # the file, then we need to remove it from the dirstate, to
1381 1383 # prevent the dirstate from listing the file when it is no
1382 1384 # longer in the manifest.
1383 1385 if moddirstate and linear_path and f not in m2:
1384 1386 self.dirstate.forget((f,))
1385 1387
1386 1388 # Compare manifests
1387 1389 for f, n in mw.iteritems():
1388 1390 if choose and not choose(f): continue
1389 1391 if f in m2:
1390 1392 s = 0
1391 1393
1392 1394 # is the wfile new since m1, and match m2?
1393 1395 if f not in m1:
1394 1396 t1 = self.wread(f)
1395 1397 t2 = self.file(f).read(m2[f])
1396 1398 if cmp(t1, t2) == 0:
1397 1399 n = m2[f]
1398 1400 del t1, t2
1399 1401
1400 1402 # are files different?
1401 1403 if n != m2[f]:
1402 1404 a = ma.get(f, nullid)
1403 1405 # are both different from the ancestor?
1404 1406 if n != a and m2[f] != a:
1405 1407 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1406 1408 # merge executable bits
1407 1409 # "if we changed or they changed, change in merge"
1408 1410 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1409 1411 mode = ((a^b) | (a^c)) ^ a
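# Truth table for the exec bit merge above (a = ancestor, b = local
# working copy, c = remote):
#
#   a b c -> mode              a b c -> mode
#   0 0 0 -> 0                 1 1 1 -> 1
#   0 1 0 -> 1 (we set it)     1 0 1 -> 0 (we cleared it)
#   0 0 1 -> 1 (they set it)   1 1 0 -> 0 (they cleared it)
#   0 1 1 -> 1                 1 0 0 -> 0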
1410 1412 merge[f] = (m1.get(f, nullid), m2[f], mode)
1411 1413 s = 1
1412 1414 # are we clobbering?
1413 1415 # is remote's version newer?
1414 1416 # or are we going back in time?
1415 1417 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1416 1418 self.ui.debug(_(" remote %s is newer, get\n") % f)
1417 1419 get[f] = m2[f]
1418 1420 s = 1
1419 1421 elif f in umap:
1420 1422 # this unknown file is the same as the checkout
1421 1423 get[f] = m2[f]
1422 1424
1423 1425 if not s and mfw[f] != mf2[f]:
1424 1426 if force:
1425 1427 self.ui.debug(_(" updating permissions for %s\n") % f)
1426 1428 util.set_exec(self.wjoin(f), mf2[f])
1427 1429 else:
1428 1430 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1429 1431 mode = ((a^b) | (a^c)) ^ a
1430 1432 if mode != b:
1431 1433 self.ui.debug(_(" updating permissions for %s\n") % f)
1432 1434 util.set_exec(self.wjoin(f), mode)
1433 1435 del m2[f]
1434 1436 elif f in ma:
1435 1437 if n != ma[f]:
1436 1438 r = _("d")
1437 1439 if not force and (linear_path or allow):
1438 1440 r = self.ui.prompt(
1439 1441 (_(" local changed %s which remote deleted\n") % f) +
1440 1442 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1441 1443 if r == _("d"):
1442 1444 remove.append(f)
1443 1445 else:
1444 1446 self.ui.debug(_("other deleted %s\n") % f)
1445 1447 remove.append(f) # other deleted it
1446 1448 else:
1447 1449 # file is created on branch or in working directory
1448 1450 if force and f not in umap:
1449 1451 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1450 1452 remove.append(f)
1451 1453 elif n == m1.get(f, nullid): # same as parent
1452 1454 if p2 == pa: # going backwards?
1453 1455 self.ui.debug(_("remote deleted %s\n") % f)
1454 1456 remove.append(f)
1455 1457 else:
1456 1458 self.ui.debug(_("local modified %s, keeping\n") % f)
1457 1459 else:
1458 1460 self.ui.debug(_("working dir created %s, keeping\n") % f)
1459 1461
1460 1462 for f, n in m2.iteritems():
1461 1463 if choose and not choose(f): continue
1462 1464 if f[0] == "/": continue
1463 1465 if f in ma and n != ma[f]:
1464 1466 r = _("k")
1465 1467 if not force and (linear_path or allow):
1466 1468 r = self.ui.prompt(
1467 1469 (_("remote changed %s which local deleted\n") % f) +
1468 1470 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1469 1471 if r == _("k"): get[f] = n
1470 1472 elif f not in ma:
1471 1473 self.ui.debug(_("remote created %s\n") % f)
1472 1474 get[f] = n
1473 1475 else:
1474 1476 if force or p2 == pa: # going backwards?
1475 1477 self.ui.debug(_("local deleted %s, recreating\n") % f)
1476 1478 get[f] = n
1477 1479 else:
1478 1480 self.ui.debug(_("local deleted %s\n") % f)
1479 1481
1480 1482 del mw, m1, m2, ma
1481 1483
1482 1484 if force:
1483 1485 for f in merge:
1484 1486 get[f] = merge[f][1]
1485 1487 merge = {}
1486 1488
1487 1489 if linear_path or force:
1488 1490 # we don't need to do any magic, just jump to the new rev
1489 1491 branch_merge = False
1490 1492 p1, p2 = p2, nullid
1491 1493 else:
1492 1494 if not allow:
1493 1495 self.ui.status(_("this update spans a branch"
1494 1496 " affecting the following files:\n"))
1495 1497 fl = merge.keys() + get.keys()
1496 1498 fl.sort()
1497 1499 for f in fl:
1498 1500 cf = ""
1499 1501 if f in merge: cf = _(" (resolve)")
1500 1502 self.ui.status(" %s%s\n" % (f, cf))
1501 1503 self.ui.warn(_("aborting update spanning branches!\n"))
1502 1504 self.ui.status(_("(use update -m to merge across branches"
1503 1505 " or -C to lose changes)\n"))
1504 1506 return 1
1505 1507 branch_merge = True
1506 1508
1507 1509 # get the files we don't need to change
1508 1510 files = get.keys()
1509 1511 files.sort()
1510 1512 for f in files:
1511 1513 if f[0] == "/": continue
1512 1514 self.ui.note(_("getting %s\n") % f)
1513 1515 t = self.file(f).read(get[f])
1514 1516 self.wwrite(f, t)
1515 1517 util.set_exec(self.wjoin(f), mf2[f])
1516 1518 if moddirstate:
1517 1519 if branch_merge:
1518 1520 self.dirstate.update([f], 'n', st_mtime=-1)
1519 1521 else:
1520 1522 self.dirstate.update([f], 'n')
1521 1523
1522 1524 # merge the tricky bits
1523 1525 files = merge.keys()
1524 1526 files.sort()
1525 1527 for f in files:
1526 1528 self.ui.status(_("merging %s\n") % f)
1527 1529 my, other, flag = merge[f]
1528 1530 self.merge3(f, my, other)
1529 1531 util.set_exec(self.wjoin(f), flag)
1530 1532 if moddirstate:
1531 1533 if branch_merge:
1532 1534 # We've done a branch merge, mark this file as merged
1533 1535 # so that we properly record the merger later
1534 1536 self.dirstate.update([f], 'm')
1535 1537 else:
1536 1538 # We've update-merged a locally modified file, so
1537 1539 # we set the dirstate to emulate a normal checkout
1538 1540 # of that file some time in the past. Thus our
1539 1541 # merge will appear as a normal local file
1540 1542 # modification.
1541 1543 f_len = len(self.file(f).read(other))
1542 1544 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1543 1545
1544 1546 remove.sort()
1545 1547 for f in remove:
1546 1548 self.ui.note(_("removing %s\n") % f)
1547 1549 try:
1548 1550 util.unlink(self.wjoin(f))
1549 1551 except OSError, inst:
1550 1552 if inst.errno != errno.ENOENT:
1551 1553 self.ui.warn(_("update failed to remove %s: %s!\n") %
1552 1554 (f, inst.strerror))
1553 1555 if moddirstate:
1554 1556 if branch_merge:
1555 1557 self.dirstate.update(remove, 'r')
1556 1558 else:
1557 1559 self.dirstate.forget(remove)
1558 1560
1559 1561 if moddirstate:
1560 1562 self.dirstate.setparents(p1, p2)
1561 1563
1562 1564 def merge3(self, fn, my, other):
1563 1565 """perform a 3-way merge in the working directory"""
1564 1566
1565 1567 def temp(prefix, node):
1566 1568 pre = "%s~%s." % (os.path.basename(fn), prefix)
1567 1569 (fd, name) = tempfile.mkstemp("", pre)
1568 1570 f = os.fdopen(fd, "wb")
1569 1571 self.wwrite(fn, fl.read(node), f)
1570 1572 f.close()
1571 1573 return name
1572 1574
1573 1575 fl = self.file(fn)
1574 1576 base = fl.ancestor(my, other)
1575 1577 a = self.wjoin(fn)
1576 1578 b = temp("base", base)
1577 1579 c = temp("other", other)
1578 1580
1579 1581 self.ui.note(_("resolving %s\n") % fn)
1580 1582 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1581 1583 (fn, short(my), short(other), short(base)))
1582 1584
1583 1585 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1584 1586 or "hgmerge")
1585 1587 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1586 1588 if r:
1587 1589 self.ui.warn(_("merging %s failed!\n") % fn)
1588 1590
1589 1591 os.unlink(b)
1590 1592 os.unlink(c)
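# Example of the merge tool contract implied above, using the default
# "hgmerge" helper named in the fallback:
#
#   HGMERGE=hgmerge            (or "merge = ..." in the [ui] section)
#   hgmerge "local" "base" "other"
#
# The tool must leave the merged result in the local working file and
# exit 0 on success; a non-zero status only produces the "merging ...
# failed!" warning, and the temporary base/other copies are removed in
# either case.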
1591 1593
1592 1594 def verify(self):
1593 1595 filelinkrevs = {}
1594 1596 filenodes = {}
1595 1597 changesets = revisions = files = 0
1596 1598 errors = [0]
1597 1599 neededmanifests = {}
1598 1600
1599 1601 def err(msg):
1600 1602 self.ui.warn(msg + "\n")
1601 1603 errors[0] += 1
1602 1604
1603 1605 seen = {}
1604 1606 self.ui.status(_("checking changesets\n"))
1605 1607 d = self.changelog.checksize()
1606 1608 if d:
1607 1609 err(_("changeset data short %d bytes") % d)
1608 1610 for i in range(self.changelog.count()):
1609 1611 changesets += 1
1610 1612 n = self.changelog.node(i)
1611 1613 l = self.changelog.linkrev(n)
1612 1614 if l != i:
1613 1615 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1614 1616 if n in seen:
1615 1617 err(_("duplicate changeset at revision %d") % i)
1616 1618 seen[n] = 1
1617 1619
1618 1620 for p in self.changelog.parents(n):
1619 1621 if p not in self.changelog.nodemap:
1620 1622 err(_("changeset %s has unknown parent %s") %
1621 1623 (short(n), short(p)))
1622 1624 try:
1623 1625 changes = self.changelog.read(n)
1624 1626 except KeyboardInterrupt:
1625 1627 self.ui.warn(_("interrupted"))
1626 1628 raise
1627 1629 except Exception, inst:
1628 1630 err(_("unpacking changeset %s: %s") % (short(n), inst))
1629 1631
1630 1632 neededmanifests[changes[0]] = n
1631 1633
1632 1634 for f in changes[3]:
1633 1635 filelinkrevs.setdefault(f, []).append(i)
1634 1636
1635 1637 seen = {}
1636 1638 self.ui.status(_("checking manifests\n"))
1637 1639 d = self.manifest.checksize()
1638 1640 if d:
1639 1641 err(_("manifest data short %d bytes") % d)
1640 1642 for i in range(self.manifest.count()):
1641 1643 n = self.manifest.node(i)
1642 1644 l = self.manifest.linkrev(n)
1643 1645
1644 1646 if l < 0 or l >= self.changelog.count():
1645 1647 err(_("bad manifest link (%d) at revision %d") % (l, i))
1646 1648
1647 1649 if n in neededmanifests:
1648 1650 del neededmanifests[n]
1649 1651
1650 1652 if n in seen:
1651 1653 err(_("duplicate manifest at revision %d") % i)
1652 1654
1653 1655 seen[n] = 1
1654 1656
1655 1657 for p in self.manifest.parents(n):
1656 1658 if p not in self.manifest.nodemap:
1657 1659 err(_("manifest %s has unknown parent %s") %
1658 1660 (short(n), short(p)))
1659 1661
1660 1662 try:
1661 1663 delta = mdiff.patchtext(self.manifest.delta(n))
1662 1664 except KeyboardInterrupt:
1663 1665 self.ui.warn(_("interrupted"))
1664 1666 raise
1665 1667 except Exception, inst:
1666 1668 err(_("unpacking manifest %s: %s") % (short(n), inst))
1667 1669
1668 1670 ff = [ l.split('\0') for l in delta.splitlines() ]
1669 1671 for f, fn in ff:
1670 1672 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1671 1673
1672 1674 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1673 1675
1674 1676 for m,c in neededmanifests.items():
1675 1677 err(_("Changeset %s refers to unknown manifest %s") %
1676 1678 (short(m), short(c)))
1677 1679 del neededmanifests
1678 1680
1679 1681 for f in filenodes:
1680 1682 if f not in filelinkrevs:
1681 1683 err(_("file %s in manifest but not in changesets") % f)
1682 1684
1683 1685 for f in filelinkrevs:
1684 1686 if f not in filenodes:
1685 1687 err(_("file %s in changeset but not in manifest") % f)
1686 1688
1687 1689 self.ui.status(_("checking files\n"))
1688 1690 ff = filenodes.keys()
1689 1691 ff.sort()
1690 1692 for f in ff:
1691 1693 if f == "/dev/null": continue
1692 1694 files += 1
1693 1695 fl = self.file(f)
1694 1696 d = fl.checksize()
1695 1697 if d:
1696 1698 err(_("%s file data short %d bytes") % (f, d))
1697 1699
1698 1700 nodes = { nullid: 1 }
1699 1701 seen = {}
1700 1702 for i in range(fl.count()):
1701 1703 revisions += 1
1702 1704 n = fl.node(i)
1703 1705
1704 1706 if n in seen:
1705 1707 err(_("%s: duplicate revision %d") % (f, i))
1706 1708 if n not in filenodes[f]:
1707 1709 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1708 1710 else:
1709 1711 del filenodes[f][n]
1710 1712
1711 1713 flr = fl.linkrev(n)
1712 1714 if flr not in filelinkrevs[f]:
1713 1715 err(_("%s:%s points to unexpected changeset %d")
1714 1716 % (f, short(n), flr))
1715 1717 else:
1716 1718 filelinkrevs[f].remove(flr)
1717 1719
1718 1720 # verify contents
1719 1721 try:
1720 1722 t = fl.read(n)
1721 1723 except KeyboardInterrupt:
1722 1724 self.ui.warn(_("interrupted"))
1723 1725 raise
1724 1726 except Exception, inst:
1725 1727 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1726 1728
1727 1729 # verify parents
1728 1730 (p1, p2) = fl.parents(n)
1729 1731 if p1 not in nodes:
1730 1732 err(_("file %s:%s unknown parent 1 %s") %
1731 1733 (f, short(n), short(p1)))
1732 1734 if p2 not in nodes:
1733 1735 err(_("file %s:%s unknown parent 2 %s") %
1734 1736 (f, short(n), short(p2)))
1735 1737 nodes[n] = 1
1736 1738
1737 1739 # cross-check
1738 1740 for node in filenodes[f]:
1739 1741 err(_("node %s in manifests not in %s") % (hex(node), f))
1740 1742
1741 1743 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1742 1744 (files, changesets, revisions))
1743 1745
1744 1746 if errors[0]:
1745 1747 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1746 1748 return 1
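# Usage sketch: verify() cross-checks changelog, manifest and filelog
# linkrevs and parents, printing one message per problem, e.g.
#
#   if repo.verify():
#       pass   # non-zero return means integrity errors were reported
#
# A clean run ends with only the "%d files, %d changesets, ..." summary.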