Renamed localrepo.undo() to rollback() and talk about "rollback information".
Thomas Arendsen Hein - r2362:482d3fb4 default
@@ -1,3481 +1,3481 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival changegroup")
16 16 demandload(globals(), "mercurial.hgweb.server:create_server")
17 17 demandload(globals(), "mercurial.hgweb:hgweb,hgwebdir")
18 18
19 19 class UnknownCommand(Exception):
20 20 """Exception raised if command is not in the command table."""
21 21 class AmbiguousCommand(Exception):
22 22 """Exception raised if command shortcut matches more than one command."""
23 23
24 24 def bail_if_changed(repo):
25 25 modified, added, removed, deleted, unknown = repo.changes()
26 26 if modified or added or removed or deleted:
27 27 raise util.Abort(_("outstanding uncommitted changes"))
28 28
29 29 def filterfiles(filters, files):
30 30 l = [x for x in files if x in filters]
31 31
32 32 for t in filters:
33 33 if t and t[-1] != "/":
34 34 t += "/"
35 35 l += [x for x in files if x.startswith(t)]
36 36 return l
37 37
38 38 def relpath(repo, args):
39 39 cwd = repo.getcwd()
40 40 if cwd:
41 41 return [util.normpath(os.path.join(cwd, x)) for x in args]
42 42 return args
43 43
44 44 def matchpats(repo, pats=[], opts={}, head=''):
45 45 cwd = repo.getcwd()
46 46 if not pats and cwd:
47 47 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
48 48 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
49 49 cwd = ''
50 50 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
51 51 opts.get('exclude'), head)
52 52
53 53 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
54 54 files, matchfn, anypats = matchpats(repo, pats, opts, head)
55 55 exact = dict(zip(files, files))
56 56 def walk():
57 57 for src, fn in repo.walk(node=node, files=files, match=matchfn,
58 58 badmatch=badmatch):
59 59 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
60 60 return files, matchfn, walk()
61 61
62 62 def walk(repo, pats, opts, node=None, head='', badmatch=None):
63 63 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
64 64 for r in results:
65 65 yield r
66 66
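# Editor's example (not part of this changeset): a minimal sketch of how
# commands consume the walk() helper above.  Each item is a
# (src, abs, rel, exact) tuple: the match source, the repository-relative
# path, the cwd-relative path, and whether the file was named exactly on
# the command line.  The function name and opts keys are illustrative.
def _example_list_unknown(ui, repo, pats):
    opts = {'include': [], 'exclude': []}
    for src, abs, rel, exact in walk(repo, pats, opts):
        if repo.dirstate.state(abs) == '?':
            ui.status(_('unknown file %s\n') % rel)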
67 67 def walkchangerevs(ui, repo, pats, opts):
68 68 '''Iterate over files and the revs they changed in.
69 69
70 70 Callers most commonly need to iterate backwards over the history
71 71 they are interested in. Doing so has awful (quadratic-looking)
72 72 performance, so we use iterators in a "windowed" way.
73 73
74 74 We walk a window of revisions in the desired order. Within the
75 75 window, we first walk forwards to gather data, then in the desired
76 76 order (usually backwards) to display it.
77 77
78 78 This function returns an (iterator, getchange, matchfn) tuple. The
79 79 getchange function returns the changelog entry for a numeric
80 80 revision. The iterator yields 3-tuples. They will be of one of
81 81 the following forms:
82 82
83 83 "window", incrementing, lastrev: stepping through a window,
84 84 positive if walking forwards through revs, last rev in the
85 85 sequence iterated over - use to reset state for the current window
86 86
87 87 "add", rev, fns: out-of-order traversal of the given file names
88 88 fns, which changed during revision rev - use to gather data for
89 89 possible display
90 90
91 91 "iter", rev, None: in-order traversal of the revs earlier iterated
92 92 over with "add" - use to display data'''
93 93
94 94 def increasing_windows(start, end, windowsize=8, sizelimit=512):
95 95 if start < end:
96 96 while start < end:
97 97 yield start, min(windowsize, end-start)
98 98 start += windowsize
99 99 if windowsize < sizelimit:
100 100 windowsize *= 2
101 101 else:
102 102 while start > end:
103 103 yield start, min(windowsize, start-end-1)
104 104 start -= windowsize
105 105 if windowsize < sizelimit:
106 106 windowsize *= 2
107 107
108 108
109 109 files, matchfn, anypats = matchpats(repo, pats, opts)
110 110
111 111 if repo.changelog.count() == 0:
112 112 return [], False, matchfn
113 113
114 114 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
115 115 wanted = {}
116 116 slowpath = anypats
117 117 fncache = {}
118 118
119 119 chcache = {}
120 120 def getchange(rev):
121 121 ch = chcache.get(rev)
122 122 if ch is None:
123 123 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
124 124 return ch
125 125
126 126 if not slowpath and not files:
127 127 # No files, no patterns. Display all revs.
128 128 wanted = dict(zip(revs, revs))
129 129 if not slowpath:
130 130 # Only files, no patterns. Check the history of each file.
131 131 def filerevgen(filelog):
132 132 for i, window in increasing_windows(filelog.count()-1, -1):
133 133 revs = []
134 134 for j in xrange(i - window, i + 1):
135 135 revs.append(filelog.linkrev(filelog.node(j)))
136 136 revs.reverse()
137 137 for rev in revs:
138 138 yield rev
139 139
140 140 minrev, maxrev = min(revs), max(revs)
141 141 for file_ in files:
142 142 filelog = repo.file(file_)
143 143 # A zero count may be a directory or deleted file, so
144 144 # try to find matching entries on the slow path.
145 145 if filelog.count() == 0:
146 146 slowpath = True
147 147 break
148 148 for rev in filerevgen(filelog):
149 149 if rev <= maxrev:
150 150 if rev < minrev:
151 151 break
152 152 fncache.setdefault(rev, [])
153 153 fncache[rev].append(file_)
154 154 wanted[rev] = 1
155 155 if slowpath:
156 156 # The slow path checks files modified in every changeset.
157 157 def changerevgen():
158 158 for i, window in increasing_windows(repo.changelog.count()-1, -1):
159 159 for j in xrange(i - window, i + 1):
160 160 yield j, getchange(j)[3]
161 161
162 162 for rev, changefiles in changerevgen():
163 163 matches = filter(matchfn, changefiles)
164 164 if matches:
165 165 fncache[rev] = matches
166 166 wanted[rev] = 1
167 167
168 168 def iterate():
169 169 for i, window in increasing_windows(0, len(revs)):
170 170 yield 'window', revs[0] < revs[-1], revs[-1]
171 171 nrevs = [rev for rev in revs[i:i+window]
172 172 if rev in wanted]
173 173 srevs = list(nrevs)
174 174 srevs.sort()
175 175 for rev in srevs:
176 176 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
177 177 yield 'add', rev, fns
178 178 for rev in nrevs:
179 179 yield 'iter', rev, None
180 180 return iterate(), getchange, matchfn
181 181
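# Editor's example (not part of this changeset): a sketch of the consumer
# protocol described in the walkchangerevs() docstring.  Data is gathered
# out of order on 'add' events and emitted in order on 'iter' events,
# while 'window' events reset per-window state.  The opts keys and the
# function name are illustrative.
def _example_log_summaries(ui, repo, pats):
    opts = {'rev': [], 'include': [], 'exclude': []}
    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    summaries = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            summaries.clear()
        elif st == 'add':
            lines = getchange(rev)[4].splitlines()
            summaries[rev] = (lines and lines[0]) or ''
        elif st == 'iter':
            ui.write('%d: %s\n' % (rev, summaries[rev]))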
182 182 revrangesep = ':'
183 183
184 184 def revfix(repo, val, defval):
185 185 '''turn user-level id of changeset into rev number.
186 186 user-level id can be tag, changeset, rev number, or negative rev
187 187 number relative to number of revs (-1 is tip, etc).'''
188 188 if not val:
189 189 return defval
190 190 try:
191 191 num = int(val)
192 192 if str(num) != val:
193 193 raise ValueError
194 194 if num < 0:
195 195 num += repo.changelog.count()
196 196 if num < 0:
197 197 num = 0
198 198 elif num >= repo.changelog.count():
199 199 raise ValueError
200 200 except ValueError:
201 201 try:
202 202 num = repo.changelog.rev(repo.lookup(val))
203 203 except KeyError:
204 204 raise util.Abort(_('invalid revision identifier %s'), val)
205 205 return num
206 206
207 207 def revpair(ui, repo, revs):
208 208 '''return pair of nodes, given list of revisions. second item can
209 209 be None, meaning use working dir.'''
210 210 if not revs:
211 211 return repo.dirstate.parents()[0], None
212 212 end = None
213 213 if len(revs) == 1:
214 214 start = revs[0]
215 215 if revrangesep in start:
216 216 start, end = start.split(revrangesep, 1)
217 217 start = revfix(repo, start, 0)
218 218 end = revfix(repo, end, repo.changelog.count() - 1)
219 219 else:
220 220 start = revfix(repo, start, None)
221 221 elif len(revs) == 2:
222 222 if revrangesep in revs[0] or revrangesep in revs[1]:
223 223 raise util.Abort(_('too many revisions specified'))
224 224 start = revfix(repo, revs[0], None)
225 225 end = revfix(repo, revs[1], None)
226 226 else:
227 227 raise util.Abort(_('too many revisions specified'))
228 228 if end is not None: end = repo.lookup(str(end))
229 229 return repo.lookup(str(start)), end
230 230
231 231 def revrange(ui, repo, revs):
232 232 """Yield revision as strings from a list of revision specifications."""
233 233 seen = {}
234 234 for spec in revs:
235 235 if spec.find(revrangesep) >= 0:
236 236 start, end = spec.split(revrangesep, 1)
237 237 start = revfix(repo, start, 0)
238 238 end = revfix(repo, end, repo.changelog.count() - 1)
239 239 step = start > end and -1 or 1
240 240 for rev in xrange(start, end+step, step):
241 241 if rev in seen:
242 242 continue
243 243 seen[rev] = 1
244 244 yield str(rev)
245 245 else:
246 246 rev = revfix(repo, spec, None)
247 247 if rev in seen:
248 248 continue
249 249 seen[rev] = 1
250 250 yield str(rev)
251 251
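# Editor's worked example (not part of this changeset): how the revision
# helpers above interpret user input, assuming a repository with ten
# changesets (revisions 0..9, tip == 9):
#
#   revfix(repo, '-1', None)              -> 9   (negative revs count from tip)
#   revfix(repo, '', 0)                   -> 0   (empty spec uses the default)
#   list(revrange(ui, repo, ['2:0']))     -> ['2', '1', '0']  (descending range)
#   list(revrange(ui, repo, ['7', '7']))  -> ['7']            (duplicates skipped)
#   revpair(ui, repo, ['2:5'])            -> (node of rev 2, node of rev 5)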
252 252 def make_filename(repo, r, pat, node=None,
253 253 total=None, seqno=None, revwidth=None, pathname=None):
254 254 node_expander = {
255 255 'H': lambda: hex(node),
256 256 'R': lambda: str(r.rev(node)),
257 257 'h': lambda: short(node),
258 258 }
259 259 expander = {
260 260 '%': lambda: '%',
261 261 'b': lambda: os.path.basename(repo.root),
262 262 }
263 263
264 264 try:
265 265 if node:
266 266 expander.update(node_expander)
267 267 if node and revwidth is not None:
268 268 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
269 269 if total is not None:
270 270 expander['N'] = lambda: str(total)
271 271 if seqno is not None:
272 272 expander['n'] = lambda: str(seqno)
273 273 if total is not None and seqno is not None:
274 274 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
275 275 if pathname is not None:
276 276 expander['s'] = lambda: os.path.basename(pathname)
277 277 expander['d'] = lambda: os.path.dirname(pathname) or '.'
278 278 expander['p'] = lambda: pathname
279 279
280 280 newname = []
281 281 patlen = len(pat)
282 282 i = 0
283 283 while i < patlen:
284 284 c = pat[i]
285 285 if c == '%':
286 286 i += 1
287 287 c = pat[i]
288 288 c = expander[c]()
289 289 newname.append(c)
290 290 i += 1
291 291 return ''.join(newname)
292 292 except KeyError, inst:
293 293 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
294 294 inst.args[0])
295 295
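# Editor's worked example (not part of this changeset): make_filename()
# implements the --output format strings documented under "hg help export".
# Assuming a repository rooted at /home/user/proj and a node whose short
# hash is a1b2c3d4e5f6 at revision 42, with total=10, seqno=3, revwidth=3:
#
#   make_filename(repo, repo.changelog, '%b-%h.patch', node=node)
#       -> 'proj-a1b2c3d4e5f6.patch'
#   make_filename(repo, repo.changelog, '%n-of-%N-r%r.patch', node=node,
#                 total=10, seqno=3, revwidth=3)
#       -> '03-of-10-r042.patch'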
296 296 def make_file(repo, r, pat, node=None,
297 297 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
298 298 if not pat or pat == '-':
299 299 return 'w' in mode and sys.stdout or sys.stdin
300 300 if hasattr(pat, 'write') and 'w' in mode:
301 301 return pat
302 302 if hasattr(pat, 'read') and 'r' in mode:
303 303 return pat
304 304 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
305 305 pathname),
306 306 mode)
307 307
308 308 def write_bundle(cg, filename=None, compress=True):
309 309 """Write a bundle file and return its filename.
310 310
311 311 Existing files will not be overwritten.
312 312 If no filename is specified, a temporary file is created.
313 313 bz2 compression can be turned off.
314 314 The bundle file will be deleted in case of errors.
315 315 """
316 316 class nocompress(object):
317 317 def compress(self, x):
318 318 return x
319 319 def flush(self):
320 320 return ""
321 321
322 322 fh = None
323 323 cleanup = None
324 324 try:
325 325 if filename:
326 326 if os.path.exists(filename):
327 327 raise util.Abort(_("file '%s' already exists"), filename)
328 328 fh = open(filename, "wb")
329 329 else:
330 330 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
331 331 fh = os.fdopen(fd, "wb")
332 332 cleanup = filename
333 333
334 334 if compress:
335 335 fh.write("HG10")
336 336 z = bz2.BZ2Compressor(9)
337 337 else:
338 338 fh.write("HG10UN")
339 339 z = nocompress()
340 340 # parse the changegroup data, otherwise we will block
341 341 # in case of sshrepo because we don't know the end of the stream
342 342
343 343 # an empty chunkiter is the end of the changegroup
344 344 empty = False
345 345 while not empty:
346 346 empty = True
347 347 for chunk in changegroup.chunkiter(cg):
348 348 empty = False
349 349 fh.write(z.compress(changegroup.genchunk(chunk)))
350 350 fh.write(z.compress(changegroup.closechunk()))
351 351 fh.write(z.flush())
352 352 cleanup = None
353 353 return filename
354 354 finally:
355 355 if fh is not None:
356 356 fh.close()
357 357 if cleanup is not None:
358 358 os.unlink(cleanup)
359 359
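# Editor's example (not part of this changeset): a sketch of how a reader
# can tell the two headers written by write_bundle() apart.  The helper
# name is illustrative; the real consumers are the pull/unbundle paths in
# bundlerepo.
def _example_bundle_header(filename):
    fh = open(filename, "rb")
    header = fh.read(6)
    fh.close()
    if header == "HG10UN":
        return "uncompressed"
    elif header.startswith("HG10"):
        return "bz2 compressed"
    raise util.Abort(_("%s: not a Mercurial bundle file") % filename)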
360 360 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
361 361 changes=None, text=False, opts={}):
362 362 if not node1:
363 363 node1 = repo.dirstate.parents()[0]
364 364 # reading the data for node1 early allows it to play nicely
365 365 # with repo.changes and the revlog cache.
366 366 change = repo.changelog.read(node1)
367 367 mmap = repo.manifest.read(change[0])
368 368 date1 = util.datestr(change[2])
369 369
370 370 if not changes:
371 371 changes = repo.changes(node1, node2, files, match=match)
372 372 modified, added, removed, deleted, unknown = changes
373 373 if files:
374 374 modified, added, removed = map(lambda x: filterfiles(files, x),
375 375 (modified, added, removed))
376 376
377 377 if not modified and not added and not removed:
378 378 return
379 379
380 380 if node2:
381 381 change = repo.changelog.read(node2)
382 382 mmap2 = repo.manifest.read(change[0])
383 383 date2 = util.datestr(change[2])
384 384 def read(f):
385 385 return repo.file(f).read(mmap2[f])
386 386 else:
387 387 date2 = util.datestr()
388 388 def read(f):
389 389 return repo.wread(f)
390 390
391 391 if ui.quiet:
392 392 r = None
393 393 else:
394 394 hexfunc = ui.verbose and hex or short
395 395 r = [hexfunc(node) for node in [node1, node2] if node]
396 396
397 397 diffopts = ui.diffopts()
398 398 showfunc = opts.get('show_function') or diffopts['showfunc']
399 399 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
400 400 for f in modified:
401 401 to = None
402 402 if f in mmap:
403 403 to = repo.file(f).read(mmap[f])
404 404 tn = read(f)
405 405 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
406 406 showfunc=showfunc, ignorews=ignorews))
407 407 for f in added:
408 408 to = None
409 409 tn = read(f)
410 410 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
411 411 showfunc=showfunc, ignorews=ignorews))
412 412 for f in removed:
413 413 to = repo.file(f).read(mmap[f])
414 414 tn = None
415 415 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
416 416 showfunc=showfunc, ignorews=ignorews))
417 417
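# Editor's example (not part of this changeset): dodiff() is the shared
# engine behind 'hg diff' and 'hg export'.  Passing node1=None diffs from
# the first working-directory parent, and node2=None diffs against the
# working directory itself.
def _example_diff_working_dir(ui, repo):
    dodiff(sys.stdout, ui, repo, None, None)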
418 418 def trimuser(ui, name, rev, revcache):
419 419 """trim the name of the user who committed a change"""
420 420 user = revcache.get(rev)
421 421 if user is None:
422 422 user = revcache[rev] = ui.shortuser(name)
423 423 return user
424 424
425 425 class changeset_printer(object):
426 426 '''show changeset information when templating not requested.'''
427 427
428 428 def __init__(self, ui, repo):
429 429 self.ui = ui
430 430 self.repo = repo
431 431
432 432 def show(self, rev=0, changenode=None, brinfo=None):
433 433 '''show a single changeset or file revision'''
434 434 log = self.repo.changelog
435 435 if changenode is None:
436 436 changenode = log.node(rev)
437 437 elif not rev:
438 438 rev = log.rev(changenode)
439 439
440 440 if self.ui.quiet:
441 441 self.ui.write("%d:%s\n" % (rev, short(changenode)))
442 442 return
443 443
444 444 changes = log.read(changenode)
445 445 date = util.datestr(changes[2])
446 446
447 447 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
448 448 for p in log.parents(changenode)
449 449 if self.ui.debugflag or p != nullid]
450 450 if (not self.ui.debugflag and len(parents) == 1 and
451 451 parents[0][0] == rev-1):
452 452 parents = []
453 453
454 454 if self.ui.verbose:
455 455 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
456 456 else:
457 457 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
458 458
459 459 for tag in self.repo.nodetags(changenode):
460 460 self.ui.status(_("tag: %s\n") % tag)
461 461 for parent in parents:
462 462 self.ui.write(_("parent: %d:%s\n") % parent)
463 463
464 464 if brinfo and changenode in brinfo:
465 465 br = brinfo[changenode]
466 466 self.ui.write(_("branch: %s\n") % " ".join(br))
467 467
468 468 self.ui.debug(_("manifest: %d:%s\n") %
469 469 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
470 470 self.ui.status(_("user: %s\n") % changes[1])
471 471 self.ui.status(_("date: %s\n") % date)
472 472
473 473 if self.ui.debugflag:
474 474 files = self.repo.changes(log.parents(changenode)[0], changenode)
475 475 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
476 476 files):
477 477 if value:
478 478 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
479 479 else:
480 480 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
481 481
482 482 description = changes[4].strip()
483 483 if description:
484 484 if self.ui.verbose:
485 485 self.ui.status(_("description:\n"))
486 486 self.ui.status(description)
487 487 self.ui.status("\n\n")
488 488 else:
489 489 self.ui.status(_("summary: %s\n") %
490 490 description.splitlines()[0])
491 491 self.ui.status("\n")
492 492
493 493 def show_changeset(ui, repo, opts):
494 494 '''show one changeset. uses template or regular display. caller
495 495 can pass in 'style' and 'template' options in opts.'''
496 496
497 497 tmpl = opts.get('template')
498 498 if tmpl:
499 499 tmpl = templater.parsestring(tmpl, quoted=False)
500 500 else:
501 501 tmpl = ui.config('ui', 'logtemplate')
502 502 if tmpl: tmpl = templater.parsestring(tmpl)
503 503 mapfile = opts.get('style') or ui.config('ui', 'style')
504 504 if tmpl or mapfile:
505 505 if mapfile:
506 506 if not os.path.isfile(mapfile):
507 507 mapname = templater.templatepath('map-cmdline.' + mapfile)
508 508 if not mapname: mapname = templater.templatepath(mapfile)
509 509 if mapname: mapfile = mapname
510 510 try:
511 511 t = templater.changeset_templater(ui, repo, mapfile)
512 512 except SyntaxError, inst:
513 513 raise util.Abort(inst.args[0])
514 514 if tmpl: t.use_template(tmpl)
515 515 return t
516 516 return changeset_printer(ui, repo)
517 517
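# Editor's example (not part of this changeset): commands obtain a display
# object from show_changeset() and call .show() per revision.  This sketch
# assumes no ui.logtemplate or ui.style is configured, so the
# changeset_printer defined above is returned, and that the repository has
# at least one changeset.
def _example_show_tip(ui, repo):
    displayer = show_changeset(ui, repo, {'template': None, 'style': None})
    displayer.show(rev=repo.changelog.count() - 1)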
518 518 def show_version(ui):
519 519 """output version and copyright information"""
520 520 ui.write(_("Mercurial Distributed SCM (version %s)\n")
521 521 % version.get_version())
522 522 ui.status(_(
523 523 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
524 524 "This is free software; see the source for copying conditions. "
525 525 "There is NO\nwarranty; "
526 526 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
527 527 ))
528 528
529 529 def help_(ui, cmd=None, with_version=False):
530 530 """show help for a given command or all commands"""
531 531 option_lists = []
532 532 if cmd and cmd != 'shortlist':
533 533 if with_version:
534 534 show_version(ui)
535 535 ui.write('\n')
536 536 aliases, i = find(cmd)
537 537 # synopsis
538 538 ui.write("%s\n\n" % i[2])
539 539
540 540 # description
541 541 doc = i[0].__doc__
542 542 if not doc:
543 543 doc = _("(No help text available)")
544 544 if ui.quiet:
545 545 doc = doc.splitlines(0)[0]
546 546 ui.write("%s\n" % doc.rstrip())
547 547
548 548 if not ui.quiet:
549 549 # aliases
550 550 if len(aliases) > 1:
551 551 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
552 552
553 553 # options
554 554 if i[1]:
555 555 option_lists.append(("options", i[1]))
556 556
557 557 else:
558 558 # program name
559 559 if ui.verbose or with_version:
560 560 show_version(ui)
561 561 else:
562 562 ui.status(_("Mercurial Distributed SCM\n"))
563 563 ui.status('\n')
564 564
565 565 # list of commands
566 566 if cmd == "shortlist":
567 567 ui.status(_('basic commands (use "hg help" '
568 568 'for the full list or option "-v" for details):\n\n'))
569 569 elif ui.verbose:
570 570 ui.status(_('list of commands:\n\n'))
571 571 else:
572 572 ui.status(_('list of commands (use "hg help -v" '
573 573 'to show aliases and global options):\n\n'))
574 574
575 575 h = {}
576 576 cmds = {}
577 577 for c, e in table.items():
578 578 f = c.split("|")[0]
579 579 if cmd == "shortlist" and not f.startswith("^"):
580 580 continue
581 581 f = f.lstrip("^")
582 582 if not ui.debugflag and f.startswith("debug"):
583 583 continue
584 584 doc = e[0].__doc__
585 585 if not doc:
586 586 doc = _("(No help text available)")
587 587 h[f] = doc.splitlines(0)[0].rstrip()
588 588 cmds[f] = c.lstrip("^")
589 589
590 590 fns = h.keys()
591 591 fns.sort()
592 592 m = max(map(len, fns))
593 593 for f in fns:
594 594 if ui.verbose:
595 595 commands = cmds[f].replace("|",", ")
596 596 ui.write(" %s:\n %s\n"%(commands, h[f]))
597 597 else:
598 598 ui.write(' %-*s %s\n' % (m, f, h[f]))
599 599
600 600 # global options
601 601 if ui.verbose:
602 602 option_lists.append(("global options", globalopts))
603 603
604 604 # list all option lists
605 605 opt_output = []
606 606 for title, options in option_lists:
607 607 opt_output.append(("\n%s:\n" % title, None))
608 608 for shortopt, longopt, default, desc in options:
609 609 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
610 610 longopt and " --%s" % longopt),
611 611 "%s%s" % (desc,
612 612 default
613 613 and _(" (default: %s)") % default
614 614 or "")))
615 615
616 616 if opt_output:
617 617 opts_len = max([len(line[0]) for line in opt_output if line[1]])
618 618 for first, second in opt_output:
619 619 if second:
620 620 ui.write(" %-*s %s\n" % (opts_len, first, second))
621 621 else:
622 622 ui.write("%s\n" % first)
623 623
624 624 # Commands start here, listed alphabetically
625 625
626 626 def add(ui, repo, *pats, **opts):
627 627 """add the specified files on the next commit
628 628
629 629 Schedule files to be version controlled and added to the repository.
630 630
631 631 The files will be added to the repository at the next commit.
632 632
633 633 If no names are given, add all files in the repository.
634 634 """
635 635
636 636 names = []
637 637 for src, abs, rel, exact in walk(repo, pats, opts):
638 638 if exact:
639 639 if ui.verbose:
640 640 ui.status(_('adding %s\n') % rel)
641 641 names.append(abs)
642 642 elif repo.dirstate.state(abs) == '?':
643 643 ui.status(_('adding %s\n') % rel)
644 644 names.append(abs)
645 645 repo.add(names)
646 646
647 647 def addremove(ui, repo, *pats, **opts):
648 648 """add all new files, delete all missing files (DEPRECATED)
649 649
650 650 (DEPRECATED)
651 651 Add all new files and remove all missing files from the repository.
652 652
653 653 New files are ignored if they match any of the patterns in .hgignore. As
654 654 with add, these changes take effect at the next commit.
655 655
656 656 This command is now deprecated and will be removed in a future
657 657 release. Please use add and remove --after instead.
658 658 """
659 659 ui.warn(_('(the addremove command is deprecated; use add and remove '
660 660 '--after instead)\n'))
661 661 return addremove_lock(ui, repo, pats, opts)
662 662
663 663 def addremove_lock(ui, repo, pats, opts, wlock=None):
664 664 add, remove = [], []
665 665 for src, abs, rel, exact in walk(repo, pats, opts):
666 666 if src == 'f' and repo.dirstate.state(abs) == '?':
667 667 add.append(abs)
668 668 if ui.verbose or not exact:
669 669 ui.status(_('adding %s\n') % ((pats and rel) or abs))
670 670 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
671 671 remove.append(abs)
672 672 if ui.verbose or not exact:
673 673 ui.status(_('removing %s\n') % ((pats and rel) or abs))
674 674 repo.add(add, wlock=wlock)
675 675 repo.remove(remove, wlock=wlock)
676 676
677 677 def annotate(ui, repo, *pats, **opts):
678 678 """show changeset information per file line
679 679
680 680 List changes in files, showing the revision id responsible for each line
681 681
682 682 This command is useful for discovering who made a change or when a change took
683 683 place.
684 684
685 685 Without the -a option, annotate will avoid processing files it
686 686 detects as binary. With -a, annotate will generate an annotation
687 687 anyway, probably with undesirable results.
688 688 """
689 689 def getnode(rev):
690 690 return short(repo.changelog.node(rev))
691 691
692 692 ucache = {}
693 693 def getname(rev):
694 694 cl = repo.changelog.read(repo.changelog.node(rev))
695 695 return trimuser(ui, cl[1], rev, ucache)
696 696
697 697 dcache = {}
698 698 def getdate(rev):
699 699 datestr = dcache.get(rev)
700 700 if datestr is None:
701 701 cl = repo.changelog.read(repo.changelog.node(rev))
702 702 datestr = dcache[rev] = util.datestr(cl[2])
703 703 return datestr
704 704
705 705 if not pats:
706 706 raise util.Abort(_('at least one file name or pattern required'))
707 707
708 708 opmap = [['user', getname], ['number', str], ['changeset', getnode],
709 709 ['date', getdate]]
710 710 if not opts['user'] and not opts['changeset'] and not opts['date']:
711 711 opts['number'] = 1
712 712
713 713 if opts['rev']:
714 714 node = repo.changelog.lookup(opts['rev'])
715 715 else:
716 716 node = repo.dirstate.parents()[0]
717 717 change = repo.changelog.read(node)
718 718 mmap = repo.manifest.read(change[0])
719 719
720 720 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
721 721 f = repo.file(abs)
722 722 if not opts['text'] and util.binary(f.read(mmap[abs])):
723 723 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
724 724 continue
725 725
726 726 lines = f.annotate(mmap[abs])
727 727 pieces = []
728 728
729 729 for o, f in opmap:
730 730 if opts[o]:
731 731 l = [f(n) for n, dummy in lines]
732 732 if l:
733 733 m = max(map(len, l))
734 734 pieces.append(["%*s" % (m, x) for x in l])
735 735
736 736 if pieces:
737 737 for p, l in zip(zip(*pieces), lines):
738 738 ui.write("%s: %s" % (" ".join(p), l[1]))
739 739
740 740 def archive(ui, repo, dest, **opts):
741 741 '''create unversioned archive of a repository revision
742 742
743 743 By default, the revision used is the parent of the working
744 744 directory; use "-r" to specify a different revision.
745 745
746 746 To specify the type of archive to create, use "-t". Valid
747 747 types are:
748 748
749 749 "files" (default): a directory full of files
750 750 "tar": tar archive, uncompressed
751 751 "tbz2": tar archive, compressed using bzip2
752 752 "tgz": tar archive, compressed using gzip
753 753 "uzip": zip archive, uncompressed
754 754 "zip": zip archive, compressed using deflate
755 755
756 756 The exact name of the destination archive or directory is given
757 757 using a format string; see "hg help export" for details.
758 758
759 759 Each member added to an archive file has a directory prefix
760 760 prepended. Use "-p" to specify a format string for the prefix.
761 761 The default is the basename of the archive, with suffixes removed.
762 762 '''
763 763
764 764 if opts['rev']:
765 765 node = repo.lookup(opts['rev'])
766 766 else:
767 767 node, p2 = repo.dirstate.parents()
768 768 if p2 != nullid:
769 769 raise util.Abort(_('uncommitted merge - please provide a '
770 770 'specific revision'))
771 771
772 772 dest = make_filename(repo, repo.changelog, dest, node)
773 773 prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
774 774 if os.path.realpath(dest) == repo.root:
775 775 raise util.Abort(_('repository root cannot be destination'))
776 776 dummy, matchfn, dummy = matchpats(repo, [], opts)
777 777 archival.archive(repo, dest, node, opts.get('type') or 'files',
778 778 not opts['no_decode'], matchfn, prefix)
779 779
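# Editor's example (not part of this changeset): driving the archive
# command programmatically.  The destination pattern uses the same '%h'
# expansion as make_filename(); all option values shown are illustrative.
def _example_snapshot(ui, repo):
    archive(ui, repo, 'snapshot-%h.tar.gz', rev='tip', type='tgz',
            prefix='', no_decode=False, include=[], exclude=[])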
780 780 def backout(ui, repo, rev, **opts):
781 781 '''reverse effect of earlier changeset
782 782
783 783 Commit the backed out changes as a new changeset. The new
784 784 changeset is a child of the backed out changeset.
785 785
786 786 If you back out a changeset other than the tip, a new head is
787 787 created. This head is the parent of the working directory. If
788 788 you back out an old changeset, your working directory will appear
789 789 old after the backout. You should merge the backout changeset
790 790 with another head.
791 791
792 792 The --merge option remembers the parent of the working directory
793 793 before starting the backout, then merges the new head with that
794 794 changeset afterwards. This saves you from doing the merge by
795 795 hand. The result of this merge is not committed, as for a normal
796 796 merge.'''
797 797
798 798 bail_if_changed(repo)
799 799 op1, op2 = repo.dirstate.parents()
800 800 if op2 != nullid:
801 801 raise util.Abort(_('outstanding uncommitted merge'))
802 802 node = repo.lookup(rev)
803 803 parent, p2 = repo.changelog.parents(node)
804 804 if parent == nullid:
805 805 raise util.Abort(_('cannot back out a change with no parents'))
806 806 if p2 != nullid:
807 807 raise util.Abort(_('cannot back out a merge'))
808 808 repo.update(node, force=True, show_stats=False)
809 809 revert_opts = opts.copy()
810 810 revert_opts['rev'] = hex(parent)
811 811 revert(ui, repo, **revert_opts)
812 812 commit_opts = opts.copy()
813 813 commit_opts['addremove'] = False
814 814 if not commit_opts['message'] and not commit_opts['logfile']:
815 815 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
816 816 commit_opts['force_editor'] = True
817 817 commit(ui, repo, **commit_opts)
818 818 def nice(node):
819 819 return '%d:%s' % (repo.changelog.rev(node), short(node))
820 820 ui.status(_('changeset %s backs out changeset %s\n') %
821 821 (nice(repo.changelog.tip()), nice(node)))
822 822 if opts['merge'] and op1 != node:
823 823 ui.status(_('merging with changeset %s\n') % nice(op1))
824 824 doupdate(ui, repo, hex(op1), **opts)
825 825
826 826 def bundle(ui, repo, fname, dest="default-push", **opts):
827 827 """create a changegroup file
828 828
829 829 Generate a compressed changegroup file collecting all changesets
830 830 not found in the other repository.
831 831
832 832 This file can then be transferred using conventional means and
833 833 applied to another repository with the unbundle command. This is
834 834 useful when native push and pull are not available or when
835 835 exporting an entire repository is undesirable. The standard file
836 836 extension is ".hg".
837 837
838 838 Unlike import/export, this exactly preserves all changeset
839 839 contents including permissions, rename data, and revision history.
840 840 """
841 841 dest = ui.expandpath(dest)
842 842 other = hg.repository(ui, dest)
843 843 o = repo.findoutgoing(other, force=opts['force'])
844 844 cg = repo.changegroup(o, 'bundle')
845 845 write_bundle(cg, fname)
846 846
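# Editor's example (not part of this changeset): bundling every changeset
# missing from the default push target into a file; the filename is
# illustrative.
def _example_bundle_outgoing(ui, repo):
    bundle(ui, repo, 'outgoing.hg', dest='default-push', force=False)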
847 847 def cat(ui, repo, file1, *pats, **opts):
848 848 """output the latest or given revisions of files
849 849
850 850 Print the specified files as they were at the given revision.
851 851 If no revision is given then the tip is used.
852 852
853 853 Output may be to a file, in which case the name of the file is
854 854 given using a format string. The formatting rules are the same as
855 855 for the export command, with the following additions:
856 856
857 857 %s basename of file being printed
858 858 %d dirname of file being printed, or '.' if in repo root
859 859 %p root-relative path name of file being printed
860 860 """
861 861 mf = {}
862 862 rev = opts['rev']
863 863 if rev:
864 864 node = repo.lookup(rev)
865 865 else:
866 866 node = repo.changelog.tip()
867 867 change = repo.changelog.read(node)
868 868 mf = repo.manifest.read(change[0])
869 869 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
870 870 r = repo.file(abs)
871 871 n = mf[abs]
872 872 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
873 873 fp.write(r.read(n))
874 874
875 875 def clone(ui, source, dest=None, **opts):
876 876 """make a copy of an existing repository
877 877
878 878 Create a copy of an existing repository in a new directory.
879 879
880 880 If no destination directory name is specified, it defaults to the
881 881 basename of the source.
882 882
883 883 The location of the source is added to the new repository's
884 884 .hg/hgrc file, as the default to be used for future pulls.
885 885
886 886 For efficiency, hardlinks are used for cloning whenever the source
887 887 and destination are on the same filesystem. Some filesystems,
888 888 such as AFS, implement hardlinking incorrectly, but do not report
889 889 errors. In these cases, use the --pull option to avoid
890 890 hardlinking.
891 891
892 892 See pull for valid source format details.
893 893 """
894 894 if dest is None:
895 895 dest = os.path.basename(os.path.normpath(source))
896 896
897 897 if os.path.exists(dest):
898 898 raise util.Abort(_("destination '%s' already exists"), dest)
899 899
900 900 dest = os.path.realpath(dest)
901 901
902 902 class Dircleanup(object):
903 903 def __init__(self, dir_):
904 904 self.rmtree = shutil.rmtree
905 905 self.dir_ = dir_
906 906 os.mkdir(dir_)
907 907 def close(self):
908 908 self.dir_ = None
909 909 def __del__(self):
910 910 if self.dir_:
911 911 self.rmtree(self.dir_, True)
912 912
913 913 if opts['ssh']:
914 914 ui.setconfig("ui", "ssh", opts['ssh'])
915 915 if opts['remotecmd']:
916 916 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
917 917
918 918 source = ui.expandpath(source)
919 919
920 920 d = Dircleanup(dest)
921 921 abspath = source
922 922 other = hg.repository(ui, source)
923 923
924 924 copy = False
925 925 if other.dev() != -1:
926 926 abspath = os.path.abspath(source)
927 927 if not opts['pull'] and not opts['rev']:
928 928 copy = True
929 929
930 930 if copy:
931 931 try:
932 932 # we use a lock here because if we race with commit, we
933 933 # can end up with extra data in the cloned revlogs that's
934 934 # not pointed to by changesets, thus causing verify to
935 935 # fail
936 936 l1 = other.lock()
937 937 except lock.LockException:
938 938 copy = False
939 939
940 940 if copy:
941 941 # we lock here to avoid premature writing to the target
942 942 os.mkdir(os.path.join(dest, ".hg"))
943 943 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
944 944
945 945 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
946 946 for f in files.split():
947 947 src = os.path.join(source, ".hg", f)
948 948 dst = os.path.join(dest, ".hg", f)
949 949 try:
950 950 util.copyfiles(src, dst)
951 951 except OSError, inst:
952 952 if inst.errno != errno.ENOENT:
953 953 raise
954 954
955 955 repo = hg.repository(ui, dest)
956 956
957 957 else:
958 958 revs = None
959 959 if opts['rev']:
960 960 if not other.local():
961 961 error = _("clone -r not supported yet for remote repositories.")
962 962 raise util.Abort(error)
963 963 else:
964 964 revs = [other.lookup(rev) for rev in opts['rev']]
965 965 repo = hg.repository(ui, dest, create=1)
966 966 repo.pull(other, heads = revs)
967 967
968 968 f = repo.opener("hgrc", "w", text=True)
969 969 f.write("[paths]\n")
970 970 f.write("default = %s\n" % abspath)
971 971 f.close()
972 972
973 973 if not opts['noupdate']:
974 974 doupdate(repo.ui, repo)
975 975
976 976 d.close()
977 977
978 978 def commit(ui, repo, *pats, **opts):
979 979 """commit the specified files or all outstanding changes
980 980
981 981 Commit changes to the given files into the repository.
982 982
983 983 If a list of files is omitted, all changes reported by "hg status"
984 984 will be committed.
985 985
986 986 If no commit message is specified, the editor configured in your hgrc
987 987 or in the EDITOR environment variable is started to enter a message.
988 988 """
989 989 message = opts['message']
990 990 logfile = opts['logfile']
991 991
992 992 if message and logfile:
993 993 raise util.Abort(_('options --message and --logfile are mutually '
994 994 'exclusive'))
995 995 if not message and logfile:
996 996 try:
997 997 if logfile == '-':
998 998 message = sys.stdin.read()
999 999 else:
1000 1000 message = open(logfile).read()
1001 1001 except IOError, inst:
1002 1002 raise util.Abort(_("can't read commit message '%s': %s") %
1003 1003 (logfile, inst.strerror))
1004 1004
1005 1005 if opts['addremove']:
1006 1006 addremove_lock(ui, repo, pats, opts)
1007 1007 fns, match, anypats = matchpats(repo, pats, opts)
1008 1008 if pats:
1009 1009 modified, added, removed, deleted, unknown = (
1010 1010 repo.changes(files=fns, match=match))
1011 1011 files = modified + added + removed
1012 1012 else:
1013 1013 files = []
1014 1014 try:
1015 1015 repo.commit(files, message, opts['user'], opts['date'], match,
1016 1016 force_editor=opts.get('force_editor'))
1017 1017 except ValueError, inst:
1018 1018 raise util.Abort(str(inst))
1019 1019
1020 1020 def docopy(ui, repo, pats, opts, wlock):
1021 1021 # called with the repo lock held
1022 1022 cwd = repo.getcwd()
1023 1023 errors = 0
1024 1024 copied = []
1025 1025 targets = {}
1026 1026
1027 1027 def okaytocopy(abs, rel, exact):
1028 1028 reasons = {'?': _('is not managed'),
1029 1029 'a': _('has been marked for add'),
1030 1030 'r': _('has been marked for remove')}
1031 1031 state = repo.dirstate.state(abs)
1032 1032 reason = reasons.get(state)
1033 1033 if reason:
1034 1034 if state == 'a':
1035 1035 origsrc = repo.dirstate.copied(abs)
1036 1036 if origsrc is not None:
1037 1037 return origsrc
1038 1038 if exact:
1039 1039 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1040 1040 else:
1041 1041 return abs
1042 1042
1043 1043 def copy(origsrc, abssrc, relsrc, target, exact):
1044 1044 abstarget = util.canonpath(repo.root, cwd, target)
1045 1045 reltarget = util.pathto(cwd, abstarget)
1046 1046 prevsrc = targets.get(abstarget)
1047 1047 if prevsrc is not None:
1048 1048 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1049 1049 (reltarget, abssrc, prevsrc))
1050 1050 return
1051 1051 if (not opts['after'] and os.path.exists(reltarget) or
1052 1052 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1053 1053 if not opts['force']:
1054 1054 ui.warn(_('%s: not overwriting - file exists\n') %
1055 1055 reltarget)
1056 1056 return
1057 1057 if not opts['after']:
1058 1058 os.unlink(reltarget)
1059 1059 if opts['after']:
1060 1060 if not os.path.exists(reltarget):
1061 1061 return
1062 1062 else:
1063 1063 targetdir = os.path.dirname(reltarget) or '.'
1064 1064 if not os.path.isdir(targetdir):
1065 1065 os.makedirs(targetdir)
1066 1066 try:
1067 1067 restore = repo.dirstate.state(abstarget) == 'r'
1068 1068 if restore:
1069 1069 repo.undelete([abstarget], wlock)
1070 1070 try:
1071 1071 shutil.copyfile(relsrc, reltarget)
1072 1072 shutil.copymode(relsrc, reltarget)
1073 1073 restore = False
1074 1074 finally:
1075 1075 if restore:
1076 1076 repo.remove([abstarget], wlock)
1077 1077 except shutil.Error, inst:
1078 1078 raise util.Abort(str(inst))
1079 1079 except IOError, inst:
1080 1080 if inst.errno == errno.ENOENT:
1081 1081 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1082 1082 else:
1083 1083 ui.warn(_('%s: cannot copy - %s\n') %
1084 1084 (relsrc, inst.strerror))
1085 1085 errors += 1
1086 1086 return
1087 1087 if ui.verbose or not exact:
1088 1088 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1089 1089 targets[abstarget] = abssrc
1090 1090 if abstarget != origsrc:
1091 1091 repo.copy(origsrc, abstarget, wlock)
1092 1092 copied.append((abssrc, relsrc, exact))
1093 1093
1094 1094 def targetpathfn(pat, dest, srcs):
1095 1095 if os.path.isdir(pat):
1096 1096 abspfx = util.canonpath(repo.root, cwd, pat)
1097 1097 if destdirexists:
1098 1098 striplen = len(os.path.split(abspfx)[0])
1099 1099 else:
1100 1100 striplen = len(abspfx)
1101 1101 if striplen:
1102 1102 striplen += len(os.sep)
1103 1103 res = lambda p: os.path.join(dest, p[striplen:])
1104 1104 elif destdirexists:
1105 1105 res = lambda p: os.path.join(dest, os.path.basename(p))
1106 1106 else:
1107 1107 res = lambda p: dest
1108 1108 return res
1109 1109
1110 1110 def targetpathafterfn(pat, dest, srcs):
1111 1111 if util.patkind(pat, None)[0]:
1112 1112 # a mercurial pattern
1113 1113 res = lambda p: os.path.join(dest, os.path.basename(p))
1114 1114 else:
1115 1115 abspfx = util.canonpath(repo.root, cwd, pat)
1116 1116 if len(abspfx) < len(srcs[0][0]):
1117 1117 # A directory. Either the target path contains the last
1118 1118 # component of the source path or it does not.
1119 1119 def evalpath(striplen):
1120 1120 score = 0
1121 1121 for s in srcs:
1122 1122 t = os.path.join(dest, s[0][striplen:])
1123 1123 if os.path.exists(t):
1124 1124 score += 1
1125 1125 return score
1126 1126
1127 1127 striplen = len(abspfx)
1128 1128 if striplen:
1129 1129 striplen += len(os.sep)
1130 1130 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1131 1131 score = evalpath(striplen)
1132 1132 striplen1 = len(os.path.split(abspfx)[0])
1133 1133 if striplen1:
1134 1134 striplen1 += len(os.sep)
1135 1135 if evalpath(striplen1) > score:
1136 1136 striplen = striplen1
1137 1137 res = lambda p: os.path.join(dest, p[striplen:])
1138 1138 else:
1139 1139 # a file
1140 1140 if destdirexists:
1141 1141 res = lambda p: os.path.join(dest, os.path.basename(p))
1142 1142 else:
1143 1143 res = lambda p: dest
1144 1144 return res
1145 1145
1146 1146
1147 1147 pats = list(pats)
1148 1148 if not pats:
1149 1149 raise util.Abort(_('no source or destination specified'))
1150 1150 if len(pats) == 1:
1151 1151 raise util.Abort(_('no destination specified'))
1152 1152 dest = pats.pop()
1153 1153 destdirexists = os.path.isdir(dest)
1154 1154 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1155 1155 raise util.Abort(_('with multiple sources, destination must be an '
1156 1156 'existing directory'))
1157 1157 if opts['after']:
1158 1158 tfn = targetpathafterfn
1159 1159 else:
1160 1160 tfn = targetpathfn
1161 1161 copylist = []
1162 1162 for pat in pats:
1163 1163 srcs = []
1164 1164 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1165 1165 origsrc = okaytocopy(abssrc, relsrc, exact)
1166 1166 if origsrc:
1167 1167 srcs.append((origsrc, abssrc, relsrc, exact))
1168 1168 if not srcs:
1169 1169 continue
1170 1170 copylist.append((tfn(pat, dest, srcs), srcs))
1171 1171 if not copylist:
1172 1172 raise util.Abort(_('no files to copy'))
1173 1173
1174 1174 for targetpath, srcs in copylist:
1175 1175 for origsrc, abssrc, relsrc, exact in srcs:
1176 1176 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1177 1177
1178 1178 if errors:
1179 1179 ui.warn(_('(consider using --after)\n'))
1180 1180 return errors, copied
1181 1181
1182 1182 def copy(ui, repo, *pats, **opts):
1183 1183 """mark files as copied for the next commit
1184 1184
1185 1185 Mark dest as having copies of source files. If dest is a
1186 1186 directory, copies are put in that directory. If dest is a file,
1187 1187 there can only be one source.
1188 1188
1189 1189 By default, this command copies the contents of files as they
1190 1190 stand in the working directory. If invoked with --after, the
1191 1191 operation is recorded, but no copying is performed.
1192 1192
1193 1193 This command takes effect in the next commit.
1194 1194
1195 1195 NOTE: This command should be treated as experimental. While it
1196 1196 should properly record copied files, this information is not yet
1197 1197 fully used by merge, nor fully reported by log.
1198 1198 """
1199 1199 wlock = repo.wlock(0)
1200 1200 errs, copied = docopy(ui, repo, pats, opts, wlock)
1201 1201 return errs
1202 1202
1203 1203 def debugancestor(ui, index, rev1, rev2):
1204 1204 """find the ancestor revision of two revisions in a given index"""
1205 1205 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1206 1206 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1207 1207 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1208 1208
1209 1209 def debugcomplete(ui, cmd='', **opts):
1210 1210 """returns the completion list associated with the given command"""
1211 1211
1212 1212 if opts['options']:
1213 1213 options = []
1214 1214 otables = [globalopts]
1215 1215 if cmd:
1216 1216 aliases, entry = find(cmd)
1217 1217 otables.append(entry[1])
1218 1218 for t in otables:
1219 1219 for o in t:
1220 1220 if o[0]:
1221 1221 options.append('-%s' % o[0])
1222 1222 options.append('--%s' % o[1])
1223 1223 ui.write("%s\n" % "\n".join(options))
1224 1224 return
1225 1225
1226 1226 clist = findpossible(cmd).keys()
1227 1227 clist.sort()
1228 1228 ui.write("%s\n" % "\n".join(clist))
1229 1229
1230 1230 def debugrebuildstate(ui, repo, rev=None):
1231 1231 """rebuild the dirstate as it would look like for the given revision"""
1232 1232 if not rev:
1233 1233 rev = repo.changelog.tip()
1234 1234 else:
1235 1235 rev = repo.lookup(rev)
1236 1236 change = repo.changelog.read(rev)
1237 1237 n = change[0]
1238 1238 files = repo.manifest.readflags(n)
1239 1239 wlock = repo.wlock()
1240 1240 repo.dirstate.rebuild(rev, files.iteritems())
1241 1241
1242 1242 def debugcheckstate(ui, repo):
1243 1243 """validate the correctness of the current dirstate"""
1244 1244 parent1, parent2 = repo.dirstate.parents()
1245 1245 repo.dirstate.read()
1246 1246 dc = repo.dirstate.map
1247 1247 keys = dc.keys()
1248 1248 keys.sort()
1249 1249 m1n = repo.changelog.read(parent1)[0]
1250 1250 m2n = repo.changelog.read(parent2)[0]
1251 1251 m1 = repo.manifest.read(m1n)
1252 1252 m2 = repo.manifest.read(m2n)
1253 1253 errors = 0
1254 1254 for f in dc:
1255 1255 state = repo.dirstate.state(f)
1256 1256 if state in "nr" and f not in m1:
1257 1257 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1258 1258 errors += 1
1259 1259 if state in "a" and f in m1:
1260 1260 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1261 1261 errors += 1
1262 1262 if state in "m" and f not in m1 and f not in m2:
1263 1263 ui.warn(_("%s in state %s, but not in either manifest\n") %
1264 1264 (f, state))
1265 1265 errors += 1
1266 1266 for f in m1:
1267 1267 state = repo.dirstate.state(f)
1268 1268 if state not in "nrm":
1269 1269 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1270 1270 errors += 1
1271 1271 if errors:
1272 1272 error = _(".hg/dirstate inconsistent with current parent's manifest")
1273 1273 raise util.Abort(error)
1274 1274
1275 1275 def debugconfig(ui, repo):
1276 1276 """show combined config settings from all hgrc files"""
1277 1277 for section, name, value in ui.walkconfig():
1278 1278 ui.write('%s.%s=%s\n' % (section, name, value))
1279 1279
1280 1280 def debugsetparents(ui, repo, rev1, rev2=None):
1281 1281 """manually set the parents of the current working directory
1282 1282
1283 1283 This is useful for writing repository conversion tools, but should
1284 1284 be used with care.
1285 1285 """
1286 1286
1287 1287 if not rev2:
1288 1288 rev2 = hex(nullid)
1289 1289
1290 1290 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1291 1291
1292 1292 def debugstate(ui, repo):
1293 1293 """show the contents of the current dirstate"""
1294 1294 repo.dirstate.read()
1295 1295 dc = repo.dirstate.map
1296 1296 keys = dc.keys()
1297 1297 keys.sort()
1298 1298 for file_ in keys:
1299 1299 ui.write("%c %3o %10d %s %s\n"
1300 1300 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1301 1301 time.strftime("%x %X",
1302 1302 time.localtime(dc[file_][3])), file_))
1303 1303 for f in repo.dirstate.copies:
1304 1304 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1305 1305
1306 1306 def debugdata(ui, file_, rev):
1307 1307 """dump the contents of an data file revision"""
1308 1308 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1309 1309 file_[:-2] + ".i", file_, 0)
1310 1310 try:
1311 1311 ui.write(r.revision(r.lookup(rev)))
1312 1312 except KeyError:
1313 1313 raise util.Abort(_('invalid revision identifier %s'), rev)
1314 1314
1315 1315 def debugindex(ui, file_):
1316 1316 """dump the contents of an index file"""
1317 1317 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1318 1318 ui.write(" rev offset length base linkrev" +
1319 1319 " nodeid p1 p2\n")
1320 1320 for i in range(r.count()):
1321 1321 node = r.node(i)
1322 1322 pp = r.parents(node)
1323 1323 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1324 1324 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1325 1325 short(node), short(pp[0]), short(pp[1])))
1326 1326
1327 1327 def debugindexdot(ui, file_):
1328 1328 """dump an index DAG as a .dot file"""
1329 1329 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1330 1330 ui.write("digraph G {\n")
1331 1331 for i in range(r.count()):
1332 1332 node = r.node(i)
1333 1333 pp = r.parents(node)
1334 1334 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1335 1335 if pp[1] != nullid:
1336 1336 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1337 1337 ui.write("}\n")
1338 1338
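# Editor's example (not part of this changeset): the revlog API used by
# debugindex/debugindexdot above can also be walked directly.  The file_
# argument is the path to a '.i' index file, as for debugindex.
def _example_print_linkrevs(ui, file_):
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    for i in range(r.count()):
        node = r.node(i)
        ui.write("%d -> changelog rev %d (%s)\n"
                 % (i, r.linkrev(node), short(node)))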
1339 1339 def debugrename(ui, repo, file, rev=None):
1340 1340 """dump rename information"""
1341 1341 r = repo.file(relpath(repo, [file])[0])
1342 1342 if rev:
1343 1343 try:
1344 1344 # assume all revision numbers are for changesets
1345 1345 n = repo.lookup(rev)
1346 1346 change = repo.changelog.read(n)
1347 1347 m = repo.manifest.read(change[0])
1348 1348 n = m[relpath(repo, [file])[0]]
1349 1349 except (hg.RepoError, KeyError):
1350 1350 n = r.lookup(rev)
1351 1351 else:
1352 1352 n = r.tip()
1353 1353 m = r.renamed(n)
1354 1354 if m:
1355 1355 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1356 1356 else:
1357 1357 ui.write(_("not renamed\n"))
1358 1358
1359 1359 def debugwalk(ui, repo, *pats, **opts):
1360 1360 """show how files match on given patterns"""
1361 1361 items = list(walk(repo, pats, opts))
1362 1362 if not items:
1363 1363 return
1364 1364 fmt = '%%s %%-%ds %%-%ds %%s' % (
1365 1365 max([len(abs) for (src, abs, rel, exact) in items]),
1366 1366 max([len(rel) for (src, abs, rel, exact) in items]))
1367 1367 for src, abs, rel, exact in items:
1368 1368 line = fmt % (src, abs, rel, exact and 'exact' or '')
1369 1369 ui.write("%s\n" % line.rstrip())
1370 1370
1371 1371 def diff(ui, repo, *pats, **opts):
1372 1372 """diff repository (or selected files)
1373 1373
1374 1374 Show differences between revisions for the specified files.
1375 1375
1376 1376 Differences between files are shown using the unified diff format.
1377 1377
1378 1378 When two revision arguments are given, then changes are shown
1379 1379 between those revisions. If only one revision is specified then
1380 1380 that revision is compared to the working directory, and, when no
1381 1381 revisions are specified, the working directory files are compared
1382 1382 to its parent.
1383 1383
1384 1384 Without the -a option, diff will avoid generating diffs of files
1385 1385 it detects as binary. With -a, diff will generate a diff anyway,
1386 1386 probably with undesirable results.
1387 1387 """
1388 1388 node1, node2 = revpair(ui, repo, opts['rev'])
1389 1389
1390 1390 fns, matchfn, anypats = matchpats(repo, pats, opts)
1391 1391
1392 1392 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1393 1393 text=opts['text'], opts=opts)
1394 1394
1395 1395 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1396 1396 node = repo.lookup(changeset)
1397 1397 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1398 1398 if opts['switch_parent']:
1399 1399 parents.reverse()
1400 1400 prev = (parents and parents[0]) or nullid
1401 1401 change = repo.changelog.read(node)
1402 1402
1403 1403 fp = make_file(repo, repo.changelog, opts['output'],
1404 1404 node=node, total=total, seqno=seqno,
1405 1405 revwidth=revwidth)
1406 1406 if fp != sys.stdout:
1407 1407 ui.note("%s\n" % fp.name)
1408 1408
1409 1409 fp.write("# HG changeset patch\n")
1410 1410 fp.write("# User %s\n" % change[1])
1411 1411 fp.write("# Date %d %d\n" % change[2])
1412 1412 fp.write("# Node ID %s\n" % hex(node))
1413 1413 fp.write("# Parent %s\n" % hex(prev))
1414 1414 if len(parents) > 1:
1415 1415 fp.write("# Parent %s\n" % hex(parents[1]))
1416 1416 fp.write(change[4].rstrip())
1417 1417 fp.write("\n\n")
1418 1418
1419 1419 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1420 1420 if fp != sys.stdout:
1421 1421 fp.close()
1422 1422
1423 1423 def export(ui, repo, *changesets, **opts):
1424 1424 """dump the header and diffs for one or more changesets
1425 1425
1426 1426 Print the changeset header and diffs for one or more revisions.
1427 1427
1428 1428 The information shown in the changeset header is: author,
1429 1429 changeset hash, parent and commit comment.
1430 1430
1431 1431 Output may be to a file, in which case the name of the file is
1432 1432 given using a format string. The formatting rules are as follows:
1433 1433
1434 1434 %% literal "%" character
1435 1435 %H changeset hash (40 bytes of hexadecimal)
1436 1436 %N number of patches being generated
1437 1437 %R changeset revision number
1438 1438 %b basename of the exporting repository
1439 1439 %h short-form changeset hash (12 bytes of hexadecimal)
1440 1440 %n zero-padded sequence number, starting at 1
1441 1441 %r zero-padded changeset revision number
1442 1442
1443 1443 Without the -a option, export will avoid generating diffs of files
1444 1444 it detects as binary. With -a, export will generate a diff anyway,
1445 1445 probably with undesirable results.
1446 1446
1447 1447 With the --switch-parent option, the diff will be against the second
1448 1448 parent. It can be useful to review a merge.
1449 1449 """
1450 1450 if not changesets:
1451 1451 raise util.Abort(_("export requires at least one changeset"))
1452 1452 seqno = 0
1453 1453 revs = list(revrange(ui, repo, changesets))
1454 1454 total = len(revs)
1455 1455 revwidth = max(map(len, revs))
1456 1456 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1457 1457 ui.note(msg)
1458 1458 for cset in revs:
1459 1459 seqno += 1
1460 1460 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1461 1461
1462 1462 def forget(ui, repo, *pats, **opts):
1463 1463 """don't add the specified files on the next commit (DEPRECATED)
1464 1464
1465 1465 (DEPRECATED)
1466 1466 Undo an 'hg add' scheduled for the next commit.
1467 1467
1468 1468 This command is now deprecated and will be removed in a future
1469 1469 release. Please use revert instead.
1470 1470 """
1471 1471 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1472 1472 forget = []
1473 1473 for src, abs, rel, exact in walk(repo, pats, opts):
1474 1474 if repo.dirstate.state(abs) == 'a':
1475 1475 forget.append(abs)
1476 1476 if ui.verbose or not exact:
1477 1477 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1478 1478 repo.forget(forget)
1479 1479
1480 1480 def grep(ui, repo, pattern, *pats, **opts):
1481 1481 """search for a pattern in specified files and revisions
1482 1482
1483 1483 Search revisions of files for a regular expression.
1484 1484
1485 1485 This command behaves differently than Unix grep. It only accepts
1486 1486 Python/Perl regexps. It searches repository history, not the
1487 1487 working directory. It always prints the revision number in which
1488 1488 a match appears.
1489 1489
1490 1490 By default, grep only prints output for the first revision of a
1491 1491 file in which it finds a match. To get it to print every revision
1492 1492 that contains a change in match status ("-" for a match that
1493 1493 becomes a non-match, or "+" for a non-match that becomes a match),
1494 1494 use the --all flag.
1495 1495 """
1496 1496 reflags = 0
1497 1497 if opts['ignore_case']:
1498 1498 reflags |= re.I
1499 1499 regexp = re.compile(pattern, reflags)
1500 1500 sep, eol = ':', '\n'
1501 1501 if opts['print0']:
1502 1502 sep = eol = '\0'
1503 1503
1504 1504 fcache = {}
1505 1505 def getfile(fn):
1506 1506 if fn not in fcache:
1507 1507 fcache[fn] = repo.file(fn)
1508 1508 return fcache[fn]
1509 1509
1510 1510 def matchlines(body):
1511 1511 begin = 0
1512 1512 linenum = 0
1513 1513 while True:
1514 1514 match = regexp.search(body, begin)
1515 1515 if not match:
1516 1516 break
1517 1517 mstart, mend = match.span()
1518 1518 linenum += body.count('\n', begin, mstart) + 1
1519 1519 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1520 1520 lend = body.find('\n', mend)
1521 1521 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1522 1522 begin = lend + 1
1523 1523
1524 1524 class linestate(object):
1525 1525 def __init__(self, line, linenum, colstart, colend):
1526 1526 self.line = line
1527 1527 self.linenum = linenum
1528 1528 self.colstart = colstart
1529 1529 self.colend = colend
1530 1530 def __eq__(self, other):
1531 1531 return self.line == other.line
1532 1532 def __hash__(self):
1533 1533 return hash(self.line)
1534 1534
1535 1535 matches = {}
1536 1536 def grepbody(fn, rev, body):
1537 1537 matches[rev].setdefault(fn, {})
1538 1538 m = matches[rev][fn]
1539 1539 for lnum, cstart, cend, line in matchlines(body):
1540 1540 s = linestate(line, lnum, cstart, cend)
1541 1541 m[s] = s
1542 1542
1543 1543 # FIXME: prev isn't used, why ?
1544 1544 prev = {}
1545 1545 ucache = {}
1546 1546 def display(fn, rev, states, prevstates):
1547 1547 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1548 1548 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1549 1549 counts = {'-': 0, '+': 0}
1550 1550 filerevmatches = {}
1551 1551 for l in diff:
1552 1552 if incrementing or not opts['all']:
1553 1553 change = ((l in prevstates) and '-') or '+'
1554 1554 r = rev
1555 1555 else:
1556 1556 change = ((l in states) and '-') or '+'
1557 1557 r = prev[fn]
1558 1558 cols = [fn, str(rev)]
1559 1559 if opts['line_number']:
1560 1560 cols.append(str(l.linenum))
1561 1561 if opts['all']:
1562 1562 cols.append(change)
1563 1563 if opts['user']:
1564 1564 cols.append(trimuser(ui, getchange(rev)[1], rev,
1565 1565 ucache))
1566 1566 if opts['files_with_matches']:
1567 1567 c = (fn, rev)
1568 1568 if c in filerevmatches:
1569 1569 continue
1570 1570 filerevmatches[c] = 1
1571 1571 else:
1572 1572 cols.append(l.line)
1573 1573 ui.write(sep.join(cols), eol)
1574 1574 counts[change] += 1
1575 1575 return counts['+'], counts['-']
1576 1576
1577 1577 fstate = {}
1578 1578 skip = {}
1579 1579 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1580 1580 count = 0
1581 1581 incrementing = False
1582 1582 for st, rev, fns in changeiter:
1583 1583 if st == 'window':
1584 1584 incrementing = rev
1585 1585 matches.clear()
1586 1586 elif st == 'add':
1587 1587 change = repo.changelog.read(repo.lookup(str(rev)))
1588 1588 mf = repo.manifest.read(change[0])
1589 1589 matches[rev] = {}
1590 1590 for fn in fns:
1591 1591 if fn in skip:
1592 1592 continue
1593 1593 fstate.setdefault(fn, {})
1594 1594 try:
1595 1595 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1596 1596 except KeyError:
1597 1597 pass
1598 1598 elif st == 'iter':
1599 1599 states = matches[rev].items()
1600 1600 states.sort()
1601 1601 for fn, m in states:
1602 1602 if fn in skip:
1603 1603 continue
1604 1604 if incrementing or not opts['all'] or fstate[fn]:
1605 1605 pos, neg = display(fn, rev, m, fstate[fn])
1606 1606 count += pos + neg
1607 1607 if pos and not opts['all']:
1608 1608 skip[fn] = True
1609 1609 fstate[fn] = m
1610 1610 prev[fn] = rev
1611 1611
1612 1612 if not incrementing:
1613 1613 fstate = fstate.items()
1614 1614 fstate.sort()
1615 1615 for fn, state in fstate:
1616 1616 if fn in skip:
1617 1617 continue
1618 1618 display(fn, rev, {}, state)
1619 1619 return (count == 0 and 1) or 0
1620 1620
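# Editor's sketch (not part of this module): the line-number bookkeeping used
# by matchlines() inside grep() above, shown standalone.  Newlines are counted
# between successive matches instead of splitting the whole body, so line
# numbers stay correct without a per-line regexp pass.  The only change from
# the original is a guard for bodies that do not end with a newline.
def _matchlines_sketch(regexp, body):
    begin, linenum = 0, 0
    while True:
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        lend = body.find('\n', mend)
        if lend == -1:
            lend = len(body)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1

# e.g. [l for l, cs, ce, line in _matchlines_sketch(re.compile('b'), 'a\nb\nab\n')]
# gives [2, 3]: at most one hit is reported per line, oldest first.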
1621 1621 def heads(ui, repo, **opts):
1622 1622 """show current repository heads
1623 1623
1624 1624 Show all repository head changesets.
1625 1625
1626 1626 Repository "heads" are changesets that don't have children
1627 1627 changesets. They are where development generally takes place and
1628 1628 are the usual targets for update and merge operations.
1629 1629 """
1630 1630 if opts['rev']:
1631 1631 heads = repo.heads(repo.lookup(opts['rev']))
1632 1632 else:
1633 1633 heads = repo.heads()
1634 1634 br = None
1635 1635 if opts['branches']:
1636 1636 br = repo.branchlookup(heads)
1637 1637 displayer = show_changeset(ui, repo, opts)
1638 1638 for n in heads:
1639 1639 displayer.show(changenode=n, brinfo=br)
1640 1640
1641 1641 def identify(ui, repo):
1642 1642 """print information about the working copy
1643 1643
1644 1644 Print a short summary of the current state of the repo.
1645 1645
1646 1646 This summary identifies the repository state using one or two parent
1647 1647 hash identifiers, followed by a "+" if there are uncommitted changes
1648 1648 in the working directory, followed by a list of tags for this revision.
1649 1649 """
1650 1650 parents = [p for p in repo.dirstate.parents() if p != nullid]
1651 1651 if not parents:
1652 1652 ui.write(_("unknown\n"))
1653 1653 return
1654 1654
1655 1655 hexfunc = ui.verbose and hex or short
1656 1656 modified, added, removed, deleted, unknown = repo.changes()
1657 1657 output = ["%s%s" %
1658 1658 ('+'.join([hexfunc(parent) for parent in parents]),
1659 1659 (modified or added or removed or deleted) and "+" or "")]
1660 1660
1661 1661 if not ui.quiet:
1662 1662 # multiple tags for a single parent separated by '/'
1663 1663 parenttags = ['/'.join(tags)
1664 1664 for tags in map(repo.nodetags, parents) if tags]
1665 1665 # tags for multiple parents separated by ' + '
1666 1666 if parenttags:
1667 1667 output.append(' + '.join(parenttags))
1668 1668
1669 1669 ui.write("%s\n" % ' '.join(output))
1670 1670
1671 1671 def import_(ui, repo, patch1, *patches, **opts):
1672 1672 """import an ordered set of patches
1673 1673
1674 1674 Import a list of patches and commit them individually.
1675 1675
1676 1676 If there are outstanding changes in the working directory, import
1677 1677 will abort unless given the -f flag.
1678 1678
1679 1679 If a patch looks like a mail message (its first line starts with
1680 1680 "From " or looks like an RFC822 header), it will not be applied
1681 1681 unless the -f option is used. The importer neither parses nor
1682 1682 discards mail headers, so use -f only to override the "mailness"
1683 1683 safety check, not to import a real mail message.
1684 1684 """
1685 1685 patches = (patch1,) + patches
1686 1686
1687 1687 if not opts['force']:
1688 1688 bail_if_changed(repo)
1689 1689
1690 1690 d = opts["base"]
1691 1691 strip = opts["strip"]
1692 1692
1693 1693 mailre = re.compile(r'(?:From |[\w-]+:)')
1694 1694
1695 1695 # attempt to detect the start of a patch
1696 1696 # (this heuristic is borrowed from quilt)
1697 1697 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1698 1698 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1699 1699 '(---|\*\*\*)[ \t])')
1700 1700
1701 1701 for patch in patches:
1702 1702 ui.status(_("applying %s\n") % patch)
1703 1703 pf = os.path.join(d, patch)
1704 1704
1705 1705 message = []
1706 1706 user = None
1707 1707 date = None
1708 1708 hgpatch = False
1709 1709 for line in file(pf):
1710 1710 line = line.rstrip()
1711 1711 if (not message and not hgpatch and
1712 1712 mailre.match(line) and not opts['force']):
1713 1713 if len(line) > 35:
1714 1714 line = line[:32] + '...'
1715 1715 raise util.Abort(_('first line looks like a '
1716 1716 'mail header: ') + line)
1717 1717 if diffre.match(line):
1718 1718 break
1719 1719 elif hgpatch:
1720 1720 # parse values when importing the result of an hg export
1721 1721 if line.startswith("# User "):
1722 1722 user = line[7:]
1723 1723 ui.debug(_('User: %s\n') % user)
1724 1724 elif line.startswith("# Date "):
1725 1725 date = line[7:]
1726 1726 elif not line.startswith("# ") and line:
1727 1727 message.append(line)
1728 1728 hgpatch = False
1729 1729 elif line == '# HG changeset patch':
1730 1730 hgpatch = True
1731 1731 message = [] # We may have collected garbage
1732 1732 elif message or line:
1733 1733 message.append(line)
1734 1734
1735 1735 # make sure message isn't empty
1736 1736 if not message:
1737 1737 message = _("imported patch %s\n") % patch
1738 1738 else:
1739 1739 message = '\n'.join(message).rstrip()
1740 1740 ui.debug(_('message:\n%s\n') % message)
1741 1741
1742 1742 files = util.patch(strip, pf, ui)
1743 1743
1744 1744 if len(files) > 0:
1745 1745 addremove_lock(ui, repo, files, {})
1746 1746 repo.commit(files, message, user, date)
1747 1747
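# Editor's sketch (not part of this module): the "# HG changeset patch"
# header convention parsed by import_() above, as a tiny standalone parser.
# The diff-start regexp here is deliberately simplified; the real heuristic
# (borrowed from quilt) is the diffre defined in import_().  're' comes from
# this module's demandload imports.
def _parse_hgexport_sketch(lines):
    """Return (user, date, message) from an 'hg export'-style patch."""
    user = date = None
    message = []
    hgpatch = False
    simple_diffre = re.compile(r'^(diff |--- |\*\*\* |Index: )')
    for line in lines:
        line = line.rstrip()
        if simple_diffre.match(line):
            break
        if line == '# HG changeset patch':
            hgpatch = True
            message = []    # anything gathered before the marker was garbage
        elif hgpatch and line.startswith('# User '):
            user = line[7:]
        elif hgpatch and line.startswith('# Date '):
            date = line[7:]
        elif line and not line.startswith('# '):
            message.append(line)
    return user, date, '\n'.join(message).rstrip()

# e.g. _parse_hgexport_sketch(['# HG changeset patch', '# User mpm',
#                              'fix a bug', 'diff -r 000 -r 111 a'])
# returns ('mpm', None, 'fix a bug')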
1748 1748 def incoming(ui, repo, source="default", **opts):
1749 1749 """show new changesets found in source
1750 1750
1751 1751 Show new changesets found in the specified path/URL or the default
1752 1752 pull location. These are the changesets that would be pulled if a pull
1753 1753 was requested.
1754 1754
1755 1755 For remote repositories, using --bundle avoids downloading the changesets
1756 1756 twice if the incoming command is followed by a pull.
1757 1757
1758 1758 See pull for valid source format details.
1759 1759 """
1760 1760 source = ui.expandpath(source)
1761 1761 if opts['ssh']:
1762 1762 ui.setconfig("ui", "ssh", opts['ssh'])
1763 1763 if opts['remotecmd']:
1764 1764 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1765 1765
1766 1766 other = hg.repository(ui, source)
1767 1767 incoming = repo.findincoming(other, force=opts["force"])
1768 1768 if not incoming:
1769 1769 ui.status(_("no changes found\n"))
1770 1770 return
1771 1771
1772 1772 cleanup = None
1773 1773 try:
1774 1774 fname = opts["bundle"]
1775 1775 if fname or not other.local():
1776 1776 # create a bundle (uncompressed if other repo is not local)
1777 1777 cg = other.changegroup(incoming, "incoming")
1778 1778 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1779 1779 # keep written bundle?
1780 1780 if opts["bundle"]:
1781 1781 cleanup = None
1782 1782 if not other.local():
1783 1783 # use the created uncompressed bundlerepo
1784 1784 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1785 1785
1786 1786 o = other.changelog.nodesbetween(incoming)[0]
1787 1787 if opts['newest_first']:
1788 1788 o.reverse()
1789 1789 displayer = show_changeset(ui, other, opts)
1790 1790 for n in o:
1791 1791 parents = [p for p in other.changelog.parents(n) if p != nullid]
1792 1792 if opts['no_merges'] and len(parents) == 2:
1793 1793 continue
1794 1794 displayer.show(changenode=n)
1795 1795 if opts['patch']:
1796 1796 prev = (parents and parents[0]) or nullid
1797 1797 dodiff(ui, ui, other, prev, n)
1798 1798 ui.write("\n")
1799 1799 finally:
1800 1800 if hasattr(other, 'close'):
1801 1801 other.close()
1802 1802 if cleanup:
1803 1803 os.unlink(cleanup)
1804 1804
1805 1805 def init(ui, dest="."):
1806 1806 """create a new repository in the given directory
1807 1807
1808 1808 Initialize a new repository in the given directory. If the given
1809 1809 directory does not exist, it is created.
1810 1810
1811 1811 If no directory is given, the current directory is used.
1812 1812 """
1813 1813 if not os.path.exists(dest):
1814 1814 os.mkdir(dest)
1815 1815 hg.repository(ui, dest, create=1)
1816 1816
1817 1817 def locate(ui, repo, *pats, **opts):
1818 1818 """locate files matching specific patterns
1819 1819
1820 1820 Print all files under Mercurial control whose names match the
1821 1821 given patterns.
1822 1822
1823 1823 This command searches the current directory and its
1824 1824 subdirectories. To search an entire repository, move to the root
1825 1825 of the repository.
1826 1826
1827 1827 If no patterns are given to match, this command prints all file
1828 1828 names.
1829 1829
1830 1830 If you want to feed the output of this command into the "xargs"
1831 1831 command, use the "-0" option to both this command and "xargs".
1832 1832 This will avoid the problem of "xargs" treating single filenames
1833 1833 that contain white space as multiple filenames.
1834 1834 """
1835 1835 end = opts['print0'] and '\0' or '\n'
1836 1836 rev = opts['rev']
1837 1837 if rev:
1838 1838 node = repo.lookup(rev)
1839 1839 else:
1840 1840 node = None
1841 1841
1842 1842 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1843 1843 head='(?:.*/|)'):
1844 1844 if not node and repo.dirstate.state(abs) == '?':
1845 1845 continue
1846 1846 if opts['fullpath']:
1847 1847 ui.write(os.path.join(repo.root, abs), end)
1848 1848 else:
1849 1849 ui.write(((pats and rel) or abs), end)
1850 1850
1851 1851 def log(ui, repo, *pats, **opts):
1852 1852 """show revision history of entire repository or files
1853 1853
1854 1854 Print the revision history of the specified files or the entire project.
1855 1855
1856 1856 By default this command outputs: changeset id and hash, tags,
1857 1857 non-trivial parents, user, date and time, and a summary for each
1858 1858 commit. When the -v/--verbose switch is used, the list of changed
1859 1859 files and the full commit message are shown.
1860 1860 """
1861 1861 class dui(object):
1862 1862 # Implement and delegate some ui protocol. Save hunks of
1863 1863 # output for later display in the desired order.
1864 1864 def __init__(self, ui):
1865 1865 self.ui = ui
1866 1866 self.hunk = {}
1867 1867 self.header = {}
1868 1868 def bump(self, rev):
1869 1869 self.rev = rev
1870 1870 self.hunk[rev] = []
1871 1871 self.header[rev] = []
1872 1872 def note(self, *args):
1873 1873 if self.verbose:
1874 1874 self.write(*args)
1875 1875 def status(self, *args):
1876 1876 if not self.quiet:
1877 1877 self.write(*args)
1878 1878 def write(self, *args):
1879 1879 self.hunk[self.rev].append(args)
1880 1880 def write_header(self, *args):
1881 1881 self.header[self.rev].append(args)
1882 1882 def debug(self, *args):
1883 1883 if self.debugflag:
1884 1884 self.write(*args)
1885 1885 def __getattr__(self, key):
1886 1886 return getattr(self.ui, key)
1887 1887
1888 1888 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1889 1889
1890 1890 if opts['limit']:
1891 1891 try:
1892 1892 limit = int(opts['limit'])
1893 1893 except ValueError:
1894 1894 raise util.Abort(_('limit must be a positive integer'))
1895 1895 if limit <= 0: raise util.Abort(_('limit must be positive'))
1896 1896 else:
1897 1897 limit = sys.maxint
1898 1898 count = 0
1899 1899
1900 1900 displayer = show_changeset(ui, repo, opts)
1901 1901 for st, rev, fns in changeiter:
1902 1902 if st == 'window':
1903 1903 du = dui(ui)
1904 1904 displayer.ui = du
1905 1905 elif st == 'add':
1906 1906 du.bump(rev)
1907 1907 changenode = repo.changelog.node(rev)
1908 1908 parents = [p for p in repo.changelog.parents(changenode)
1909 1909 if p != nullid]
1910 1910 if opts['no_merges'] and len(parents) == 2:
1911 1911 continue
1912 1912 if opts['only_merges'] and len(parents) != 2:
1913 1913 continue
1914 1914
1915 1915 if opts['keyword']:
1916 1916 changes = getchange(rev)
1917 1917 miss = 0
1918 1918 for k in [kw.lower() for kw in opts['keyword']]:
1919 1919 if not (k in changes[1].lower() or
1920 1920 k in changes[4].lower() or
1921 1921 k in " ".join(changes[3][:20]).lower()):
1922 1922 miss = 1
1923 1923 break
1924 1924 if miss:
1925 1925 continue
1926 1926
1927 1927 br = None
1928 1928 if opts['branches']:
1929 1929 br = repo.branchlookup([repo.changelog.node(rev)])
1930 1930
1931 1931 displayer.show(rev, brinfo=br)
1932 1932 if opts['patch']:
1933 1933 prev = (parents and parents[0]) or nullid
1934 1934 dodiff(du, du, repo, prev, changenode, match=matchfn)
1935 1935 du.write("\n\n")
1936 1936 elif st == 'iter':
1937 1937 if count == limit: break
1938 1938 if du.header[rev]:
1939 1939 for args in du.header[rev]:
1940 1940 ui.write(*args)
1941 1941 if du.hunk[rev]:
1942 1942 count += 1
1943 1943 for args in du.hunk[rev]:
1944 1944 ui.write(*args)
1945 1945
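# Editor's sketch (not part of this module): the buffering idea behind the
# dui class in log() above.  Output is captured per revision while a window
# of history is gathered forwards, then replayed in the order the caller
# wants to display (usually newest first).  The class and method names here
# are hypothetical.
class _bufferedui_sketch(object):
    def __init__(self):
        self.hunk = {}
        self.rev = None
    def bump(self, rev):          # start buffering output for a new revision
        self.rev = rev
        self.hunk[rev] = []
    def write(self, *args):       # capture instead of printing
        self.hunk[self.rev].append(args)
    def replay(self, rev, realwrite):
        for args in self.hunk.get(rev, []):
            realwrite(*args)

# Usage: buf = _bufferedui_sketch(); buf.bump(5); buf.write('changeset: 5\n')
#        buf.replay(5, sys.stdout.write)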
1946 1946 def manifest(ui, repo, rev=None):
1947 1947 """output the latest or given revision of the project manifest
1948 1948
1949 1949 Print a list of version controlled files for the given revision.
1950 1950
1951 1951 The manifest is the list of files being version controlled. If no revision
1952 1952 is given then the tip is used.
1953 1953 """
1954 1954 if rev:
1955 1955 try:
1956 1956 # assume all revision numbers are for changesets
1957 1957 n = repo.lookup(rev)
1958 1958 change = repo.changelog.read(n)
1959 1959 n = change[0]
1960 1960 except hg.RepoError:
1961 1961 n = repo.manifest.lookup(rev)
1962 1962 else:
1963 1963 n = repo.manifest.tip()
1964 1964 m = repo.manifest.read(n)
1965 1965 mf = repo.manifest.readflags(n)
1966 1966 files = m.keys()
1967 1967 files.sort()
1968 1968
1969 1969 for f in files:
1970 1970 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1971 1971
1972 1972 def merge(ui, repo, node=None, **opts):
1973 1973 """merge working directory with another revision
1974 1974
1975 1975 Merge the contents of the current working directory and the
1976 1976 requested revision. Files that changed between either parent are
1977 1977 marked as changed for the next commit, and a commit must be
1978 1978 performed before any further updates are allowed.
1979 1979 """
1980 1980 return doupdate(ui, repo, node=node, merge=True, **opts)
1981 1981
1982 1982 def outgoing(ui, repo, dest="default-push", **opts):
1983 1983 """show changesets not found in destination
1984 1984
1985 1985 Show changesets not found in the specified destination repository or
1986 1986 the default push location. These are the changesets that would be pushed
1987 1987 if a push was requested.
1988 1988
1989 1989 See pull for valid destination format details.
1990 1990 """
1991 1991 dest = ui.expandpath(dest)
1992 1992 if opts['ssh']:
1993 1993 ui.setconfig("ui", "ssh", opts['ssh'])
1994 1994 if opts['remotecmd']:
1995 1995 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1996 1996
1997 1997 other = hg.repository(ui, dest)
1998 1998 o = repo.findoutgoing(other, force=opts['force'])
1999 1999 if not o:
2000 2000 ui.status(_("no changes found\n"))
2001 2001 return
2002 2002 o = repo.changelog.nodesbetween(o)[0]
2003 2003 if opts['newest_first']:
2004 2004 o.reverse()
2005 2005 displayer = show_changeset(ui, repo, opts)
2006 2006 for n in o:
2007 2007 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2008 2008 if opts['no_merges'] and len(parents) == 2:
2009 2009 continue
2010 2010 displayer.show(changenode=n)
2011 2011 if opts['patch']:
2012 2012 prev = (parents and parents[0]) or nullid
2013 2013 dodiff(ui, ui, repo, prev, n)
2014 2014 ui.write("\n")
2015 2015
2016 2016 def parents(ui, repo, rev=None, branches=None, **opts):
2017 2017 """show the parents of the working dir or revision
2018 2018
2019 2019 Print the working directory's parent revisions.
2020 2020 """
2021 2021 if rev:
2022 2022 p = repo.changelog.parents(repo.lookup(rev))
2023 2023 else:
2024 2024 p = repo.dirstate.parents()
2025 2025
2026 2026 br = None
2027 2027 if branches is not None:
2028 2028 br = repo.branchlookup(p)
2029 2029 displayer = show_changeset(ui, repo, opts)
2030 2030 for n in p:
2031 2031 if n != nullid:
2032 2032 displayer.show(changenode=n, brinfo=br)
2033 2033
2034 2034 def paths(ui, repo, search=None):
2035 2035 """show definition of symbolic path names
2036 2036
2037 2037 Show definition of symbolic path name NAME. If no name is given, show
2038 2038 definition of available names.
2039 2039
2040 2040 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2041 2041 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2042 2042 """
2043 2043 if search:
2044 2044 for name, path in ui.configitems("paths"):
2045 2045 if name == search:
2046 2046 ui.write("%s\n" % path)
2047 2047 return
2048 2048 ui.warn(_("not found!\n"))
2049 2049 return 1
2050 2050 else:
2051 2051 for name, path in ui.configitems("paths"):
2052 2052 ui.write("%s = %s\n" % (name, path))
2053 2053
2054 2054 def postincoming(ui, repo, modheads, optupdate):
2055 2055 if modheads == 0:
2056 2056 return
2057 2057 if optupdate:
2058 2058 if modheads == 1:
2059 2059 return doupdate(ui, repo)
2060 2060 else:
2061 2061 ui.status(_("not updating, since new heads added\n"))
2062 2062 if modheads > 1:
2063 2063 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2064 2064 else:
2065 2065 ui.status(_("(run 'hg update' to get a working copy)\n"))
2066 2066
2067 2067 def pull(ui, repo, source="default", **opts):
2068 2068 """pull changes from the specified source
2069 2069
2070 2070 Pull changes from a remote repository to a local one.
2071 2071
2072 2072 This finds all changes from the repository at the specified path
2073 2073 or URL and adds them to the local repository. By default, this
2074 2074 does not update the copy of the project in the working directory.
2075 2075
2076 2076 Valid URLs are of the form:
2077 2077
2078 2078 local/filesystem/path
2079 2079 http://[user@]host[:port][/path]
2080 2080 https://[user@]host[:port][/path]
2081 2081 ssh://[user@]host[:port][/path]
2082 2082
2083 2083 Some notes about using SSH with Mercurial:
2084 2084 - SSH requires an accessible shell account on the destination machine
2085 2085 and a copy of hg in the remote path, or one specified with --remotecmd.
2086 2086 - /path is relative to the remote user's home directory by default.
2087 2087 Use two slashes at the start of a path to specify an absolute path.
2088 2088 - Mercurial doesn't use its own compression via SSH; the right thing
2089 2089 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2090 2090 Host *.mylocalnetwork.example.com
2091 2091 Compression off
2092 2092 Host *
2093 2093 Compression on
2094 2094 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2095 2095 with the --ssh command line option.
2096 2096 """
2097 2097 source = ui.expandpath(source)
2098 2098 ui.status(_('pulling from %s\n') % (source))
2099 2099
2100 2100 if opts['ssh']:
2101 2101 ui.setconfig("ui", "ssh", opts['ssh'])
2102 2102 if opts['remotecmd']:
2103 2103 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2104 2104
2105 2105 other = hg.repository(ui, source)
2106 2106 revs = None
2107 2107 if opts['rev'] and not other.local():
2108 2108 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2109 2109 elif opts['rev']:
2110 2110 revs = [other.lookup(rev) for rev in opts['rev']]
2111 2111 modheads = repo.pull(other, heads=revs, force=opts['force'])
2112 2112 return postincoming(ui, repo, modheads, opts['update'])
2113 2113
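# Editor's sketch (not part of this module): the "ssh://[user@]host[:port][/path]"
# shape documented in the pull help text, pulled apart with a regexp.  The
# real client-side handling lives in the ssh repository class; this only
# illustrates the documented form, including the double-slash convention for
# absolute remote paths.
_sshurl_re_sketch = re.compile(
    r'^ssh://(?:(?P<user>[^@]+)@)?(?P<host>[^:/]+)(?::(?P<port>\d+))?(?P<path>/.*)?$')

def _parse_sshurl_sketch(url):
    m = _sshurl_re_sketch.match(url)
    if not m:
        return None
    path = m.group('path') or ''
    # "//tmp/repo" means the absolute path "/tmp/repo"; "/repo" is relative
    # to the remote user's home directory.
    absolute = path.startswith('//')
    return m.group('user'), m.group('host'), m.group('port'), path, absolute

# e.g. _parse_sshurl_sketch('ssh://hg@example.com:2222//srv/hg/repo')
# returns ('hg', 'example.com', '2222', '//srv/hg/repo', True)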
2114 2114 def push(ui, repo, dest="default-push", **opts):
2115 2115 """push changes to the specified destination
2116 2116
2117 2117 Push changes from the local repository to the given destination.
2118 2118
2119 2119 This is the symmetrical operation for pull. It helps to move
2120 2120 changes from the current repository to a different one. If the
2121 2121 destination is local this is identical to a pull in that directory
2122 2122 from the current one.
2123 2123
2124 2124 By default, push will refuse to run if it detects the result would
2125 2125 increase the number of remote heads. This generally indicates that
2126 2126 the client has forgotten to pull and merge before pushing.
2127 2127
2128 2128 Valid URLs are of the form:
2129 2129
2130 2130 local/filesystem/path
2131 2131 ssh://[user@]host[:port][/path]
2132 2132
2133 2133 Look at the help text for the pull command for important details
2134 2134 about ssh:// URLs.
2135 2135 """
2136 2136 dest = ui.expandpath(dest)
2137 2137 ui.status(_('pushing to %s\n') % dest)
2138 2138
2139 2139 if opts['ssh']:
2140 2140 ui.setconfig("ui", "ssh", opts['ssh'])
2141 2141 if opts['remotecmd']:
2142 2142 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2143 2143
2144 2144 other = hg.repository(ui, dest)
2145 2145 revs = None
2146 2146 if opts['rev']:
2147 2147 revs = [repo.lookup(rev) for rev in opts['rev']]
2148 2148 r = repo.push(other, opts['force'], revs=revs)
2149 2149 return r == 0
2150 2150
2151 2151 def rawcommit(ui, repo, *flist, **rc):
2152 2152 """raw commit interface (DEPRECATED)
2153 2153
2154 2154 (DEPRECATED)
2155 2155 Low-level commit, for use in helper scripts.
2156 2156
2157 2157 This command is not intended to be used by normal users, as it is
2158 2158 primarily useful for importing from other SCMs.
2159 2159
2160 2160 This command is now deprecated and will be removed in a future
2161 2161 release. Please use debugsetparents and commit instead.
2162 2162 """
2163 2163
2164 2164 ui.warn(_("(the rawcommit command is deprecated)\n"))
2165 2165
2166 2166 message = rc['message']
2167 2167 if not message and rc['logfile']:
2168 2168 try:
2169 2169 message = open(rc['logfile']).read()
2170 2170 except IOError:
2171 2171 pass
2172 2172 if not message and not rc['logfile']:
2173 2173 raise util.Abort(_("missing commit message"))
2174 2174
2175 2175 files = relpath(repo, list(flist))
2176 2176 if rc['files']:
2177 2177 files += open(rc['files']).read().splitlines()
2178 2178
2179 2179 rc['parent'] = map(repo.lookup, rc['parent'])
2180 2180
2181 2181 try:
2182 2182 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2183 2183 except ValueError, inst:
2184 2184 raise util.Abort(str(inst))
2185 2185
2186 2186 def recover(ui, repo):
2187 2187 """roll back an interrupted transaction
2188 2188
2189 2189 Recover from an interrupted commit or pull.
2190 2190
2191 2191 This command tries to fix the repository status after an interrupted
2192 2192 operation. It should only be necessary when Mercurial suggests it.
2193 2193 """
2194 2194 if repo.recover():
2195 2195 return repo.verify()
2196 2196 return 1
2197 2197
2198 2198 def remove(ui, repo, *pats, **opts):
2199 2199 """remove the specified files on the next commit
2200 2200
2201 2201 Schedule the indicated files for removal from the repository.
2202 2202
2203 2203 This command schedules the files to be removed at the next commit.
2204 2204 This only removes files from the current branch, not from the
2205 2205 entire project history. If the files still exist in the working
2206 2206 directory, they will be deleted from it. If invoked with --after,
2207 2207 files that have been manually deleted are marked as removed.
2208 2208
2209 2209 Modified files and added files are not removed by default. To
2210 2210 remove them, use the -f/--force option.
2211 2211 """
2212 2212 names = []
2213 2213 if not opts['after'] and not pats:
2214 2214 raise util.Abort(_('no files specified'))
2215 2215 files, matchfn, anypats = matchpats(repo, pats, opts)
2216 2216 exact = dict.fromkeys(files)
2217 2217 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2218 2218 modified, added, removed, deleted, unknown = mardu
2219 2219 remove, forget = [], []
2220 2220 for src, abs, rel, exact in walk(repo, pats, opts):
2221 2221 reason = None
2222 2222 if abs not in deleted and opts['after']:
2223 2223 reason = _('is still present')
2224 2224 elif abs in modified and not opts['force']:
2225 2225 reason = _('is modified (use -f to force removal)')
2226 2226 elif abs in added:
2227 2227 if opts['force']:
2228 2228 forget.append(abs)
2229 2229 continue
2230 2230 reason = _('has been marked for add (use -f to force removal)')
2231 2231 elif abs in unknown:
2232 2232 reason = _('is not managed')
2233 2233 elif abs in removed:
2234 2234 continue
2235 2235 if reason:
2236 2236 if exact:
2237 2237 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2238 2238 else:
2239 2239 if ui.verbose or not exact:
2240 2240 ui.status(_('removing %s\n') % rel)
2241 2241 remove.append(abs)
2242 2242 repo.forget(forget)
2243 2243 repo.remove(remove, unlink=not opts['after'])
2244 2244
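# Editor's sketch (not part of this module): the per-file decision applied by
# remove() above, written as a small pure function.  'status' is one of the
# single-letter codes listed in the status command's help text ('M' modified,
# 'A' added, 'R' removed, '!' deleted, '?' not tracked); the return value is
# the action taken, or the reason for refusing.
def _remove_decision_sketch(status, after, force):
    if after and status != '!':
        return 'skip: is still present'
    if status == 'M' and not force:
        return 'skip: is modified (use -f to force removal)'
    if status == 'A':
        return force and 'forget' or \
               'skip: has been marked for add (use -f to force removal)'
    if status == '?':
        return 'skip: is not managed'
    if status == 'R':
        return 'skip: already removed'
    return 'remove'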
2245 2245 def rename(ui, repo, *pats, **opts):
2246 2246 """rename files; equivalent of copy + remove
2247 2247
2248 2248 Mark dest as copies of sources; mark sources for deletion. If
2249 2249 dest is a directory, copies are put in that directory. If dest is
2250 2250 a file, there can only be one source.
2251 2251
2252 2252 By default, this command copies the contents of files as they
2253 2253 stand in the working directory. If invoked with --after, the
2254 2254 operation is recorded, but no copying is performed.
2255 2255
2256 2256 This command takes effect in the next commit.
2257 2257
2258 2258 NOTE: This command should be treated as experimental. While it
2259 2259 should properly record renamed files, this information is not yet
2260 2260 fully used by merge, nor fully reported by log.
2261 2261 """
2262 2262 wlock = repo.wlock(0)
2263 2263 errs, copied = docopy(ui, repo, pats, opts, wlock)
2264 2264 names = []
2265 2265 for abs, rel, exact in copied:
2266 2266 if ui.verbose or not exact:
2267 2267 ui.status(_('removing %s\n') % rel)
2268 2268 names.append(abs)
2269 2269 repo.remove(names, True, wlock)
2270 2270 return errs
2271 2271
2272 2272 def revert(ui, repo, *pats, **opts):
2273 2273 """revert files or dirs to their states as of some revision
2274 2274
2275 2275 With no revision specified, revert the named files or directories
2276 2276 to the contents they had in the parent of the working directory.
2277 2277 This restores the contents of the affected files to an unmodified
2278 2278 state. If the working directory has two parents, you must
2279 2279 explicitly specify the revision to revert to.
2280 2280
2281 2281 Modified files are saved with a .orig suffix before reverting.
2282 2282 To disable these backups, use --no-backup.
2283 2283
2284 2284 Using the -r option, revert the given files or directories to
2285 2285 their contents as of a specific revision. This can be helpful to "roll
2286 2286 back" some or all of a change that should not have been committed.
2287 2287
2288 2288 Revert modifies the working directory. It does not commit any
2289 2289 changes, or change the parent of the working directory. If you
2290 2290 revert to a revision other than the parent of the working
2291 2291 directory, the reverted files will thus appear modified
2292 2292 afterwards.
2293 2293
2294 2294 If a file has been deleted, it is recreated. If the executable
2295 2295 mode of a file was changed, it is reset.
2296 2296
2297 2297 If names are given, all files matching the names are reverted.
2298 2298
2299 2299 If no arguments are given, all files in the repository are reverted.
2300 2300 """
2301 2301 parent, p2 = repo.dirstate.parents()
2302 2302 if opts['rev']:
2303 2303 node = repo.lookup(opts['rev'])
2304 2304 elif p2 != nullid:
2305 2305 raise util.Abort(_('working dir has two parents; '
2306 2306 'you must specify the revision to revert to'))
2307 2307 else:
2308 2308 node = parent
2309 2309 pmf = None
2310 2310 mf = repo.manifest.read(repo.changelog.read(node)[0])
2311 2311
2312 2312 wlock = repo.wlock()
2313 2313
2314 2314 # need all matching names in dirstate and manifest of target rev,
2315 2315 # so have to walk both. do not print errors if files exist in one
2316 2316 # but not other.
2317 2317
2318 2318 names = {}
2319 2319 target_only = {}
2320 2320
2321 2321 # walk dirstate.
2322 2322
2323 2323 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2324 2324 names[abs] = (rel, exact)
2325 2325 if src == 'b':
2326 2326 target_only[abs] = True
2327 2327
2328 2328 # walk target manifest.
2329 2329
2330 2330 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2331 2331 badmatch=names.has_key):
2332 2332 if abs in names: continue
2333 2333 names[abs] = (rel, exact)
2334 2334 target_only[abs] = True
2335 2335
2336 2336 changes = repo.changes(match=names.has_key, wlock=wlock)
2337 2337 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2338 2338
2339 2339 revert = ([], _('reverting %s\n'))
2340 2340 add = ([], _('adding %s\n'))
2341 2341 remove = ([], _('removing %s\n'))
2342 2342 forget = ([], _('forgetting %s\n'))
2343 2343 undelete = ([], _('undeleting %s\n'))
2344 2344 update = {}
2345 2345
2346 2346 disptable = (
2347 2347 # dispatch table:
2348 2348 # file state
2349 2349 # action if in target manifest
2350 2350 # action if not in target manifest
2351 2351 # make backup if in target manifest
2352 2352 # make backup if not in target manifest
2353 2353 (modified, revert, remove, True, True),
2354 2354 (added, revert, forget, True, False),
2355 2355 (removed, undelete, None, False, False),
2356 2356 (deleted, revert, remove, False, False),
2357 2357 (unknown, add, None, True, False),
2358 2358 (target_only, add, None, False, False),
2359 2359 )
2360 2360
2361 2361 entries = names.items()
2362 2362 entries.sort()
2363 2363
2364 2364 for abs, (rel, exact) in entries:
2365 2365 in_mf = abs in mf
2366 2366 def handle(xlist, dobackup):
2367 2367 xlist[0].append(abs)
2368 2368 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2369 2369 bakname = "%s.orig" % rel
2370 2370 ui.note(_('saving current version of %s as %s\n') %
2371 2371 (rel, bakname))
2372 2372 shutil.copyfile(rel, bakname)
2373 2373 shutil.copymode(rel, bakname)
2374 2374 if ui.verbose or not exact:
2375 2375 ui.status(xlist[1] % rel)
2376 2376 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2377 2377 if abs not in table: continue
2378 2378 # file has changed in dirstate
2379 2379 if in_mf:
2380 2380 handle(hitlist, backuphit)
2381 2381 elif misslist is not None:
2382 2382 handle(misslist, backupmiss)
2383 2383 else:
2384 2384 if exact: ui.warn(_('file not managed: %s\n') % rel)
2385 2385 break
2386 2386 else:
2387 2387 # file has not changed in dirstate
2388 2388 if node == parent:
2389 2389 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2390 2390 continue
2391 2391 if not in_mf:
2392 2392 if pmf is None:
2393 2393 # only need parent manifest in this unlikely case,
2394 2394 # so do not read by default
2395 2395 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2396 2396 if abs in pmf:
2397 2397 handle(remove, False)
2398 2398 update[abs] = True
2399 2399
2400 2400 repo.dirstate.forget(forget[0])
2401 2401 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2402 2402 show_stats=False)
2403 2403 repo.dirstate.update(add[0], 'a')
2404 2404 repo.dirstate.update(undelete[0], 'n')
2405 2405 repo.dirstate.update(remove[0], 'r')
2406 2406 return r
2407 2407
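# Editor's sketch (not part of this module): the dispatch table driving
# revert() above, reduced to a plain lookup.  Given a file's state in the
# working directory and whether it exists in the target manifest, it yields
# (action, make_backup); a None action means "nothing to do, warn if the
# name was given explicitly".
_revert_table_sketch = {
    # state:       (if in target manifest,  if not in target manifest)
    'modified':    (('revert',   True),     ('remove', True)),
    'added':       (('revert',   True),     ('forget', False)),
    'removed':     (('undelete', False),    (None,     False)),
    'deleted':     (('revert',   False),    ('remove', False)),
    'unknown':     (('add',      True),     (None,     False)),
    'target_only': (('add',      False),    (None,     False)),
}

def _revert_action_sketch(state, in_target_manifest):
    hit, miss = _revert_table_sketch[state]
    if in_target_manifest:
        return hit
    return miss

# e.g. _revert_action_sketch('modified', True) == ('revert', True):
# a locally modified file that exists in the target is reverted, with a
# .orig backup unless --no-backup is given.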
2408 2408 def rollback(ui, repo):
2409 2409 """roll back the last transaction in this repository
2410 2410
2411 2411 Roll back the last transaction in this repository, restoring the
2412 2412 project to its state prior to the transaction.
2413 2413
2414 2414 Transactions are used to encapsulate the effects of all commands
2415 2415 that create new changesets or propagate existing changesets into a
2416 2416 repository. For example, the following commands are transactional,
2417 2417 and their effects can be rolled back:
2418 2418
2419 2419 commit
2420 2420 import
2421 2421 pull
2422 2422 push (with this repository as destination)
2423 2423 unbundle
2424 2424
2425 2425 This command should be used with care. There is only one level of
2426 2426 rollback, and there is no way to undo a rollback.
2427 2427
2428 2428 This command is not intended for use on public repositories. Once
2429 2429 changes are visible for pull by other users, rolling a transaction
2430 2430 back locally is ineffective (someone else may already have pulled
2431 2431 the changes). Furthermore, a race is possible with readers of the
2432 2432 repository; for example an in-progress pull from the repository
2433 2433 may fail if a rollback is performed.
2434 2434 """
2435 repo.undo()
2435 repo.rollback()
2436 2436
2437 2437 def root(ui, repo):
2438 2438 """print the root (top) of the current working dir
2439 2439
2440 2440 Print the root directory of the current repository.
2441 2441 """
2442 2442 ui.write(repo.root + "\n")
2443 2443
2444 2444 def serve(ui, repo, **opts):
2445 2445 """export the repository via HTTP
2446 2446
2447 2447 Start a local HTTP repository browser and pull server.
2448 2448
2449 2449 By default, the server logs accesses to stdout and errors to
2450 2450 stderr. Use the "-A" and "-E" options to log to files.
2451 2451 """
2452 2452
2453 2453 if opts["stdio"]:
2454 2454 if repo is None:
2455 2455 raise hg.RepoError(_('no repo found'))
2456 2456 fin, fout = sys.stdin, sys.stdout
2457 2457 sys.stdout = sys.stderr
2458 2458
2459 2459 # Prevent insertion/deletion of CRs
2460 2460 util.set_binary(fin)
2461 2461 util.set_binary(fout)
2462 2462
2463 2463 def getarg():
2464 2464 argline = fin.readline()[:-1]
2465 2465 arg, l = argline.split()
2466 2466 val = fin.read(int(l))
2467 2467 return arg, val
2468 2468 def respond(v):
2469 2469 fout.write("%d\n" % len(v))
2470 2470 fout.write(v)
2471 2471 fout.flush()
2472 2472
2473 2473 lock = None
2474 2474
2475 2475 while 1:
2476 2476 cmd = fin.readline()[:-1]
2477 2477 if cmd == '':
2478 2478 return
2479 2479 if cmd == "heads":
2480 2480 h = repo.heads()
2481 2481 respond(" ".join(map(hex, h)) + "\n")
2482 2482 elif cmd == "lock":
2483 2483 lock = repo.lock()
2484 2484 respond("")
2485 2485 elif cmd == "unlock":
2486 2486 if lock:
2487 2487 lock.release()
2488 2488 lock = None
2489 2489 respond("")
2490 2490 elif cmd == "branches":
2491 2491 arg, nodes = getarg()
2492 2492 nodes = map(bin, nodes.split(" "))
2493 2493 r = []
2494 2494 for b in repo.branches(nodes):
2495 2495 r.append(" ".join(map(hex, b)) + "\n")
2496 2496 respond("".join(r))
2497 2497 elif cmd == "between":
2498 2498 arg, pairs = getarg()
2499 2499 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2500 2500 r = []
2501 2501 for b in repo.between(pairs):
2502 2502 r.append(" ".join(map(hex, b)) + "\n")
2503 2503 respond("".join(r))
2504 2504 elif cmd == "changegroup":
2505 2505 nodes = []
2506 2506 arg, roots = getarg()
2507 2507 nodes = map(bin, roots.split(" "))
2508 2508
2509 2509 cg = repo.changegroup(nodes, 'serve')
2510 2510 while 1:
2511 2511 d = cg.read(4096)
2512 2512 if not d:
2513 2513 break
2514 2514 fout.write(d)
2515 2515
2516 2516 fout.flush()
2517 2517
2518 2518 elif cmd == "addchangegroup":
2519 2519 if not lock:
2520 2520 respond("not locked")
2521 2521 continue
2522 2522 respond("")
2523 2523
2524 2524 r = repo.addchangegroup(fin, 'serve')
2525 2525 respond(str(r))
2526 2526
2527 2527 optlist = ("name templates style address port ipv6"
2528 2528 " accesslog errorlog webdir_conf")
2529 2529 for o in optlist.split():
2530 2530 if opts[o]:
2531 2531 ui.setconfig("web", o, opts[o])
2532 2532
2533 2533 if repo is None and not ui.config("web", "webdir_conf"):
2534 2534 raise hg.RepoError(_('no repo found'))
2535 2535
2536 2536 if opts['daemon'] and not opts['daemon_pipefds']:
2537 2537 rfd, wfd = os.pipe()
2538 2538 args = sys.argv[:]
2539 2539 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2540 2540 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2541 2541 args[0], args)
2542 2542 os.close(wfd)
2543 2543 os.read(rfd, 1)
2544 2544 os._exit(0)
2545 2545
2546 2546 try:
2547 2547 httpd = create_server(ui, repo, hgwebdir, hgweb)
2548 2548 except socket.error, inst:
2549 2549 raise util.Abort(_('cannot start server: ') + inst.args[1])
2550 2550
2551 2551 if ui.verbose:
2552 2552 addr, port = httpd.socket.getsockname()
2553 2553 if addr == '0.0.0.0':
2554 2554 addr = socket.gethostname()
2555 2555 else:
2556 2556 try:
2557 2557 addr = socket.gethostbyaddr(addr)[0]
2558 2558 except socket.error:
2559 2559 pass
2560 2560 if port != 80:
2561 2561 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2562 2562 else:
2563 2563 ui.status(_('listening at http://%s/\n') % addr)
2564 2564
2565 2565 if opts['pid_file']:
2566 2566 fp = open(opts['pid_file'], 'w')
2567 2567 fp.write(str(os.getpid()))
2568 2568 fp.close()
2569 2569
2570 2570 if opts['daemon_pipefds']:
2571 2571 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2572 2572 os.close(rfd)
2573 2573 os.write(wfd, 'y')
2574 2574 os.close(wfd)
2575 2575 sys.stdout.flush()
2576 2576 sys.stderr.flush()
2577 2577 fd = os.open(util.nulldev, os.O_RDWR)
2578 2578 if fd != 0: os.dup2(fd, 0)
2579 2579 if fd != 1: os.dup2(fd, 1)
2580 2580 if fd != 2: os.dup2(fd, 2)
2581 2581 if fd not in (0, 1, 2): os.close(fd)
2582 2582
2583 2583 httpd.serve_forever()
2584 2584
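# Editor's sketch (not part of this module): the framing used by the --stdio
# protocol loop in serve() above.  Arguments arrive as "<name> <length>\n"
# followed by <length> bytes of value; responses are "<length>\n" followed by
# the payload.  The file objects are stand-ins for the fin/fout pair above.
def _getarg_sketch(fin):
    argline = fin.readline()[:-1]
    arg, l = argline.split()
    return arg, fin.read(int(l))

def _respond_sketch(fout, value):
    fout.write("%d\n" % len(value))
    fout.write(value)
    fout.flush()

# e.g. with a StringIO standing in for the output pipe:
#   from StringIO import StringIO
#   buf = StringIO(); _respond_sketch(buf, "abc"); buf.getvalue() == "3\nabc"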
2585 2585 def status(ui, repo, *pats, **opts):
2586 2586 """show changed files in the working directory
2587 2587
2588 2588 Show changed files in the repository. If names are
2589 2589 given, only files that match are shown.
2590 2590
2591 2591 The codes used to show the status of files are:
2592 2592 M = modified
2593 2593 A = added
2594 2594 R = removed
2595 2595 ! = deleted, but still tracked
2596 2596 ? = not tracked
2597 2597 I = ignored (not shown by default)
2598 2598 """
2599 2599
2600 2600 show_ignored = opts['ignored'] and True or False
2601 2601 files, matchfn, anypats = matchpats(repo, pats, opts)
2602 2602 cwd = (pats and repo.getcwd()) or ''
2603 2603 modified, added, removed, deleted, unknown, ignored = [
2604 2604 [util.pathto(cwd, x) for x in n]
2605 2605 for n in repo.changes(files=files, match=matchfn,
2606 2606 show_ignored=show_ignored)]
2607 2607
2608 2608 changetypes = [('modified', 'M', modified),
2609 2609 ('added', 'A', added),
2610 2610 ('removed', 'R', removed),
2611 2611 ('deleted', '!', deleted),
2612 2612 ('unknown', '?', unknown),
2613 2613 ('ignored', 'I', ignored)]
2614 2614
2615 2615 end = opts['print0'] and '\0' or '\n'
2616 2616
2617 2617 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2618 2618 or changetypes):
2619 2619 if opts['no_status']:
2620 2620 format = "%%s%s" % end
2621 2621 else:
2622 2622 format = "%s %%s%s" % (char, end)
2623 2623
2624 2624 for f in changes:
2625 2625 ui.write(format % f)
2626 2626
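# Editor's sketch (not part of this module): the per-file record format used
# by status() above -- optionally prefixed by the status code, terminated by
# '\n' or, with --print0, by NUL so the output is safe to feed to "xargs -0".
def _format_status_sketch(char, filename, no_status=False, print0=False):
    end = print0 and '\0' or '\n'
    if no_status:
        return "%s%s" % (filename, end)
    return "%s %s%s" % (char, filename, end)

# e.g. _format_status_sketch('M', 'commands.py') == 'M commands.py\n'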
2627 2627 def tag(ui, repo, name, rev_=None, **opts):
2628 2628 """add a tag for the current tip or a given revision
2629 2629
2630 2630 Name a particular revision using <name>.
2631 2631
2632 2632 Tags are used to name particular revisions of the repository and are
2633 2633 very useful for comparing different revisions, going back to significant
2634 2634 earlier versions, or marking branch points as releases.
2635 2635
2636 2636 If no revision is given, the tip is used.
2637 2637
2638 2638 To facilitate version control, distribution, and merging of tags,
2639 2639 they are stored as a file named ".hgtags" which is managed
2640 2640 similarly to other project files and can be hand-edited if
2641 2641 necessary. The file '.hg/localtags' is used for local tags (not
2642 2642 shared among repositories).
2643 2643 """
2644 2644 if name == "tip":
2645 2645 raise util.Abort(_("the name 'tip' is reserved"))
2646 2646 if rev_ is not None:
2647 2647 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2648 2648 "please use 'hg tag [-r REV] NAME' instead\n"))
2649 2649 if opts['rev']:
2650 2650 raise util.Abort(_("use only one form to specify the revision"))
2651 2651 if opts['rev']:
2652 2652 rev_ = opts['rev']
2653 2653 if rev_:
2654 2654 r = hex(repo.lookup(rev_))
2655 2655 else:
2656 2656 r = hex(repo.changelog.tip())
2657 2657
2658 2658 disallowed = (revrangesep, '\r', '\n')
2659 2659 for c in disallowed:
2660 2660 if name.find(c) >= 0:
2661 2661 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2662 2662
2663 2663 repo.hook('pretag', throw=True, node=r, tag=name,
2664 2664 local=int(not not opts['local']))
2665 2665
2666 2666 if opts['local']:
2667 2667 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2668 2668 repo.hook('tag', node=r, tag=name, local=1)
2669 2669 return
2670 2670
2671 2671 for x in repo.changes():
2672 2672 if ".hgtags" in x:
2673 2673 raise util.Abort(_("working copy of .hgtags is changed "
2674 2674 "(please commit .hgtags manually)"))
2675 2675
2676 2676 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2677 2677 if repo.dirstate.state(".hgtags") == '?':
2678 2678 repo.add([".hgtags"])
2679 2679
2680 2680 message = (opts['message'] or
2681 2681 _("Added tag %s for changeset %s") % (name, r))
2682 2682 try:
2683 2683 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2684 2684 repo.hook('tag', node=r, tag=name, local=0)
2685 2685 except ValueError, inst:
2686 2686 raise util.Abort(str(inst))
2687 2687
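# Editor's sketch (not part of this module): the constraints tag() enforces
# above and the one-line-per-tag record it appends to .hgtags (or to
# .hg/localtags with --local).  The revision-range separator is written out
# literally here (it is ':' in this file) so the sketch stands alone.
def _check_tagname_sketch(name):
    if name == 'tip':
        raise ValueError("the name 'tip' is reserved")
    for c in (':', '\r', '\n'):
        if c in name:
            raise ValueError("%r cannot be used in a tag name" % c)

def _tagline_sketch(hexnode, name):
    # format of each .hgtags / localtags entry: "<40-digit hex node> <name>\n"
    return "%s %s\n" % (hexnode, name)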
2688 2688 def tags(ui, repo):
2689 2689 """list repository tags
2690 2690
2691 2691 List the repository tags.
2692 2692
2693 2693 This lists both regular and local tags.
2694 2694 """
2695 2695
2696 2696 l = repo.tagslist()
2697 2697 l.reverse()
2698 2698 for t, n in l:
2699 2699 try:
2700 2700 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2701 2701 except KeyError:
2702 2702 r = " ?:?"
2703 2703 if ui.quiet:
2704 2704 ui.write("%s\n" % t)
2705 2705 else:
2706 2706 ui.write("%-30s %s\n" % (t, r))
2707 2707
2708 2708 def tip(ui, repo, **opts):
2709 2709 """show the tip revision
2710 2710
2711 2711 Show the tip revision.
2712 2712 """
2713 2713 n = repo.changelog.tip()
2714 2714 br = None
2715 2715 if opts['branches']:
2716 2716 br = repo.branchlookup([n])
2717 2717 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2718 2718 if opts['patch']:
2719 2719 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2720 2720
2721 2721 def unbundle(ui, repo, fname, **opts):
2722 2722 """apply a changegroup file
2723 2723
2724 2724 Apply a compressed changegroup file generated by the bundle
2725 2725 command.
2726 2726 """
2727 2727 f = urllib.urlopen(fname)
2728 2728
2729 2729 header = f.read(6)
2730 2730 if not header.startswith("HG"):
2731 2731 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2732 2732 elif not header.startswith("HG10"):
2733 2733 raise util.Abort(_("%s: unknown bundle version") % fname)
2734 2734 elif header == "HG10BZ":
2735 2735 def generator(f):
2736 2736 zd = bz2.BZ2Decompressor()
2737 2737 zd.decompress("BZ")
2738 2738 for chunk in f:
2739 2739 yield zd.decompress(chunk)
2740 2740 elif header == "HG10UN":
2741 2741 def generator(f):
2742 2742 for chunk in f:
2743 2743 yield chunk
2744 2744 else:
2745 2745 raise util.Abort(_("%s: unknown bundle compression type")
2746 2746 % fname)
2747 2747 gen = generator(util.filechunkiter(f, 4096))
2748 2748 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2749 2749 return postincoming(ui, repo, modheads, opts['update'])
2750 2750
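# Editor's sketch (not part of this module): the header dispatch in
# unbundle() above.  The six-byte header picks a chunk decoder; for HG10BZ
# bundles the "BZ" magic that was consumed while reading the header is
# re-fed to the decompressor so the rest of the stream parses as ordinary
# bzip2 data.  'bz2' comes from this module's demandload imports.
def _bundle_decoder_sketch(header, chunks):
    if not header.startswith("HG"):
        raise ValueError("not a Mercurial bundle file")
    elif not header.startswith("HG10"):
        raise ValueError("unknown bundle version")
    elif header == "HG10BZ":
        def gen():
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")
            for chunk in chunks:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def gen():
            for chunk in chunks:
                yield chunk
    else:
        raise ValueError("unknown bundle compression type")
    return gen()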
2751 2751 def undo(ui, repo):
2752 2752 """undo the last commit or pull (DEPRECATED)
2753 2753
2754 2754 (DEPRECATED)
2755 2755 This command is now deprecated and will be removed in a future
2756 2756 release. Please use the rollback command instead. For usage
2757 2757 instructions, see the rollback command.
2758 2758 """
2759 2759 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2760 repo.undo()
2760 repo.rollback()
2761 2761
2762 2762 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2763 2763 branch=None, **opts):
2764 2764 """update or merge working directory
2765 2765
2766 2766 Update the working directory to the specified revision.
2767 2767
2768 2768 If there are no outstanding changes in the working directory and
2769 2769 there is a linear relationship between the current version and the
2770 2770 requested version, the result is the requested version.
2771 2771
2772 2772 To merge the working directory with another revision, use the
2773 2773 merge command.
2774 2774
2775 2775 By default, update will refuse to run if doing so would require
2776 2776 merging or discarding local changes.
2777 2777 """
2778 2778 if merge:
2779 2779 ui.warn(_('(the -m/--merge option is deprecated; '
2780 2780 'use the merge command instead)\n'))
2781 2781 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2782 2782
2783 2783 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2784 2784 branch=None, **opts):
2785 2785 if branch:
2786 2786 br = repo.branchlookup(branch=branch)
2787 2787 found = []
2788 2788 for x in br:
2789 2789 if branch in br[x]:
2790 2790 found.append(x)
2791 2791 if len(found) > 1:
2792 2792 ui.warn(_("Found multiple heads for %s\n") % branch)
2793 2793 for x in found:
2794 2794 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2795 2795 return 1
2796 2796 if len(found) == 1:
2797 2797 node = found[0]
2798 2798 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2799 2799 else:
2800 2800 ui.warn(_("branch %s not found\n") % (branch))
2801 2801 return 1
2802 2802 else:
2803 2803 node = node and repo.lookup(node) or repo.changelog.tip()
2804 2804 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2805 2805
2806 2806 def verify(ui, repo):
2807 2807 """verify the integrity of the repository
2808 2808
2809 2809 Verify the integrity of the current repository.
2810 2810
2811 2811 This will perform an extensive check of the repository's
2812 2812 integrity, validating the hashes and checksums of each entry in
2813 2813 the changelog, manifest, and tracked files, as well as the
2814 2814 integrity of their crosslinks and indices.
2815 2815 """
2816 2816 return repo.verify()
2817 2817
2818 2818 # Command options and aliases are listed here, alphabetically
2819 2819
2820 2820 table = {
2821 2821 "^add":
2822 2822 (add,
2823 2823 [('I', 'include', [], _('include names matching the given patterns')),
2824 2824 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2825 2825 _('hg add [OPTION]... [FILE]...')),
2826 2826 "debugaddremove|addremove":
2827 2827 (addremove,
2828 2828 [('I', 'include', [], _('include names matching the given patterns')),
2829 2829 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2830 2830 _('hg addremove [OPTION]... [FILE]...')),
2831 2831 "^annotate":
2832 2832 (annotate,
2833 2833 [('r', 'rev', '', _('annotate the specified revision')),
2834 2834 ('a', 'text', None, _('treat all files as text')),
2835 2835 ('u', 'user', None, _('list the author')),
2836 2836 ('d', 'date', None, _('list the date')),
2837 2837 ('n', 'number', None, _('list the revision number (default)')),
2838 2838 ('c', 'changeset', None, _('list the changeset')),
2839 2839 ('I', 'include', [], _('include names matching the given patterns')),
2840 2840 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2841 2841 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2842 2842 "archive":
2843 2843 (archive,
2844 2844 [('', 'no-decode', None, _('do not pass files through decoders')),
2845 2845 ('p', 'prefix', '', _('directory prefix for files in archive')),
2846 2846 ('r', 'rev', '', _('revision to distribute')),
2847 2847 ('t', 'type', '', _('type of distribution to create')),
2848 2848 ('I', 'include', [], _('include names matching the given patterns')),
2849 2849 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2850 2850 _('hg archive [OPTION]... DEST')),
2851 2851 "backout":
2852 2852 (backout,
2853 2853 [('', 'merge', None,
2854 2854 _('merge with old dirstate parent after backout')),
2855 2855 ('m', 'message', '', _('use <text> as commit message')),
2856 2856 ('l', 'logfile', '', _('read commit message from <file>')),
2857 2857 ('d', 'date', '', _('record datecode as commit date')),
2858 2858 ('u', 'user', '', _('record user as committer')),
2859 2859 ('I', 'include', [], _('include names matching the given patterns')),
2860 2860 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2861 2861 _('hg backout [OPTION]... REV')),
2862 2862 "bundle":
2863 2863 (bundle,
2864 2864 [('f', 'force', None,
2865 2865 _('run even when remote repository is unrelated'))],
2866 2866 _('hg bundle FILE DEST')),
2867 2867 "cat":
2868 2868 (cat,
2869 2869 [('o', 'output', '', _('print output to file with formatted name')),
2870 2870 ('r', 'rev', '', _('print the given revision')),
2871 2871 ('I', 'include', [], _('include names matching the given patterns')),
2872 2872 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2873 2873 _('hg cat [OPTION]... FILE...')),
2874 2874 "^clone":
2875 2875 (clone,
2876 2876 [('U', 'noupdate', None, _('do not update the new working directory')),
2877 2877 ('r', 'rev', [],
2878 2878 _('a changeset you would like to have after cloning')),
2879 2879 ('', 'pull', None, _('use pull protocol to copy metadata')),
2880 2880 ('e', 'ssh', '', _('specify ssh command to use')),
2881 2881 ('', 'remotecmd', '',
2882 2882 _('specify hg command to run on the remote side'))],
2883 2883 _('hg clone [OPTION]... SOURCE [DEST]')),
2884 2884 "^commit|ci":
2885 2885 (commit,
2886 2886 [('A', 'addremove', None,
2887 2887 _('mark new/missing files as added/removed before committing')),
2888 2888 ('m', 'message', '', _('use <text> as commit message')),
2889 2889 ('l', 'logfile', '', _('read the commit message from <file>')),
2890 2890 ('d', 'date', '', _('record datecode as commit date')),
2891 2891 ('u', 'user', '', _('record user as committer')),
2892 2892 ('I', 'include', [], _('include names matching the given patterns')),
2893 2893 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2894 2894 _('hg commit [OPTION]... [FILE]...')),
2895 2895 "copy|cp":
2896 2896 (copy,
2897 2897 [('A', 'after', None, _('record a copy that has already occurred')),
2898 2898 ('f', 'force', None,
2899 2899 _('forcibly copy over an existing managed file')),
2900 2900 ('I', 'include', [], _('include names matching the given patterns')),
2901 2901 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2902 2902 _('hg copy [OPTION]... [SOURCE]... DEST')),
2903 2903 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2904 2904 "debugcomplete":
2905 2905 (debugcomplete,
2906 2906 [('o', 'options', None, _('show the command options'))],
2907 2907 _('debugcomplete [-o] CMD')),
2908 2908 "debugrebuildstate":
2909 2909 (debugrebuildstate,
2910 2910 [('r', 'rev', '', _('revision to rebuild to'))],
2911 2911 _('debugrebuildstate [-r REV] [REV]')),
2912 2912 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2913 2913 "debugconfig": (debugconfig, [], _('debugconfig')),
2914 2914 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2915 2915 "debugstate": (debugstate, [], _('debugstate')),
2916 2916 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2917 2917 "debugindex": (debugindex, [], _('debugindex FILE')),
2918 2918 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2919 2919 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2920 2920 "debugwalk":
2921 2921 (debugwalk,
2922 2922 [('I', 'include', [], _('include names matching the given patterns')),
2923 2923 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2924 2924 _('debugwalk [OPTION]... [FILE]...')),
2925 2925 "^diff":
2926 2926 (diff,
2927 2927 [('r', 'rev', [], _('revision')),
2928 2928 ('a', 'text', None, _('treat all files as text')),
2929 2929 ('p', 'show-function', None,
2930 2930 _('show which function each change is in')),
2931 2931 ('w', 'ignore-all-space', None,
2932 2932 _('ignore white space when comparing lines')),
2933 2933 ('I', 'include', [], _('include names matching the given patterns')),
2934 2934 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2935 2935 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2936 2936 "^export":
2937 2937 (export,
2938 2938 [('o', 'output', '', _('print output to file with formatted name')),
2939 2939 ('a', 'text', None, _('treat all files as text')),
2940 2940 ('', 'switch-parent', None, _('diff against the second parent'))],
2941 2941 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2942 2942 "debugforget|forget":
2943 2943 (forget,
2944 2944 [('I', 'include', [], _('include names matching the given patterns')),
2945 2945 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2946 2946 _('hg forget [OPTION]... FILE...')),
2947 2947 "grep":
2948 2948 (grep,
2949 2949 [('0', 'print0', None, _('end fields with NUL')),
2950 2950 ('', 'all', None, _('print all revisions that match')),
2951 2951 ('i', 'ignore-case', None, _('ignore case when matching')),
2952 2952 ('l', 'files-with-matches', None,
2953 2953 _('print only filenames and revs that match')),
2954 2954 ('n', 'line-number', None, _('print matching line numbers')),
2955 2955 ('r', 'rev', [], _('search in given revision range')),
2956 2956 ('u', 'user', None, _('print user who committed change')),
2957 2957 ('I', 'include', [], _('include names matching the given patterns')),
2958 2958 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2959 2959 _('hg grep [OPTION]... PATTERN [FILE]...')),
2960 2960 "heads":
2961 2961 (heads,
2962 2962 [('b', 'branches', None, _('show branches')),
2963 2963 ('', 'style', '', _('display using template map file')),
2964 2964 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2965 2965 ('', 'template', '', _('display with template'))],
2966 2966 _('hg heads [-b] [-r <rev>]')),
2967 2967 "help": (help_, [], _('hg help [COMMAND]')),
2968 2968 "identify|id": (identify, [], _('hg identify')),
2969 2969 "import|patch":
2970 2970 (import_,
2971 2971 [('p', 'strip', 1,
2972 2972 _('directory strip option for patch. This has the same\n'
2973 2973 'meaning as the corresponding patch option')),
2974 2974 ('b', 'base', '', _('base path')),
2975 2975 ('f', 'force', None,
2976 2976 _('skip check for outstanding uncommitted changes'))],
2977 2977 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2978 2978 "incoming|in": (incoming,
2979 2979 [('M', 'no-merges', None, _('do not show merges')),
2980 2980 ('f', 'force', None,
2981 2981 _('run even when remote repository is unrelated')),
2982 2982 ('', 'style', '', _('display using template map file')),
2983 2983 ('n', 'newest-first', None, _('show newest record first')),
2984 2984 ('', 'bundle', '', _('file to store the bundles into')),
2985 2985 ('p', 'patch', None, _('show patch')),
2986 2986 ('', 'template', '', _('display with template')),
2987 2987 ('e', 'ssh', '', _('specify ssh command to use')),
2988 2988 ('', 'remotecmd', '',
2989 2989 _('specify hg command to run on the remote side'))],
2990 2990 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2991 2991 "^init": (init, [], _('hg init [DEST]')),
2992 2992 "locate":
2993 2993 (locate,
2994 2994 [('r', 'rev', '', _('search the repository as it stood at rev')),
2995 2995 ('0', 'print0', None,
2996 2996 _('end filenames with NUL, for use with xargs')),
2997 2997 ('f', 'fullpath', None,
2998 2998 _('print complete paths from the filesystem root')),
2999 2999 ('I', 'include', [], _('include names matching the given patterns')),
3000 3000 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3001 3001 _('hg locate [OPTION]... [PATTERN]...')),
3002 3002 "^log|history":
3003 3003 (log,
3004 3004 [('b', 'branches', None, _('show branches')),
3005 3005 ('k', 'keyword', [], _('search for a keyword')),
3006 3006 ('l', 'limit', '', _('limit number of changes displayed')),
3007 3007 ('r', 'rev', [], _('show the specified revision or range')),
3008 3008 ('M', 'no-merges', None, _('do not show merges')),
3009 3009 ('', 'style', '', _('display using template map file')),
3010 3010 ('m', 'only-merges', None, _('show only merges')),
3011 3011 ('p', 'patch', None, _('show patch')),
3012 3012 ('', 'template', '', _('display with template')),
3013 3013 ('I', 'include', [], _('include names matching the given patterns')),
3014 3014 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3015 3015 _('hg log [OPTION]... [FILE]')),
3016 3016 "manifest": (manifest, [], _('hg manifest [REV]')),
3017 3017 "merge":
3018 3018 (merge,
3019 3019 [('b', 'branch', '', _('merge with head of a specific branch')),
3020 3020 ('f', 'force', None, _('force a merge with outstanding changes'))],
3021 3021 _('hg merge [-b TAG] [-f] [REV]')),
3022 3022 "outgoing|out": (outgoing,
3023 3023 [('M', 'no-merges', None, _('do not show merges')),
3024 3024 ('f', 'force', None,
3025 3025 _('run even when remote repository is unrelated')),
3026 3026 ('p', 'patch', None, _('show patch')),
3027 3027 ('', 'style', '', _('display using template map file')),
3028 3028 ('n', 'newest-first', None, _('show newest record first')),
3029 3029 ('', 'template', '', _('display with template')),
3030 3030 ('e', 'ssh', '', _('specify ssh command to use')),
3031 3031 ('', 'remotecmd', '',
3032 3032 _('specify hg command to run on the remote side'))],
3033 3033 _('hg outgoing [-M] [-p] [-n] [DEST]')),
3034 3034 "^parents":
3035 3035 (parents,
3036 3036 [('b', 'branches', None, _('show branches')),
3037 3037 ('', 'style', '', _('display using template map file')),
3038 3038 ('', 'template', '', _('display with template'))],
3039 3039 _('hg parents [-b] [REV]')),
3040 3040 "paths": (paths, [], _('hg paths [NAME]')),
3041 3041 "^pull":
3042 3042 (pull,
3043 3043 [('u', 'update', None,
3044 3044 _('update the working directory to tip after pull')),
3045 3045 ('e', 'ssh', '', _('specify ssh command to use')),
3046 3046 ('f', 'force', None,
3047 3047 _('run even when remote repository is unrelated')),
3048 3048 ('r', 'rev', [], _('a specific revision you would like to pull')),
3049 3049 ('', 'remotecmd', '',
3050 3050 _('specify hg command to run on the remote side'))],
3051 3051 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3052 3052 "^push":
3053 3053 (push,
3054 3054 [('f', 'force', None, _('force push')),
3055 3055 ('e', 'ssh', '', _('specify ssh command to use')),
3056 3056 ('r', 'rev', [], _('a specific revision you would like to push')),
3057 3057 ('', 'remotecmd', '',
3058 3058 _('specify hg command to run on the remote side'))],
3059 3059 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3060 3060 "debugrawcommit|rawcommit":
3061 3061 (rawcommit,
3062 3062 [('p', 'parent', [], _('parent')),
3063 3063 ('d', 'date', '', _('date code')),
3064 3064 ('u', 'user', '', _('user')),
3065 3065 ('F', 'files', '', _('file list')),
3066 3066 ('m', 'message', '', _('commit message')),
3067 3067 ('l', 'logfile', '', _('commit message file'))],
3068 3068 _('hg debugrawcommit [OPTION]... [FILE]...')),
3069 3069 "recover": (recover, [], _('hg recover')),
3070 3070 "^remove|rm":
3071 3071 (remove,
3072 3072 [('A', 'after', None, _('record remove that has already occurred')),
3073 3073 ('f', 'force', None, _('remove file even if modified')),
3074 3074 ('I', 'include', [], _('include names matching the given patterns')),
3075 3075 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3076 3076 _('hg remove [OPTION]... FILE...')),
3077 3077 "rename|mv":
3078 3078 (rename,
3079 3079 [('A', 'after', None, _('record a rename that has already occurred')),
3080 3080 ('f', 'force', None,
3081 3081 _('forcibly copy over an existing managed file')),
3082 3082 ('I', 'include', [], _('include names matching the given patterns')),
3083 3083 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3084 3084 _('hg rename [OPTION]... SOURCE... DEST')),
3085 3085 "^revert":
3086 3086 (revert,
3087 3087 [('r', 'rev', '', _('revision to revert to')),
3088 3088 ('', 'no-backup', None, _('do not save backup copies of files')),
3089 3089 ('I', 'include', [], _('include names matching given patterns')),
3090 3090 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3091 3091 _('hg revert [-r REV] [NAME]...')),
3092 3092 "rollback": (rollback, [], _('hg rollback')),
3093 3093 "root": (root, [], _('hg root')),
3094 3094 "^serve":
3095 3095 (serve,
3096 3096 [('A', 'accesslog', '', _('name of access log file to write to')),
3097 3097 ('d', 'daemon', None, _('run server in background')),
3098 3098 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3099 3099 ('E', 'errorlog', '', _('name of error log file to write to')),
3100 3100 ('p', 'port', 0, _('port to use (default: 8000)')),
3101 3101 ('a', 'address', '', _('address to use')),
3102 3102 ('n', 'name', '',
3103 3103 _('name to show in web pages (default: working dir)')),
3104 3104 ('', 'webdir-conf', '', _('name of the webdir config file'
3105 3105 ' (serve more than one repo)')),
3106 3106 ('', 'pid-file', '', _('name of file to write process ID to')),
3107 3107 ('', 'stdio', None, _('for remote clients')),
3108 3108 ('t', 'templates', '', _('web templates to use')),
3109 3109 ('', 'style', '', _('template style to use')),
3110 3110 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3111 3111 _('hg serve [OPTION]...')),
3112 3112 "^status|st":
3113 3113 (status,
3114 3114 [('m', 'modified', None, _('show only modified files')),
3115 3115 ('a', 'added', None, _('show only added files')),
3116 3116 ('r', 'removed', None, _('show only removed files')),
3117 3117 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3118 3118 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3119 3119 ('i', 'ignored', None, _('show ignored files')),
3120 3120 ('n', 'no-status', None, _('hide status prefix')),
3121 3121 ('0', 'print0', None,
3122 3122 _('end filenames with NUL, for use with xargs')),
3123 3123 ('I', 'include', [], _('include names matching the given patterns')),
3124 3124 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3125 3125 _('hg status [OPTION]... [FILE]...')),
3126 3126 "tag":
3127 3127 (tag,
3128 3128 [('l', 'local', None, _('make the tag local')),
3129 3129 ('m', 'message', '', _('message for tag commit log entry')),
3130 3130 ('d', 'date', '', _('record datecode as commit date')),
3131 3131 ('u', 'user', '', _('record user as committer')),
3132 3132 ('r', 'rev', '', _('revision to tag'))],
3133 3133 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3134 3134 "tags": (tags, [], _('hg tags')),
3135 3135 "tip":
3136 3136 (tip,
3137 3137 [('b', 'branches', None, _('show branches')),
3138 3138 ('', 'style', '', _('display using template map file')),
3139 3139 ('p', 'patch', None, _('show patch')),
3140 3140 ('', 'template', '', _('display with template'))],
3141 3141 _('hg tip [-b] [-p]')),
3142 3142 "unbundle":
3143 3143 (unbundle,
3144 3144 [('u', 'update', None,
3145 3145 _('update the working directory to tip after unbundle'))],
3146 3146 _('hg unbundle [-u] FILE')),
3147 3147 "debugundo|undo": (undo, [], _('hg undo')),
3148 3148 "^update|up|checkout|co":
3149 3149 (update,
3150 3150 [('b', 'branch', '', _('checkout the head of a specific branch')),
3151 3151 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3152 3152 ('C', 'clean', None, _('overwrite locally modified files')),
3153 3153 ('f', 'force', None, _('force a merge with outstanding changes'))],
3154 3154 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3155 3155 "verify": (verify, [], _('hg verify')),
3156 3156 "version": (show_version, [], _('hg version')),
3157 3157 }
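
Each entry in the table above maps a command name (aliases separated by "|", a leading "^" marking it for the short help list) to a 3-tuple of (function, option list, synopsis), and every option is a 4-tuple of (short name, long name, default, description) as later consumed by fancyopts. Below is a small self-contained sketch, using a made-up toy table rather than the real one, of how a table of that shape can be rendered into a help listing:

def render_help(table):
    # table: {"^name|alias": (function, [(short, long, default, desc)], synopsis)}
    lines = []
    for key in sorted(table):
        func, opts, synopsis = table[key]
        aliases = key.lstrip("^").split("|")
        lines.append("%s:" % ", ".join(aliases))
        lines.append("    %s" % synopsis)
        for short, long_, default, desc in opts:
            flag = "-%s/--%s" % (short, long_) if short else "--%s" % long_
            lines.append("      %s  %s" % (flag, desc))
    return "\n".join(lines)

toy_table = {
    "^demo|d": (lambda ui: None,
                [('v', 'verbose', None, 'enable additional output'),
                 ('', 'style', '', 'display using template map file')],
                'hg demo [-v] [--style STYLE]'),
}
print(render_help(toy_table))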
3158 3158
3159 3159 globalopts = [
3160 3160 ('R', 'repository', '',
3161 3161 _('repository root directory or symbolic path name')),
3162 3162 ('', 'cwd', '', _('change working directory')),
3163 3163 ('y', 'noninteractive', None,
3164 3164 _('do not prompt, assume \'yes\' for any required answers')),
3165 3165 ('q', 'quiet', None, _('suppress output')),
3166 3166 ('v', 'verbose', None, _('enable additional output')),
3167 3167 ('', 'config', [], _('set/override config option')),
3168 3168 ('', 'debug', None, _('enable debugging output')),
3169 3169 ('', 'debugger', None, _('start debugger')),
3170 3170 ('', 'traceback', None, _('print traceback on exception')),
3171 3171 ('', 'time', None, _('time how long the command takes')),
3172 3172 ('', 'profile', None, _('print command execution profile')),
3173 3173 ('', 'version', None, _('output version information and exit')),
3174 3174 ('h', 'help', None, _('display help and exit')),
3175 3175 ]
3176 3176
3177 3177 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3178 3178 " debugindex debugindexdot")
3179 3179 optionalrepo = ("paths serve debugconfig")
3180 3180
3181 3181 def findpossible(cmd):
3182 3182 """
3183 3183 Return cmd -> (aliases, command table entry)
3184 3184 for each matching command.
3185 3185 Return debug commands (or their aliases) only if no normal command matches.
3186 3186 """
3187 3187 choice = {}
3188 3188 debugchoice = {}
3189 3189 for e in table.keys():
3190 3190 aliases = e.lstrip("^").split("|")
3191 3191 found = None
3192 3192 if cmd in aliases:
3193 3193 found = cmd
3194 3194 else:
3195 3195 for a in aliases:
3196 3196 if a.startswith(cmd):
3197 3197 found = a
3198 3198 break
3199 3199 if found is not None:
3200 3200 if aliases[0].startswith("debug"):
3201 3201 debugchoice[found] = (aliases, table[e])
3202 3202 else:
3203 3203 choice[found] = (aliases, table[e])
3204 3204
3205 3205 if not choice and debugchoice:
3206 3206 choice = debugchoice
3207 3207
3208 3208 return choice
3209 3209
3210 3210 def find(cmd):
3211 3211 """Return (aliases, command table entry) for command string."""
3212 3212 choice = findpossible(cmd)
3213 3213
3214 3214 if choice.has_key(cmd):
3215 3215 return choice[cmd]
3216 3216
3217 3217 if len(choice) > 1:
3218 3218 clist = choice.keys()
3219 3219 clist.sort()
3220 3220 raise AmbiguousCommand(cmd, clist)
3221 3221
3222 3222 if choice:
3223 3223 return choice.values()[0]
3224 3224
3225 3225 raise UnknownCommand(cmd)
3226 3226
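
findpossible() and find() above give hg its command abbreviations: an exact alias wins, any unambiguous prefix of an alias is accepted, and commands whose canonical name starts with "debug" are only considered when nothing else matched. Here is a rough standalone sketch of the same rule over a toy alias list (not the real table, and without the exact-match shortcut find() applies):

def resolve(cmd, entries):
    # entries: alias strings shaped like the table keys, e.g. "^update|up|checkout|co"
    normal, debug = {}, {}
    for e in entries:
        aliases = e.lstrip("^").split("|")
        bucket = debug if aliases[0].startswith("debug") else normal
        for a in aliases:
            if a.startswith(cmd):
                bucket[aliases[0]] = aliases
                break
    choice = normal or debug
    if len(choice) == 1:
        return list(choice)[0]
    raise ValueError("%s command: %s" % ("ambiguous" if choice else "unknown", cmd))

entries = ["^update|up|checkout|co", "debugundo|undo", "unbundle"]
print(resolve("up", entries))     # -> 'update'
print(resolve("undo", entries))   # -> 'debugundo' (debug commands only match last)
# resolve("u", entries) raises: ambiguous between 'update' and 'unbundle'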
3227 3227 def catchterm(*args):
3228 3228 raise util.SignalInterrupt
3229 3229
3230 3230 def run():
3231 3231 sys.exit(dispatch(sys.argv[1:]))
3232 3232
3233 3233 class ParseError(Exception):
3234 3234 """Exception raised on errors in parsing the command line."""
3235 3235
3236 3236 def parse(ui, args):
3237 3237 options = {}
3238 3238 cmdoptions = {}
3239 3239
3240 3240 try:
3241 3241 args = fancyopts.fancyopts(args, globalopts, options)
3242 3242 except fancyopts.getopt.GetoptError, inst:
3243 3243 raise ParseError(None, inst)
3244 3244
3245 3245 if args:
3246 3246 cmd, args = args[0], args[1:]
3247 3247 aliases, i = find(cmd)
3248 3248 cmd = aliases[0]
3249 3249 defaults = ui.config("defaults", cmd)
3250 3250 if defaults:
3251 3251 args = defaults.split() + args
3252 3252 c = list(i[1])
3253 3253 else:
3254 3254 cmd = None
3255 3255 c = []
3256 3256
3257 3257 # combine global options into local
3258 3258 for o in globalopts:
3259 3259 c.append((o[0], o[1], options[o[1]], o[3]))
3260 3260
3261 3261 try:
3262 3262 args = fancyopts.fancyopts(args, c, cmdoptions)
3263 3263 except fancyopts.getopt.GetoptError, inst:
3264 3264 raise ParseError(cmd, inst)
3265 3265
3266 3266 # separate global options back out
3267 3267 for o in globalopts:
3268 3268 n = o[1]
3269 3269 options[n] = cmdoptions[n]
3270 3270 del cmdoptions[n]
3271 3271
3272 3272 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3273 3273
3274 3274 def dispatch(args):
3275 3275 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3276 3276 num = getattr(signal, name, None)
3277 3277 if num: signal.signal(num, catchterm)
3278 3278
3279 3279 try:
3280 3280 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3281 3281 except util.Abort, inst:
3282 3282 sys.stderr.write(_("abort: %s\n") % inst)
3283 3283 return -1
3284 3284
3285 3285 external = []
3286 3286 for x in u.extensions():
3287 3287 try:
3288 3288 if x[1]:
3289 3289 mod = imp.load_source(x[0], x[1])
3290 3290 else:
3291 3291 def importh(name):
3292 3292 mod = __import__(name)
3293 3293 components = name.split('.')
3294 3294 for comp in components[1:]:
3295 3295 mod = getattr(mod, comp)
3296 3296 return mod
3297 3297 try:
3298 3298 mod = importh("hgext." + x[0])
3299 3299 except ImportError:
3300 3300 mod = importh(x[0])
3301 3301 external.append(mod)
3302 3302 except Exception, inst:
3303 3303 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3304 3304 if u.print_exc():
3305 3305 return 1
3306 3306
3307 3307 for x in external:
3308 3308 uisetup = getattr(x, 'uisetup', None)
3309 3309 if uisetup:
3310 3310 uisetup(u)
3311 3311 cmdtable = getattr(x, 'cmdtable', {})
3312 3312 for t in cmdtable:
3313 3313 if t in table:
3314 3314 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3315 3315 table.update(cmdtable)
3316 3316
3317 3317 try:
3318 3318 cmd, func, args, options, cmdoptions = parse(u, args)
3319 3319 if options["time"]:
3320 3320 def get_times():
3321 3321 t = os.times()
3322 3322 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3323 3323 t = (t[0], t[1], t[2], t[3], time.clock())
3324 3324 return t
3325 3325 s = get_times()
3326 3326 def print_time():
3327 3327 t = get_times()
3328 3328 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3329 3329 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3330 3330 atexit.register(print_time)
3331 3331
3332 3332 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3333 3333 not options["noninteractive"], options["traceback"],
3334 3334 options["config"])
3335 3335
3336 3336 # enter the debugger before command execution
3337 3337 if options['debugger']:
3338 3338 pdb.set_trace()
3339 3339
3340 3340 try:
3341 3341 if options['cwd']:
3342 3342 try:
3343 3343 os.chdir(options['cwd'])
3344 3344 except OSError, inst:
3345 3345 raise util.Abort('%s: %s' %
3346 3346 (options['cwd'], inst.strerror))
3347 3347
3348 3348 path = u.expandpath(options["repository"]) or ""
3349 3349 repo = path and hg.repository(u, path=path) or None
3350 3350
3351 3351 if options['help']:
3352 3352 return help_(u, cmd, options['version'])
3353 3353 elif options['version']:
3354 3354 return show_version(u)
3355 3355 elif not cmd:
3356 3356 return help_(u, 'shortlist')
3357 3357
3358 3358 if cmd not in norepo.split():
3359 3359 try:
3360 3360 if not repo:
3361 3361 repo = hg.repository(u, path=path)
3362 3362 u = repo.ui
3363 3363 for x in external:
3364 3364 if hasattr(x, 'reposetup'):
3365 3365 x.reposetup(u, repo)
3366 3366 except hg.RepoError:
3367 3367 if cmd not in optionalrepo.split():
3368 3368 raise
3369 3369 d = lambda: func(u, repo, *args, **cmdoptions)
3370 3370 else:
3371 3371 d = lambda: func(u, *args, **cmdoptions)
3372 3372
3373 3373 try:
3374 3374 if options['profile']:
3375 3375 import hotshot, hotshot.stats
3376 3376 prof = hotshot.Profile("hg.prof")
3377 3377 try:
3378 3378 try:
3379 3379 return prof.runcall(d)
3380 3380 except:
3381 3381 try:
3382 3382 u.warn(_('exception raised - generating '
3383 3383 'profile anyway\n'))
3384 3384 except:
3385 3385 pass
3386 3386 raise
3387 3387 finally:
3388 3388 prof.close()
3389 3389 stats = hotshot.stats.load("hg.prof")
3390 3390 stats.strip_dirs()
3391 3391 stats.sort_stats('time', 'calls')
3392 3392 stats.print_stats(40)
3393 3393 else:
3394 3394 return d()
3395 3395 finally:
3396 3396 u.flush()
3397 3397 except:
3398 3398 # enter the debugger when we hit an exception
3399 3399 if options['debugger']:
3400 3400 pdb.post_mortem(sys.exc_info()[2])
3401 3401 u.print_exc()
3402 3402 raise
3403 3403 except ParseError, inst:
3404 3404 if inst.args[0]:
3405 3405 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3406 3406 help_(u, inst.args[0])
3407 3407 else:
3408 3408 u.warn(_("hg: %s\n") % inst.args[1])
3409 3409 help_(u, 'shortlist')
3410 3410 except AmbiguousCommand, inst:
3411 3411 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3412 3412 (inst.args[0], " ".join(inst.args[1])))
3413 3413 except UnknownCommand, inst:
3414 3414 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3415 3415 help_(u, 'shortlist')
3416 3416 except hg.RepoError, inst:
3417 3417 u.warn(_("abort: %s!\n") % inst)
3418 3418 except lock.LockHeld, inst:
3419 3419 if inst.errno == errno.ETIMEDOUT:
3420 3420 reason = _('timed out waiting for lock held by %s') % inst.locker
3421 3421 else:
3422 3422 reason = _('lock held by %s') % inst.locker
3423 3423 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3424 3424 except lock.LockUnavailable, inst:
3425 3425 u.warn(_("abort: could not lock %s: %s\n") %
3426 3426 (inst.desc or inst.filename, inst.strerror))
3427 3427 except revlog.RevlogError, inst:
3428 3428 u.warn(_("abort: "), inst, "!\n")
3429 3429 except util.SignalInterrupt:
3430 3430 u.warn(_("killed!\n"))
3431 3431 except KeyboardInterrupt:
3432 3432 try:
3433 3433 u.warn(_("interrupted!\n"))
3434 3434 except IOError, inst:
3435 3435 if inst.errno == errno.EPIPE:
3436 3436 if u.debugflag:
3437 3437 u.warn(_("\nbroken pipe\n"))
3438 3438 else:
3439 3439 raise
3440 3440 except IOError, inst:
3441 3441 if hasattr(inst, "code"):
3442 3442 u.warn(_("abort: %s\n") % inst)
3443 3443 elif hasattr(inst, "reason"):
3444 3444 u.warn(_("abort: error: %s\n") % inst.reason[1])
3445 3445 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3446 3446 if u.debugflag:
3447 3447 u.warn(_("broken pipe\n"))
3448 3448 elif getattr(inst, "strerror", None):
3449 3449 if getattr(inst, "filename", None):
3450 3450 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3451 3451 else:
3452 3452 u.warn(_("abort: %s\n") % inst.strerror)
3453 3453 else:
3454 3454 raise
3455 3455 except OSError, inst:
3456 3456 if hasattr(inst, "filename"):
3457 3457 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3458 3458 else:
3459 3459 u.warn(_("abort: %s\n") % inst.strerror)
3460 3460 except util.Abort, inst:
3461 3461 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3462 3462 except TypeError, inst:
3463 3463 # was this an argument error?
3464 3464 tb = traceback.extract_tb(sys.exc_info()[2])
3465 3465 if len(tb) > 2: # no
3466 3466 raise
3467 3467 u.debug(inst, "\n")
3468 3468 u.warn(_("%s: invalid arguments\n") % cmd)
3469 3469 help_(u, cmd)
3470 3470 except SystemExit, inst:
3471 3471 # Commands shouldn't sys.exit directly, but give a return code.
3472 3472 # Just in case, catch this and pass the exit code to the caller.
3473 3473 return inst.code
3474 3474 except:
3475 3475 u.warn(_("** unknown exception encountered, details follow\n"))
3476 3476 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3477 3477 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3478 3478 % version.get_version())
3479 3479 raise
3480 3480
3481 3481 return -1
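
dispatch() above is also the extension loader: every module named in an [extensions] hgrc section is imported (from an explicit path via imp.load_source, otherwise as hgext.<name> or <name>), its uisetup(ui) is called, its cmdtable is merged into the global command table, and reposetup(ui, repo) runs once a repository has been opened. A minimal, hypothetical extension skeleton compatible with that protocol (module and command names here are invented for illustration):

# myext.py -- hypothetical extension; dispatch() above only expects the three
# attributes used below: uisetup, reposetup and cmdtable.

def hello(ui, repo, *pats, **opts):
    """print the repository root (toy command body)"""
    ui.write("hello from %s\n" % repo.root)

def uisetup(ui):
    # called once with the ui object, before the command line is parsed and run
    pass

def reposetup(ui, repo):
    # called after the repository object has been created
    pass

cmdtable = {
    "hello": (hello, [], 'hg hello'),
}

Enabling it would then be a matter of an hgrc entry such as "myext = /path/to/myext.py" (or "myext =" to import it by module name) under [extensions].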
@@ -1,2122 +1,2122
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "appendfile changegroup")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "revlog")
16 16
17 17 class localrepository(object):
18 18 def __del__(self):
19 19 self.transhandle = None
20 20 def __init__(self, parentui, path=None, create=0):
21 21 if not path:
22 22 p = os.getcwd()
23 23 while not os.path.isdir(os.path.join(p, ".hg")):
24 24 oldp = p
25 25 p = os.path.dirname(p)
26 26 if p == oldp:
27 27 raise repo.RepoError(_("no repo found"))
28 28 path = p
29 29 self.path = os.path.join(path, ".hg")
30 30
31 31 if not create and not os.path.isdir(self.path):
32 32 raise repo.RepoError(_("repository %s not found") % path)
33 33
34 34 self.root = os.path.abspath(path)
35 35 self.origroot = path
36 36 self.ui = ui.ui(parentui=parentui)
37 37 self.opener = util.opener(self.path)
38 38 self.wopener = util.opener(self.root)
39 39
40 40 try:
41 41 self.ui.readconfig(self.join("hgrc"), self.root)
42 42 except IOError:
43 43 pass
44 44
45 45 v = self.ui.revlogopts
46 46 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 48 fl = v.get('flags', None)
49 49 flags = 0
50 50 if fl != None:
51 51 for x in fl.split():
52 52 flags |= revlog.flagstr(x)
53 53 elif self.revlogv1:
54 54 flags = revlog.REVLOG_DEFAULT_FLAGS
55 55
56 56 v = self.revlogversion | flags
57 57 self.manifest = manifest.manifest(self.opener, v)
58 58 self.changelog = changelog.changelog(self.opener, v)
59 59
60 60 # the changelog might not have the inline index flag
61 61 # on. If the format of the changelog is the same as found in
62 62 # .hgrc, apply any flags found in the .hgrc as well.
63 63 # Otherwise, just use the version from the changelog
64 64 v = self.changelog.version
65 65 if v == self.revlogversion:
66 66 v |= flags
67 67 self.revlogversion = v
68 68
69 69 self.tagscache = None
70 70 self.nodetagscache = None
71 71 self.encodepats = None
72 72 self.decodepats = None
73 73 self.transhandle = None
74 74
75 75 if create:
76 76 os.mkdir(self.path)
77 77 os.mkdir(self.join("data"))
78 78
79 79 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
80 80
81 81 def hook(self, name, throw=False, **args):
82 82 def callhook(hname, funcname):
83 83 '''call python hook. hook is callable object, looked up as
84 84 name in python module. if callable returns "true", hook
85 85 fails, else passes. if hook raises exception, treated as
86 86 hook failure. exception propagates if throw is "true".
87 87
88 88 reason for "true" meaning "hook failed" is so that
89 89 unmodified commands (e.g. mercurial.commands.update) can
90 90 be run as hooks without wrappers to convert return values.'''
91 91
92 92 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
93 93 d = funcname.rfind('.')
94 94 if d == -1:
95 95 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
96 96 % (hname, funcname))
97 97 modname = funcname[:d]
98 98 try:
99 99 obj = __import__(modname)
100 100 except ImportError:
101 101 raise util.Abort(_('%s hook is invalid '
102 102 '(import of "%s" failed)') %
103 103 (hname, modname))
104 104 try:
105 105 for p in funcname.split('.')[1:]:
106 106 obj = getattr(obj, p)
107 107 except AttributeError, err:
108 108 raise util.Abort(_('%s hook is invalid '
109 109 '("%s" is not defined)') %
110 110 (hname, funcname))
111 111 if not callable(obj):
112 112 raise util.Abort(_('%s hook is invalid '
113 113 '("%s" is not callable)') %
114 114 (hname, funcname))
115 115 try:
116 116 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
117 117 except (KeyboardInterrupt, util.SignalInterrupt):
118 118 raise
119 119 except Exception, exc:
120 120 if isinstance(exc, util.Abort):
121 121 self.ui.warn(_('error: %s hook failed: %s\n') %
122 122 (hname, exc.args[0] % exc.args[1:]))
123 123 else:
124 124 self.ui.warn(_('error: %s hook raised an exception: '
125 125 '%s\n') % (hname, exc))
126 126 if throw:
127 127 raise
128 128 self.ui.print_exc()
129 129 return True
130 130 if r:
131 131 if throw:
132 132 raise util.Abort(_('%s hook failed') % hname)
133 133 self.ui.warn(_('warning: %s hook failed\n') % hname)
134 134 return r
135 135
136 136 def runhook(name, cmd):
137 137 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
138 138 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
139 139 r = util.system(cmd, environ=env, cwd=self.root)
140 140 if r:
141 141 desc, r = util.explain_exit(r)
142 142 if throw:
143 143 raise util.Abort(_('%s hook %s') % (name, desc))
144 144 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
145 145 return r
146 146
147 147 r = False
148 148 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
149 149 if hname.split(".", 1)[0] == name and cmd]
150 150 hooks.sort()
151 151 for hname, cmd in hooks:
152 152 if cmd.startswith('python:'):
153 153 r = callhook(hname, cmd[7:].strip()) or r
154 154 else:
155 155 r = runhook(hname, cmd) or r
156 156 return r
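
hook() runs the entries of the [hooks] config section whose name (before any ".suffix") matches the event: ordinary entries are shell commands executed with the keyword arguments exported as HG_* environment variables, while entries prefixed with "python:" name an in-process callable invoked as obj(ui=..., repo=..., hooktype=..., **args), a true return value or an exception counting as failure. A hedged sketch of a python hook matching that calling convention (the module name and hgrc lines are illustrative only):

# hypothetical hook module importable as "hooklib"; it could be wired up with
# hgrc lines along these lines:
#   [hooks]
#   commit = echo "committed $HG_NODE"
#   commit.report = python:hooklib.report
def report(ui, repo, hooktype, **kwargs):
    """log every invocation; a false return value means the hook passed"""
    ui.write("%s hook fired in %s: %r\n" % (hooktype, repo.root, kwargs))
    return False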
157 157
158 158 def tags(self):
159 159 '''return a mapping of tag to node'''
160 160 if not self.tagscache:
161 161 self.tagscache = {}
162 162
163 163 def parsetag(line, context):
164 164 if not line:
165 165 return
166 166 s = line.split(" ", 1)
167 167 if len(s) != 2:
168 168 self.ui.warn(_("%s: cannot parse entry\n") % context)
169 169 return
170 170 node, key = s
171 171 key = key.strip()
172 172 try:
173 173 bin_n = bin(node)
174 174 except TypeError:
175 175 self.ui.warn(_("%s: node '%s' is not well formed\n") %
176 176 (context, node))
177 177 return
178 178 if bin_n not in self.changelog.nodemap:
179 179 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
180 180 (context, key))
181 181 return
182 182 self.tagscache[key] = bin_n
183 183
184 184 # read the tags file from each head, ending with the tip,
185 185 # and add each tag found to the map, with "newer" ones
186 186 # taking precedence
187 187 heads = self.heads()
188 188 heads.reverse()
189 189 fl = self.file(".hgtags")
190 190 for node in heads:
191 191 change = self.changelog.read(node)
192 192 rev = self.changelog.rev(node)
193 193 fn, ff = self.manifest.find(change[0], '.hgtags')
194 194 if fn is None: continue
195 195 count = 0
196 196 for l in fl.read(fn).splitlines():
197 197 count += 1
198 198 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
199 199 (rev, short(node), count))
200 200 try:
201 201 f = self.opener("localtags")
202 202 count = 0
203 203 for l in f:
204 204 count += 1
205 205 parsetag(l, _("localtags, line %d") % count)
206 206 except IOError:
207 207 pass
208 208
209 209 self.tagscache['tip'] = self.changelog.tip()
210 210
211 211 return self.tagscache
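
tags() builds its cache from the .hgtags file of every head (the tip is read last, so newer entries win) plus an optional localtags file in .hg; each line is simply a 40-character hex changeset id, a space, and the tag name. A simplified standalone sketch of that line format, without the node-validity checks parsetag() performs against the changelog:

def parse_tag_lines(lines):
    # lines of the form "<40-hex changeset id> <tag name>"; later lines win,
    # mirroring how newer heads override older ones in tags() above.
    tags = {}
    lineno = 0
    for line in lines:
        lineno += 1
        line = line.strip()
        if not line:
            continue
        parts = line.split(" ", 1)
        if len(parts) != 2 or len(parts[0]) != 40:
            print("line %d: cannot parse entry" % lineno)
            continue
        tags[parts[1].strip()] = parts[0]
    return tags

sample = [
    "0123456789abcdef0123456789abcdef01234567 v0.1",
    "89abcdef0123456789abcdef0123456789abcdef v0.1",   # overrides the line above
]
print(parse_tag_lines(sample))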
212 212
213 213 def tagslist(self):
214 214 '''return a list of tags ordered by revision'''
215 215 l = []
216 216 for t, n in self.tags().items():
217 217 try:
218 218 r = self.changelog.rev(n)
219 219 except:
220 220 r = -2 # sort to the beginning of the list if unknown
221 221 l.append((r, t, n))
222 222 l.sort()
223 223 return [(t, n) for r, t, n in l]
224 224
225 225 def nodetags(self, node):
226 226 '''return the tags associated with a node'''
227 227 if not self.nodetagscache:
228 228 self.nodetagscache = {}
229 229 for t, n in self.tags().items():
230 230 self.nodetagscache.setdefault(n, []).append(t)
231 231 return self.nodetagscache.get(node, [])
232 232
233 233 def lookup(self, key):
234 234 try:
235 235 return self.tags()[key]
236 236 except KeyError:
237 237 try:
238 238 return self.changelog.lookup(key)
239 239 except:
240 240 raise repo.RepoError(_("unknown revision '%s'") % key)
241 241
242 242 def dev(self):
243 243 return os.stat(self.path).st_dev
244 244
245 245 def local(self):
246 246 return True
247 247
248 248 def join(self, f):
249 249 return os.path.join(self.path, f)
250 250
251 251 def wjoin(self, f):
252 252 return os.path.join(self.root, f)
253 253
254 254 def file(self, f):
255 255 if f[0] == '/':
256 256 f = f[1:]
257 257 return filelog.filelog(self.opener, f, self.revlogversion)
258 258
259 259 def getcwd(self):
260 260 return self.dirstate.getcwd()
261 261
262 262 def wfile(self, f, mode='r'):
263 263 return self.wopener(f, mode)
264 264
265 265 def wread(self, filename):
266 266 if self.encodepats == None:
267 267 l = []
268 268 for pat, cmd in self.ui.configitems("encode"):
269 269 mf = util.matcher(self.root, "", [pat], [], [])[1]
270 270 l.append((mf, cmd))
271 271 self.encodepats = l
272 272
273 273 data = self.wopener(filename, 'r').read()
274 274
275 275 for mf, cmd in self.encodepats:
276 276 if mf(filename):
277 277 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
278 278 data = util.filter(data, cmd)
279 279 break
280 280
281 281 return data
282 282
283 283 def wwrite(self, filename, data, fd=None):
284 284 if self.decodepats == None:
285 285 l = []
286 286 for pat, cmd in self.ui.configitems("decode"):
287 287 mf = util.matcher(self.root, "", [pat], [], [])[1]
288 288 l.append((mf, cmd))
289 289 self.decodepats = l
290 290
291 291 for mf, cmd in self.decodepats:
292 292 if mf(filename):
293 293 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
294 294 data = util.filter(data, cmd)
295 295 break
296 296
297 297 if fd:
298 298 return fd.write(data)
299 299 return self.wopener(filename, 'w').write(data)
300 300
301 301 def transaction(self):
302 302 tr = self.transhandle
303 303 if tr != None and tr.running():
304 304 return tr.nest()
305 305
306 # save dirstate for undo
306 # save dirstate for rollback
307 307 try:
308 308 ds = self.opener("dirstate").read()
309 309 except IOError:
310 310 ds = ""
311 311 self.opener("journal.dirstate", "w").write(ds)
312 312
313 313 tr = transaction.transaction(self.ui.warn, self.opener,
314 314 self.join("journal"),
315 315 aftertrans(self.path))
316 316 self.transhandle = tr
317 317 return tr
318 318
319 319 def recover(self):
320 320 l = self.lock()
321 321 if os.path.exists(self.join("journal")):
322 322 self.ui.status(_("rolling back interrupted transaction\n"))
323 323 transaction.rollback(self.opener, self.join("journal"))
324 324 self.reload()
325 325 return True
326 326 else:
327 327 self.ui.warn(_("no interrupted transaction available\n"))
328 328 return False
329 329
330 def undo(self, wlock=None):
330 def rollback(self, wlock=None):
331 331 if not wlock:
332 332 wlock = self.wlock()
333 333 l = self.lock()
334 334 if os.path.exists(self.join("undo")):
335 335 self.ui.status(_("rolling back last transaction\n"))
336 336 transaction.rollback(self.opener, self.join("undo"))
337 337 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
338 338 self.reload()
339 339 self.wreload()
340 340 else:
341 self.ui.warn(_("no undo information available\n"))
341 self.ui.warn(_("no rollback information available\n"))
342 342
343 343 def wreload(self):
344 344 self.dirstate.read()
345 345
346 346 def reload(self):
347 347 self.changelog.load()
348 348 self.manifest.load()
349 349 self.tagscache = None
350 350 self.nodetagscache = None
351 351
352 352 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
353 353 desc=None):
354 354 try:
355 355 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
356 356 except lock.LockHeld, inst:
357 357 if not wait:
358 358 raise
359 359 self.ui.warn(_("waiting for lock on %s held by %s\n") %
360 360 (desc, inst.args[0]))
361 361 # default to 600 seconds timeout
362 362 l = lock.lock(self.join(lockname),
363 363 int(self.ui.config("ui", "timeout") or 600),
364 364 releasefn, desc=desc)
365 365 if acquirefn:
366 366 acquirefn()
367 367 return l
368 368
369 369 def lock(self, wait=1):
370 370 return self.do_lock("lock", wait, acquirefn=self.reload,
371 371 desc=_('repository %s') % self.origroot)
372 372
373 373 def wlock(self, wait=1):
374 374 return self.do_lock("wlock", wait, self.dirstate.write,
375 375 self.wreload,
376 376 desc=_('working directory of %s') % self.origroot)
377 377
378 378 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
379 379 "determine whether a new filenode is needed"
380 380 fp1 = manifest1.get(filename, nullid)
381 381 fp2 = manifest2.get(filename, nullid)
382 382
383 383 if fp2 != nullid:
384 384 # is one parent an ancestor of the other?
385 385 fpa = filelog.ancestor(fp1, fp2)
386 386 if fpa == fp1:
387 387 fp1, fp2 = fp2, nullid
388 388 elif fpa == fp2:
389 389 fp2 = nullid
390 390
391 391 # is the file unmodified from the parent? report existing entry
392 392 if fp2 == nullid and text == filelog.read(fp1):
393 393 return (fp1, None, None)
394 394
395 395 return (None, fp1, fp2)
396 396
397 397 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
398 398 orig_parent = self.dirstate.parents()[0] or nullid
399 399 p1 = p1 or self.dirstate.parents()[0] or nullid
400 400 p2 = p2 or self.dirstate.parents()[1] or nullid
401 401 c1 = self.changelog.read(p1)
402 402 c2 = self.changelog.read(p2)
403 403 m1 = self.manifest.read(c1[0])
404 404 mf1 = self.manifest.readflags(c1[0])
405 405 m2 = self.manifest.read(c2[0])
406 406 changed = []
407 407
408 408 if orig_parent == p1:
409 409 update_dirstate = 1
410 410 else:
411 411 update_dirstate = 0
412 412
413 413 if not wlock:
414 414 wlock = self.wlock()
415 415 l = self.lock()
416 416 tr = self.transaction()
417 417 mm = m1.copy()
418 418 mfm = mf1.copy()
419 419 linkrev = self.changelog.count()
420 420 for f in files:
421 421 try:
422 422 t = self.wread(f)
423 423 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
424 424 r = self.file(f)
425 425 mfm[f] = tm
426 426
427 427 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
428 428 if entry:
429 429 mm[f] = entry
430 430 continue
431 431
432 432 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
433 433 changed.append(f)
434 434 if update_dirstate:
435 435 self.dirstate.update([f], "n")
436 436 except IOError:
437 437 try:
438 438 del mm[f]
439 439 del mfm[f]
440 440 if update_dirstate:
441 441 self.dirstate.forget([f])
442 442 except:
443 443 # deleted from p2?
444 444 pass
445 445
446 446 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
447 447 user = user or self.ui.username()
448 448 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
449 449 tr.close()
450 450 if update_dirstate:
451 451 self.dirstate.setparents(n, nullid)
452 452
453 453 def commit(self, files=None, text="", user=None, date=None,
454 454 match=util.always, force=False, lock=None, wlock=None,
455 455 force_editor=False):
456 456 commit = []
457 457 remove = []
458 458 changed = []
459 459
460 460 if files:
461 461 for f in files:
462 462 s = self.dirstate.state(f)
463 463 if s in 'nmai':
464 464 commit.append(f)
465 465 elif s == 'r':
466 466 remove.append(f)
467 467 else:
468 468 self.ui.warn(_("%s not tracked!\n") % f)
469 469 else:
470 470 modified, added, removed, deleted, unknown = self.changes(match=match)
471 471 commit = modified + added
472 472 remove = removed
473 473
474 474 p1, p2 = self.dirstate.parents()
475 475 c1 = self.changelog.read(p1)
476 476 c2 = self.changelog.read(p2)
477 477 m1 = self.manifest.read(c1[0])
478 478 mf1 = self.manifest.readflags(c1[0])
479 479 m2 = self.manifest.read(c2[0])
480 480
481 481 if not commit and not remove and not force and p2 == nullid:
482 482 self.ui.status(_("nothing changed\n"))
483 483 return None
484 484
485 485 xp1 = hex(p1)
486 486 if p2 == nullid: xp2 = ''
487 487 else: xp2 = hex(p2)
488 488
489 489 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
490 490
491 491 if not wlock:
492 492 wlock = self.wlock()
493 493 if not lock:
494 494 lock = self.lock()
495 495 tr = self.transaction()
496 496
497 497 # check in files
498 498 new = {}
499 499 linkrev = self.changelog.count()
500 500 commit.sort()
501 501 for f in commit:
502 502 self.ui.note(f + "\n")
503 503 try:
504 504 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
505 505 t = self.wread(f)
506 506 except IOError:
507 507 self.ui.warn(_("trouble committing %s!\n") % f)
508 508 raise
509 509
510 510 r = self.file(f)
511 511
512 512 meta = {}
513 513 cp = self.dirstate.copied(f)
514 514 if cp:
515 515 meta["copy"] = cp
516 516 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
517 517 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
518 518 fp1, fp2 = nullid, nullid
519 519 else:
520 520 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
521 521 if entry:
522 522 new[f] = entry
523 523 continue
524 524
525 525 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
526 526 # remember what we've added so that we can later calculate
527 527 # the files to pull from a set of changesets
528 528 changed.append(f)
529 529
530 530 # update manifest
531 531 m1 = m1.copy()
532 532 m1.update(new)
533 533 for f in remove:
534 534 if f in m1:
535 535 del m1[f]
536 536 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
537 537 (new, remove))
538 538
539 539 # add changeset
540 540 new = new.keys()
541 541 new.sort()
542 542
543 543 user = user or self.ui.username()
544 544 if not text or force_editor:
545 545 edittext = []
546 546 if text:
547 547 edittext.append(text)
548 548 edittext.append("")
549 549 if p2 != nullid:
550 550 edittext.append("HG: branch merge")
551 551 edittext.extend(["HG: changed %s" % f for f in changed])
552 552 edittext.extend(["HG: removed %s" % f for f in remove])
553 553 if not changed and not remove:
554 554 edittext.append("HG: no files changed")
555 555 edittext.append("")
556 556 # run editor in the repository root
557 557 olddir = os.getcwd()
558 558 os.chdir(self.root)
559 559 text = self.ui.edit("\n".join(edittext), user)
560 560 os.chdir(olddir)
561 561
562 562 lines = [line.rstrip() for line in text.rstrip().splitlines()]
563 563 while lines and not lines[0]:
564 564 del lines[0]
565 565 if not lines:
566 566 return None
567 567 text = '\n'.join(lines)
568 568 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
569 569 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
570 570 parent2=xp2)
571 571 tr.close()
572 572
573 573 self.dirstate.setparents(n)
574 574 self.dirstate.update(new, "n")
575 575 self.dirstate.forget(remove)
576 576
577 577 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
578 578 return n
579 579
580 580 def walk(self, node=None, files=[], match=util.always, badmatch=None):
581 581 if node:
582 582 fdict = dict.fromkeys(files)
583 583 for fn in self.manifest.read(self.changelog.read(node)[0]):
584 584 fdict.pop(fn, None)
585 585 if match(fn):
586 586 yield 'm', fn
587 587 for fn in fdict:
588 588 if badmatch and badmatch(fn):
589 589 if match(fn):
590 590 yield 'b', fn
591 591 else:
592 592 self.ui.warn(_('%s: No such file in rev %s\n') % (
593 593 util.pathto(self.getcwd(), fn), short(node)))
594 594 else:
595 595 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
596 596 yield src, fn
597 597
598 598 def changes(self, node1=None, node2=None, files=[], match=util.always,
599 599 wlock=None, show_ignored=None):
600 600 """return changes between two nodes or node and working directory
601 601
602 602 If node1 is None, use the first dirstate parent instead.
603 603 If node2 is None, compare node1 with working directory.
604 604 """
605 605
606 606 def fcmp(fn, mf):
607 607 t1 = self.wread(fn)
608 608 t2 = self.file(fn).read(mf.get(fn, nullid))
609 609 return cmp(t1, t2)
610 610
611 611 def mfmatches(node):
612 612 change = self.changelog.read(node)
613 613 mf = dict(self.manifest.read(change[0]))
614 614 for fn in mf.keys():
615 615 if not match(fn):
616 616 del mf[fn]
617 617 return mf
618 618
619 619 if node1:
620 620 # read the manifest from node1 before the manifest from node2,
621 621 # so that we'll hit the manifest cache if we're going through
622 622 # all the revisions in parent->child order.
623 623 mf1 = mfmatches(node1)
624 624
625 625 # are we comparing the working directory?
626 626 if not node2:
627 627 if not wlock:
628 628 try:
629 629 wlock = self.wlock(wait=0)
630 630 except lock.LockException:
631 631 wlock = None
632 632 lookup, modified, added, removed, deleted, unknown, ignored = (
633 633 self.dirstate.changes(files, match, show_ignored))
634 634
635 635 # are we comparing working dir against its parent?
636 636 if not node1:
637 637 if lookup:
638 638 # do a full compare of any files that might have changed
639 639 mf2 = mfmatches(self.dirstate.parents()[0])
640 640 for f in lookup:
641 641 if fcmp(f, mf2):
642 642 modified.append(f)
643 643 elif wlock is not None:
644 644 self.dirstate.update([f], "n")
645 645 else:
646 646 # we are comparing working dir against non-parent
647 647 # generate a pseudo-manifest for the working dir
648 648 mf2 = mfmatches(self.dirstate.parents()[0])
649 649 for f in lookup + modified + added:
650 650 mf2[f] = ""
651 651 for f in removed:
652 652 if f in mf2:
653 653 del mf2[f]
654 654 else:
655 655 # we are comparing two revisions
656 656 deleted, unknown, ignored = [], [], []
657 657 mf2 = mfmatches(node2)
658 658
659 659 if node1:
660 660 # flush lists from dirstate before comparing manifests
661 661 modified, added = [], []
662 662
663 663 for fn in mf2:
664 664 if mf1.has_key(fn):
665 665 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
666 666 modified.append(fn)
667 667 del mf1[fn]
668 668 else:
669 669 added.append(fn)
670 670
671 671 removed = mf1.keys()
672 672
673 673 # sort and return results:
674 674 for l in modified, added, removed, deleted, unknown, ignored:
675 675 l.sort()
676 676 if show_ignored is None:
677 677 return (modified, added, removed, deleted, unknown)
678 678 else:
679 679 return (modified, added, removed, deleted, unknown, ignored)
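
changes() returns five sorted lists, (modified, added, removed, deleted, unknown), plus a sixth ignored list when show_ignored is set; with both nodes left as None it compares the working directory against its first dirstate parent. A hedged sketch of consuming that result through the API (the repository path and bare ui() constructor are assumptions):

# Hedged sketch: print a status-like summary from localrepository.changes().
from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, path=".")
modified, added, removed, deleted, unknown = repo.changes()
for label, files in [("M", modified), ("A", added), ("R", removed),
                     ("!", deleted), ("?", unknown)]:
    for f in files:
        u.write("%s %s\n" % (label, f))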
680 680
681 681 def add(self, list, wlock=None):
682 682 if not wlock:
683 683 wlock = self.wlock()
684 684 for f in list:
685 685 p = self.wjoin(f)
686 686 if not os.path.exists(p):
687 687 self.ui.warn(_("%s does not exist!\n") % f)
688 688 elif not os.path.isfile(p):
689 689 self.ui.warn(_("%s not added: only files supported currently\n")
690 690 % f)
691 691 elif self.dirstate.state(f) in 'an':
692 692 self.ui.warn(_("%s already tracked!\n") % f)
693 693 else:
694 694 self.dirstate.update([f], "a")
695 695
696 696 def forget(self, list, wlock=None):
697 697 if not wlock:
698 698 wlock = self.wlock()
699 699 for f in list:
700 700 if self.dirstate.state(f) not in 'ai':
701 701 self.ui.warn(_("%s not added!\n") % f)
702 702 else:
703 703 self.dirstate.forget([f])
704 704
705 705 def remove(self, list, unlink=False, wlock=None):
706 706 if unlink:
707 707 for f in list:
708 708 try:
709 709 util.unlink(self.wjoin(f))
710 710 except OSError, inst:
711 711 if inst.errno != errno.ENOENT:
712 712 raise
713 713 if not wlock:
714 714 wlock = self.wlock()
715 715 for f in list:
716 716 p = self.wjoin(f)
717 717 if os.path.exists(p):
718 718 self.ui.warn(_("%s still exists!\n") % f)
719 719 elif self.dirstate.state(f) == 'a':
720 720 self.dirstate.forget([f])
721 721 elif f not in self.dirstate:
722 722 self.ui.warn(_("%s not tracked!\n") % f)
723 723 else:
724 724 self.dirstate.update([f], "r")
725 725
726 726 def undelete(self, list, wlock=None):
727 727 p = self.dirstate.parents()[0]
728 728 mn = self.changelog.read(p)[0]
729 729 mf = self.manifest.readflags(mn)
730 730 m = self.manifest.read(mn)
731 731 if not wlock:
732 732 wlock = self.wlock()
733 733 for f in list:
734 734 if self.dirstate.state(f) not in "r":
735 735 self.ui.warn("%s not removed!\n" % f)
736 736 else:
737 737 t = self.file(f).read(m[f])
738 738 self.wwrite(f, t)
739 739 util.set_exec(self.wjoin(f), mf[f])
740 740 self.dirstate.update([f], "n")
741 741
742 742 def copy(self, source, dest, wlock=None):
743 743 p = self.wjoin(dest)
744 744 if not os.path.exists(p):
745 745 self.ui.warn(_("%s does not exist!\n") % dest)
746 746 elif not os.path.isfile(p):
747 747 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
748 748 else:
749 749 if not wlock:
750 750 wlock = self.wlock()
751 751 if self.dirstate.state(dest) == '?':
752 752 self.dirstate.update([dest], "a")
753 753 self.dirstate.copy(source, dest)
754 754
755 755 def heads(self, start=None):
756 756 heads = self.changelog.heads(start)
757 757 # sort the output in rev descending order
758 758 heads = [(-self.changelog.rev(h), h) for h in heads]
759 759 heads.sort()
760 760 return [n for (r, n) in heads]
761 761
762 762 # branchlookup returns a dict giving a list of branches for
763 763 # each head. A branch is defined as the tag of a node or
764 764 # the branch of the node's parents. If a node has multiple
765 765 # branch tags, tags are eliminated if they are visible from other
766 766 # branch tags.
767 767 #
768 768 # So, for this graph: a->b->c->d->e
769 769 # \ /
770 770 # aa -----/
771 771 # a has tag 2.6.12
772 772 # d has tag 2.6.13
773 773 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
774 774 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
775 775 # from the list.
776 776 #
777 777 # It is possible that more than one head will have the same branch tag.
778 778 # callers need to check the result for multiple heads under the same
779 779 # branch tag if that is a problem for them (ie checkout of a specific
780 780 # branch).
781 781 #
782 782 # passing in a specific branch will limit the depth of the search
783 783 # through the parents. It won't limit the branches returned in the
784 784 # result though.
785 785 def branchlookup(self, heads=None, branch=None):
786 786 if not heads:
787 787 heads = self.heads()
788 788 headt = [ h for h in heads ]
789 789 chlog = self.changelog
790 790 branches = {}
791 791 merges = []
792 792 seenmerge = {}
793 793
794 794 # traverse the tree once for each head, recording in the branches
795 795 # dict which tags are visible from this head. The branches
796 796 # dict also records which tags are visible from each tag
797 797 # while we traverse.
798 798 while headt or merges:
799 799 if merges:
800 800 n, found = merges.pop()
801 801 visit = [n]
802 802 else:
803 803 h = headt.pop()
804 804 visit = [h]
805 805 found = [h]
806 806 seen = {}
807 807 while visit:
808 808 n = visit.pop()
809 809 if n in seen:
810 810 continue
811 811 pp = chlog.parents(n)
812 812 tags = self.nodetags(n)
813 813 if tags:
814 814 for x in tags:
815 815 if x == 'tip':
816 816 continue
817 817 for f in found:
818 818 branches.setdefault(f, {})[n] = 1
819 819 branches.setdefault(n, {})[n] = 1
820 820 break
821 821 if n not in found:
822 822 found.append(n)
823 823 if branch in tags:
824 824 continue
825 825 seen[n] = 1
826 826 if pp[1] != nullid and n not in seenmerge:
827 827 merges.append((pp[1], [x for x in found]))
828 828 seenmerge[n] = 1
829 829 if pp[0] != nullid:
830 830 visit.append(pp[0])
831 831 # traverse the branches dict, eliminating branch tags from each
832 832 # head that are visible from another branch tag for that head.
833 833 out = {}
834 834 viscache = {}
835 835 for h in heads:
836 836 def visible(node):
837 837 if node in viscache:
838 838 return viscache[node]
839 839 ret = {}
840 840 visit = [node]
841 841 while visit:
842 842 x = visit.pop()
843 843 if x in viscache:
844 844 ret.update(viscache[x])
845 845 elif x not in ret:
846 846 ret[x] = 1
847 847 if x in branches:
848 848 visit[len(visit):] = branches[x].keys()
849 849 viscache[node] = ret
850 850 return ret
851 851 if h not in branches:
852 852 continue
853 853 # O(n^2), but somewhat limited. This only searches the
854 854 # tags visible from a specific head, not all the tags in the
855 855 # whole repo.
856 856 for b in branches[h]:
857 857 vis = False
858 858 for bb in branches[h].keys():
859 859 if b != bb:
860 860 if b in visible(bb):
861 861 vis = True
862 862 break
863 863 if not vis:
864 864 l = out.setdefault(h, [])
865 865 l[len(l):] = self.nodetags(b)
866 866 return out
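
The long comment before branchlookup() describes the elimination rule with a concrete graph: a tag is dropped from a head's branch list when it is already visible, through the ancestry, from another of that head's tags. Below is a deliberately simplified, self-contained model of just that rule on the same example graph; it is not the merge-aware traversal the method itself performs.

# Toy model of the tag-elimination rule: the a->b->c->d->e graph with aa
# merged into e, tags 2.6.12 at a and 2.6.13 at d (from the comment above).
parents = {
    'a': [], 'aa': ['a'],
    'b': ['a'], 'c': ['b'], 'd': ['c'], 'e': ['d', 'aa'],
}
tags = {'a': '2.6.12', 'd': '2.6.13'}

def ancestors(node):
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents[n])
    return seen

def branchtags(head):
    tagged = [n for n in sorted(ancestors(head)) if n in tags]
    kept = [n for n in tagged
            if not any(n != m and n in ancestors(m) for m in tagged)]
    return [tags[n] for n in kept]

print(branchtags('e'))   # -> ['2.6.13']; 2.6.12 is reachable from 2.6.13, so dropped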
867 867
868 868 def branches(self, nodes):
869 869 if not nodes:
870 870 nodes = [self.changelog.tip()]
871 871 b = []
872 872 for n in nodes:
873 873 t = n
874 874 while 1:
875 875 p = self.changelog.parents(n)
876 876 if p[1] != nullid or p[0] == nullid:
877 877 b.append((t, n, p[0], p[1]))
878 878 break
879 879 n = p[0]
880 880 return b
881 881
882 882 def between(self, pairs):
883 883 r = []
884 884
885 885 for top, bottom in pairs:
886 886 n, l, i = top, [], 0
887 887 f = 1
888 888
889 889 while n != bottom:
890 890 p = self.changelog.parents(n)[0]
891 891 if i == f:
892 892 l.append(n)
893 893 f = f * 2
894 894 n = p
895 895 i += 1
896 896
897 897 r.append(l)
898 898
899 899 return r
900 900
901 901 def findincoming(self, remote, base=None, heads=None, force=False):
902 902 """Return list of roots of the subsets of missing nodes from remote
903 903
904 904 If base dict is specified, assume that these nodes and their parents
905 905 exist on the remote side and that no child of a node of base exists
906 906 in both remote and self.
907 907 Furthermore, base will be updated to include the nodes that exist in
908 908 both self and remote but none of whose children exist in both.
909 909 If a list of heads is specified, return only nodes which are heads
910 910 or ancestors of these heads.
911 911
912 912 All the ancestors of base are in self and in remote.
913 913 All the descendants of the list returned are missing in self.
914 914 (and so we know that the rest of the nodes are missing in remote, see
915 915 outgoing)
916 916 """
917 917 m = self.changelog.nodemap
918 918 search = []
919 919 fetch = {}
920 920 seen = {}
921 921 seenbranch = {}
922 922 if base == None:
923 923 base = {}
924 924
925 925 if not heads:
926 926 heads = remote.heads()
927 927
928 928 if self.changelog.tip() == nullid:
929 929 base[nullid] = 1
930 930 if heads != [nullid]:
931 931 return [nullid]
932 932 return []
933 933
934 934 # assume we're closer to the tip than the root
935 935 # and start by examining the heads
936 936 self.ui.status(_("searching for changes\n"))
937 937
938 938 unknown = []
939 939 for h in heads:
940 940 if h not in m:
941 941 unknown.append(h)
942 942 else:
943 943 base[h] = 1
944 944
945 945 if not unknown:
946 946 return []
947 947
948 948 req = dict.fromkeys(unknown)
949 949 reqcnt = 0
950 950
951 951 # search through remote branches
952 952 # a 'branch' here is a linear segment of history, with four parts:
953 953 # head, root, first parent, second parent
954 954 # (a branch always has two parents (or none) by definition)
955 955 unknown = remote.branches(unknown)
956 956 while unknown:
957 957 r = []
958 958 while unknown:
959 959 n = unknown.pop(0)
960 960 if n[0] in seen:
961 961 continue
962 962
963 963 self.ui.debug(_("examining %s:%s\n")
964 964 % (short(n[0]), short(n[1])))
965 965 if n[0] == nullid: # found the end of the branch
966 966 pass
967 967 elif n in seenbranch:
968 968 self.ui.debug(_("branch already found\n"))
969 969 continue
970 970 elif n[1] and n[1] in m: # do we know the base?
971 971 self.ui.debug(_("found incomplete branch %s:%s\n")
972 972 % (short(n[0]), short(n[1])))
973 973 search.append(n) # schedule branch range for scanning
974 974 seenbranch[n] = 1
975 975 else:
976 976 if n[1] not in seen and n[1] not in fetch:
977 977 if n[2] in m and n[3] in m:
978 978 self.ui.debug(_("found new changeset %s\n") %
979 979 short(n[1]))
980 980 fetch[n[1]] = 1 # earliest unknown
981 981 for p in n[2:4]:
982 982 if p in m:
983 983 base[p] = 1 # latest known
984 984
985 985 for p in n[2:4]:
986 986 if p not in req and p not in m:
987 987 r.append(p)
988 988 req[p] = 1
989 989 seen[n[0]] = 1
990 990
991 991 if r:
992 992 reqcnt += 1
993 993 self.ui.debug(_("request %d: %s\n") %
994 994 (reqcnt, " ".join(map(short, r))))
995 995 for p in range(0, len(r), 10):
996 996 for b in remote.branches(r[p:p+10]):
997 997 self.ui.debug(_("received %s:%s\n") %
998 998 (short(b[0]), short(b[1])))
999 999 unknown.append(b)
1000 1000
1001 1001 # do binary search on the branches we found
1002 1002 while search:
1003 1003 n = search.pop(0)
1004 1004 reqcnt += 1
1005 1005 l = remote.between([(n[0], n[1])])[0]
1006 1006 l.append(n[1])
1007 1007 p = n[0]
1008 1008 f = 1
1009 1009 for i in l:
1010 1010 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1011 1011 if i in m:
1012 1012 if f <= 2:
1013 1013 self.ui.debug(_("found new branch changeset %s\n") %
1014 1014 short(p))
1015 1015 fetch[p] = 1
1016 1016 base[i] = 1
1017 1017 else:
1018 1018 self.ui.debug(_("narrowed branch search to %s:%s\n")
1019 1019 % (short(p), short(i)))
1020 1020 search.append((p, i))
1021 1021 break
1022 1022 p, f = i, f * 2
1023 1023
1024 1024 # sanity check our fetch list
1025 1025 for f in fetch.keys():
1026 1026 if f in m:
1027 1027 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1028 1028
1029 1029 if base.keys() == [nullid]:
1030 1030 if force:
1031 1031 self.ui.warn(_("warning: repository is unrelated\n"))
1032 1032 else:
1033 1033 raise util.Abort(_("repository is unrelated"))
1034 1034
1035 1035 self.ui.note(_("found new changesets starting at ") +
1036 1036 " ".join([short(f) for f in fetch]) + "\n")
1037 1037
1038 1038 self.ui.debug(_("%d total queries\n") % reqcnt)
1039 1039
1040 1040 return fetch.keys()
1041 1041
1042 1042 def findoutgoing(self, remote, base=None, heads=None, force=False):
1043 1043 """Return list of nodes that are roots of subsets not in remote
1044 1044
1045 1045 If base dict is specified, assume that these nodes and their parents
1046 1046 exist on the remote side.
1047 1047 If a list of heads is specified, return only nodes which are heads
1048 1048 or ancestors of these heads, and return a second element which
1049 1049 contains all remote heads which get new children.
1050 1050 """
1051 1051 if base == None:
1052 1052 base = {}
1053 1053 self.findincoming(remote, base, heads, force=force)
1054 1054
1055 1055 self.ui.debug(_("common changesets up to ")
1056 1056 + " ".join(map(short, base.keys())) + "\n")
1057 1057
1058 1058 remain = dict.fromkeys(self.changelog.nodemap)
1059 1059
1060 1060 # prune everything remote has from the tree
1061 1061 del remain[nullid]
1062 1062 remove = base.keys()
1063 1063 while remove:
1064 1064 n = remove.pop(0)
1065 1065 if n in remain:
1066 1066 del remain[n]
1067 1067 for p in self.changelog.parents(n):
1068 1068 remove.append(p)
1069 1069
1070 1070 # find every node whose parents have been pruned
1071 1071 subset = []
1072 1072 # find every remote head that will get new children
1073 1073 updated_heads = {}
1074 1074 for n in remain:
1075 1075 p1, p2 = self.changelog.parents(n)
1076 1076 if p1 not in remain and p2 not in remain:
1077 1077 subset.append(n)
1078 1078 if heads:
1079 1079 if p1 in heads:
1080 1080 updated_heads[p1] = True
1081 1081 if p2 in heads:
1082 1082 updated_heads[p2] = True
1083 1083
1084 1084 # this is the set of all roots we have to push
1085 1085 if heads:
1086 1086 return subset, updated_heads.keys()
1087 1087 else:
1088 1088 return subset
1089 1089
1090 1090 def pull(self, remote, heads=None, force=False):
1091 1091 l = self.lock()
1092 1092
1093 1093 fetch = self.findincoming(remote, force=force)
1094 1094 if fetch == [nullid]:
1095 1095 self.ui.status(_("requesting all changes\n"))
1096 1096
1097 1097 if not fetch:
1098 1098 self.ui.status(_("no changes found\n"))
1099 1099 return 0
1100 1100
1101 1101 if heads is None:
1102 1102 cg = remote.changegroup(fetch, 'pull')
1103 1103 else:
1104 1104 cg = remote.changegroupsubset(fetch, heads, 'pull')
1105 1105 return self.addchangegroup(cg, 'pull')
1106 1106
1107 1107 def push(self, remote, force=False, revs=None):
1108 1108 lock = remote.lock()
1109 1109
1110 1110 base = {}
1111 1111 remote_heads = remote.heads()
1112 1112 inc = self.findincoming(remote, base, remote_heads, force=force)
1113 1113 if not force and inc:
1114 1114 self.ui.warn(_("abort: unsynced remote changes!\n"))
1115 1115 self.ui.status(_("(did you forget to sync?"
1116 1116 " use push -f to force)\n"))
1117 1117 return 1
1118 1118
1119 1119 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1120 1120 if revs is not None:
1121 1121 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1122 1122 else:
1123 1123 bases, heads = update, self.changelog.heads()
1124 1124
1125 1125 if not bases:
1126 1126 self.ui.status(_("no changes found\n"))
1127 1127 return 1
1128 1128 elif not force:
1129 1129 # FIXME we don't properly detect creation of new heads
1130 1130 # in the push -r case, assume the user knows what he's doing
1131 1131 if not revs and len(remote_heads) < len(heads) \
1132 1132 and remote_heads != [nullid]:
1133 1133 self.ui.warn(_("abort: push creates new remote branches!\n"))
1134 1134 self.ui.status(_("(did you forget to merge?"
1135 1135 " use push -f to force)\n"))
1136 1136 return 1
1137 1137
1138 1138 if revs is None:
1139 1139 cg = self.changegroup(update, 'push')
1140 1140 else:
1141 1141 cg = self.changegroupsubset(update, revs, 'push')
1142 1142 return remote.addchangegroup(cg, 'push')
1143 1143
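The guard in push() above ("push creates new remote branches!") boils down to a head-count comparison, which the FIXME admits is only a heuristic. A minimal sketch of that comparison, with nullid written out as Mercurial's 20 null bytes and the head lists passed as plain arguments:

    def push_creates_new_heads(local_heads, remote_heads, force=False, revs=None):
        # Heuristic only: without --force and without -r, refuse when the
        # local repository has more heads than the remote already knows about.
        nullid = b"\0" * 20          # empty-repository sentinel
        if force or revs:
            return False
        return len(remote_heads) < len(local_heads) and remote_heads != [nullid]

    print(push_creates_new_heads([b"h1", b"h2"], [b"h1"]))   # True: would add a head
    print(push_creates_new_heads([b"h1"], [b"h1"]))          # False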
1144 1144 def changegroupsubset(self, bases, heads, source):
1145 1145 """This function generates a changegroup consisting of all the nodes
1146 1146 that are descendants of any of the bases, and ancestors of any of
1147 1147 the heads.
1148 1148
1149 1149 It is fairly complex as determining which filenodes and which
1150 1150 manifest nodes need to be included for the changeset to be complete
1151 1151 is non-trivial.
1152 1152
1153 1153 Another wrinkle is doing the reverse, figuring out which changeset in
1154 1154 the changegroup a particular filenode or manifestnode belongs to."""
1155 1155
1156 1156 self.hook('preoutgoing', throw=True, source=source)
1157 1157
1158 1158 # Set up some initial variables
1159 1159 # Make it easy to refer to self.changelog
1160 1160 cl = self.changelog
1161 1161 # msng is short for missing - compute the list of changesets in this
1162 1162 # changegroup.
1163 1163 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1164 1164 # Some bases may turn out to be superfluous, and some heads may be
1165 1165 # too. nodesbetween will return the minimal set of bases and heads
1166 1166 # necessary to re-create the changegroup.
1167 1167
1168 1168 # Known heads are the list of heads that it is assumed the recipient
1169 1169 # of this changegroup will know about.
1170 1170 knownheads = {}
1171 1171 # We assume that all parents of bases are known heads.
1172 1172 for n in bases:
1173 1173 for p in cl.parents(n):
1174 1174 if p != nullid:
1175 1175 knownheads[p] = 1
1176 1176 knownheads = knownheads.keys()
1177 1177 if knownheads:
1178 1178 # Now that we know what heads are known, we can compute which
1179 1179 # changesets are known. The recipient must know about all
1180 1180 # changesets required to reach the known heads from the null
1181 1181 # changeset.
1182 1182 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1183 1183 junk = None
1184 1184 # Transform the list into an ersatz set.
1185 1185 has_cl_set = dict.fromkeys(has_cl_set)
1186 1186 else:
1187 1187 # If there were no known heads, the recipient cannot be assumed to
1188 1188 # know about any changesets.
1189 1189 has_cl_set = {}
1190 1190
1191 1191 # Make it easy to refer to self.manifest
1192 1192 mnfst = self.manifest
1193 1193 # We don't know which manifests are missing yet
1194 1194 msng_mnfst_set = {}
1195 1195 # Nor do we know which filenodes are missing.
1196 1196 msng_filenode_set = {}
1197 1197
1198 1198 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1199 1199 junk = None
1200 1200
1201 1201 # A changeset always belongs to itself, so the changenode lookup
1202 1202 # function for a changenode is identity.
1203 1203 def identity(x):
1204 1204 return x
1205 1205
1206 1206 # A function generating function. Sets up an environment for the
1207 1207 # inner function.
1208 1208 def cmp_by_rev_func(revlog):
1209 1209 # Compare two nodes by their revision number in the environment's
1210 1210 # revision history. Since the revision number both represents the
1211 1211 # most efficient order to read the nodes in, and represents a
1212 1212 # topological sorting of the nodes, this function is often useful.
1213 1213 def cmp_by_rev(a, b):
1214 1214 return cmp(revlog.rev(a), revlog.rev(b))
1215 1215 return cmp_by_rev
1216 1216
1217 1217 # If we determine that a particular file or manifest node must be a
1218 1218 # node that the recipient of the changegroup will already have, we can
1219 1219 # also assume the recipient will have all the parents. This function
1220 1220 # prunes them from the set of missing nodes.
1221 1221 def prune_parents(revlog, hasset, msngset):
1222 1222 haslst = hasset.keys()
1223 1223 haslst.sort(cmp_by_rev_func(revlog))
1224 1224 for node in haslst:
1225 1225 parentlst = [p for p in revlog.parents(node) if p != nullid]
1226 1226 while parentlst:
1227 1227 n = parentlst.pop()
1228 1228 if n not in hasset:
1229 1229 hasset[n] = 1
1230 1230 p = [p for p in revlog.parents(n) if p != nullid]
1231 1231 parentlst.extend(p)
1232 1232 for n in hasset:
1233 1233 msngset.pop(n, None)
1234 1234
1235 1235 # This is a function generating function used to set up an environment
1236 1236 # for the inner function to execute in.
1237 1237 def manifest_and_file_collector(changedfileset):
1238 1238 # This is an information gathering function that gathers
1239 1239 # information from each changeset node that goes out as part of
1240 1240 # the changegroup. The information gathered is a list of which
1241 1241 # manifest nodes are potentially required (the recipient may
1242 1242 # already have them) and total list of all files which were
1243 1243 # changed in any changeset in the changegroup.
1244 1244 #
1245 1245 # We also remember the first changenode we saw any manifest
1246 1246 # referenced by so we can later determine which changenode 'owns'
1247 1247 # the manifest.
1248 1248 def collect_manifests_and_files(clnode):
1249 1249 c = cl.read(clnode)
1250 1250 for f in c[3]:
1251 1251 # This is to make sure we only have one instance of each
1252 1252 # filename string for each filename.
1253 1253 changedfileset.setdefault(f, f)
1254 1254 msng_mnfst_set.setdefault(c[0], clnode)
1255 1255 return collect_manifests_and_files
1256 1256
1257 1257 # Figure out which manifest nodes (of the ones we think might be part
1258 1258 # of the changegroup) the recipient must know about and remove them
1259 1259 # from the changegroup.
1260 1260 def prune_manifests():
1261 1261 has_mnfst_set = {}
1262 1262 for n in msng_mnfst_set:
1263 1263 # If a 'missing' manifest thinks it belongs to a changenode
1264 1264 # the recipient is assumed to have, obviously the recipient
1265 1265 # must have that manifest.
1266 1266 linknode = cl.node(mnfst.linkrev(n))
1267 1267 if linknode in has_cl_set:
1268 1268 has_mnfst_set[n] = 1
1269 1269 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1270 1270
1271 1271 # Use the information collected in collect_manifests_and_files to say
1272 1272 # which changenode any manifestnode belongs to.
1273 1273 def lookup_manifest_link(mnfstnode):
1274 1274 return msng_mnfst_set[mnfstnode]
1275 1275
1276 1276 # A function generating function that sets up the initial environment
1277 1277 # for the inner function.
1278 1278 def filenode_collector(changedfiles):
1279 1279 next_rev = [0]
1280 1280 # This gathers information from each manifestnode included in the
1281 1281 # changegroup about which filenodes the manifest node references
1282 1282 # so we can include those in the changegroup too.
1283 1283 #
1284 1284 # It also remembers which changenode each filenode belongs to. It
1285 1285 # does this by assuming that a filenode belongs to the changenode
1286 1286 # the first manifest that references it belongs to.
1287 1287 def collect_msng_filenodes(mnfstnode):
1288 1288 r = mnfst.rev(mnfstnode)
1289 1289 if r == next_rev[0]:
1290 1290 # If the last rev we looked at was the one just previous,
1291 1291 # we only need to see a diff.
1292 1292 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1293 1293 # For each line in the delta
1294 1294 for dline in delta.splitlines():
1295 1295 # get the filename and filenode for that line
1296 1296 f, fnode = dline.split('\0')
1297 1297 fnode = bin(fnode[:40])
1298 1298 f = changedfiles.get(f, None)
1299 1299 # And if the file is in the list of files we care
1300 1300 # about.
1301 1301 if f is not None:
1302 1302 # Get the changenode this manifest belongs to
1303 1303 clnode = msng_mnfst_set[mnfstnode]
1304 1304 # Create the set of filenodes for the file if
1305 1305 # there isn't one already.
1306 1306 ndset = msng_filenode_set.setdefault(f, {})
1307 1307 # And set the filenode's changelog node to the
1308 1308 # manifest's if it hasn't been set already.
1309 1309 ndset.setdefault(fnode, clnode)
1310 1310 else:
1311 1311 # Otherwise we need a full manifest.
1312 1312 m = mnfst.read(mnfstnode)
1313 1313 # For every file we care about.
1314 1314 for f in changedfiles:
1315 1315 fnode = m.get(f, None)
1316 1316 # If it's in the manifest
1317 1317 if fnode is not None:
1318 1318 # See comments above.
1319 1319 clnode = msng_mnfst_set[mnfstnode]
1320 1320 ndset = msng_filenode_set.setdefault(f, {})
1321 1321 ndset.setdefault(fnode, clnode)
1322 1322 # Remember the revision we hope to see next.
1323 1323 next_rev[0] = r + 1
1324 1324 return collect_msng_filenodes
1325 1325
1326 1326 # We have a list of filenodes we think we need for a file, let's remove
1327 1327 # all those we know the recipient must have.
1328 1328 def prune_filenodes(f, filerevlog):
1329 1329 msngset = msng_filenode_set[f]
1330 1330 hasset = {}
1331 1331 # If a 'missing' filenode thinks it belongs to a changenode we
1332 1332 # assume the recipient must have, then the recipient must have
1333 1333 # that filenode.
1334 1334 for n in msngset:
1335 1335 clnode = cl.node(filerevlog.linkrev(n))
1336 1336 if clnode in has_cl_set:
1337 1337 hasset[n] = 1
1338 1338 prune_parents(filerevlog, hasset, msngset)
1339 1339
1340 1340 # A function generating function that sets up a context for the
1341 1341 # inner function.
1342 1342 def lookup_filenode_link_func(fname):
1343 1343 msngset = msng_filenode_set[fname]
1344 1344 # Lookup the changenode the filenode belongs to.
1345 1345 def lookup_filenode_link(fnode):
1346 1346 return msngset[fnode]
1347 1347 return lookup_filenode_link
1348 1348
1349 1349 # Now that we have all these utility functions to help out and
1350 1350 # logically divide up the task, generate the group.
1351 1351 def gengroup():
1352 1352 # The set of changed files starts empty.
1353 1353 changedfiles = {}
1354 1354 # Create a changenode group generator that will call our functions
1355 1355 # back to lookup the owning changenode and collect information.
1356 1356 group = cl.group(msng_cl_lst, identity,
1357 1357 manifest_and_file_collector(changedfiles))
1358 1358 for chnk in group:
1359 1359 yield chnk
1360 1360
1361 1361 # The list of manifests has been collected by the generator
1362 1362 # calling our functions back.
1363 1363 prune_manifests()
1364 1364 msng_mnfst_lst = msng_mnfst_set.keys()
1365 1365 # Sort the manifestnodes by revision number.
1366 1366 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1367 1367 # Create a generator for the manifestnodes that calls our lookup
1368 1368 # and data collection functions back.
1369 1369 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1370 1370 filenode_collector(changedfiles))
1371 1371 for chnk in group:
1372 1372 yield chnk
1373 1373
1374 1374 # These are no longer needed, dereference and toss the memory for
1375 1375 # them.
1376 1376 msng_mnfst_lst = None
1377 1377 msng_mnfst_set.clear()
1378 1378
1379 1379 changedfiles = changedfiles.keys()
1380 1380 changedfiles.sort()
1381 1381 # Go through all our files in order sorted by name.
1382 1382 for fname in changedfiles:
1383 1383 filerevlog = self.file(fname)
1384 1384 # Toss out the filenodes that the recipient isn't really
1385 1385 # missing.
1386 1386 if msng_filenode_set.has_key(fname):
1387 1387 prune_filenodes(fname, filerevlog)
1388 1388 msng_filenode_lst = msng_filenode_set[fname].keys()
1389 1389 else:
1390 1390 msng_filenode_lst = []
1391 1391 # If any filenodes are left, generate the group for them,
1392 1392 # otherwise don't bother.
1393 1393 if len(msng_filenode_lst) > 0:
1394 1394 yield changegroup.genchunk(fname)
1395 1395 # Sort the filenodes by their revision #
1396 1396 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1397 1397 # Create a group generator and only pass in a changenode
1398 1398 # lookup function as we need to collect no information
1399 1399 # from filenodes.
1400 1400 group = filerevlog.group(msng_filenode_lst,
1401 1401 lookup_filenode_link_func(fname))
1402 1402 for chnk in group:
1403 1403 yield chnk
1404 1404 if msng_filenode_set.has_key(fname):
1405 1405 # Don't need this anymore, toss it to free memory.
1406 1406 del msng_filenode_set[fname]
1407 1407 # Signal that no more groups are left.
1408 1408 yield changegroup.closechunk()
1409 1409
1410 1410 if msng_cl_lst:
1411 1411 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1412 1412
1413 1413 return util.chunkbuffer(gengroup())
1414 1414
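changegroupsubset() leans heavily on prune_parents(): once a node is known to exist on the receiving side, all of its ancestors must exist there too and can be dropped from the "missing" sets. A standalone sketch of that idea, using a hypothetical parents dict instead of a revlog:

    def prune_known(parents, has, missing):
        # parents -- dict: node -> tuple of parent nodes (None = no parent)
        # has     -- nodes the recipient is assumed to have
        # missing -- set of candidate nodes, pruned in place
        stack = list(has)
        seen = set(has)
        while stack:
            n = stack.pop()
            missing.discard(n)
            for p in parents.get(n, ()):
                if p is not None and p not in seen:
                    seen.add(p)
                    stack.append(p)
        return missing

    history = {"a": (None,), "b": ("a",), "c": ("b",), "d": ("b",)}
    print(prune_known(history, {"c"}, {"a", "b", "c", "d"}))   # {'d'}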
1415 1415 def changegroup(self, basenodes, source):
1416 1416 """Generate a changegroup of all nodes that we have that a recipient
1417 1417 doesn't.
1418 1418
1419 1419 This is much easier than the previous function as we can assume that
1420 1420 the recipient has any changenode we aren't sending them."""
1421 1421
1422 1422 self.hook('preoutgoing', throw=True, source=source)
1423 1423
1424 1424 cl = self.changelog
1425 1425 nodes = cl.nodesbetween(basenodes, None)[0]
1426 1426 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1427 1427
1428 1428 def identity(x):
1429 1429 return x
1430 1430
1431 1431 def gennodelst(revlog):
1432 1432 for r in xrange(0, revlog.count()):
1433 1433 n = revlog.node(r)
1434 1434 if revlog.linkrev(n) in revset:
1435 1435 yield n
1436 1436
1437 1437 def changed_file_collector(changedfileset):
1438 1438 def collect_changed_files(clnode):
1439 1439 c = cl.read(clnode)
1440 1440 for fname in c[3]:
1441 1441 changedfileset[fname] = 1
1442 1442 return collect_changed_files
1443 1443
1444 1444 def lookuprevlink_func(revlog):
1445 1445 def lookuprevlink(n):
1446 1446 return cl.node(revlog.linkrev(n))
1447 1447 return lookuprevlink
1448 1448
1449 1449 def gengroup():
1450 1450 # construct a list of all changed files
1451 1451 changedfiles = {}
1452 1452
1453 1453 for chnk in cl.group(nodes, identity,
1454 1454 changed_file_collector(changedfiles)):
1455 1455 yield chnk
1456 1456 changedfiles = changedfiles.keys()
1457 1457 changedfiles.sort()
1458 1458
1459 1459 mnfst = self.manifest
1460 1460 nodeiter = gennodelst(mnfst)
1461 1461 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1462 1462 yield chnk
1463 1463
1464 1464 for fname in changedfiles:
1465 1465 filerevlog = self.file(fname)
1466 1466 nodeiter = gennodelst(filerevlog)
1467 1467 nodeiter = list(nodeiter)
1468 1468 if nodeiter:
1469 1469 yield changegroup.genchunk(fname)
1470 1470 lookup = lookuprevlink_func(filerevlog)
1471 1471 for chnk in filerevlog.group(nodeiter, lookup):
1472 1472 yield chnk
1473 1473
1474 1474 yield changegroup.closechunk()
1475 1475
1476 1476 if nodes:
1477 1477 self.hook('outgoing', node=hex(nodes[0]), source=source)
1478 1478
1479 1479 return util.chunkbuffer(gengroup())
1480 1480
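The simpler changegroup() above selects, from each revlog, only the revisions whose link revision falls in the outgoing changeset set (gennodelst). The same filter on plain Python data, with revlog_nodes and linkrev as hypothetical stand-ins for the revlog API:

    def nodes_linked_to(revlog_nodes, linkrev, wanted_revs):
        # revlog_nodes -- nodes in revision order
        # linkrev      -- dict: node -> changelog revision that introduced it
        # wanted_revs  -- set of outgoing changelog revisions
        for n in revlog_nodes:
            if linkrev[n] in wanted_revs:
                yield n

    print(list(nodes_linked_to(["f0", "f1", "f2"],
                               {"f0": 0, "f1": 2, "f2": 5},
                               {2, 5})))   # ['f1', 'f2']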
1481 1481 def addchangegroup(self, source, srctype):
1482 1482 """add changegroup to repo.
1483 1483 returns number of heads modified or added + 1."""
1484 1484
1485 1485 def csmap(x):
1486 1486 self.ui.debug(_("add changeset %s\n") % short(x))
1487 1487 return cl.count()
1488 1488
1489 1489 def revmap(x):
1490 1490 return cl.rev(x)
1491 1491
1492 1492 if not source:
1493 1493 return 0
1494 1494
1495 1495 self.hook('prechangegroup', throw=True, source=srctype)
1496 1496
1497 1497 changesets = files = revisions = 0
1498 1498
1499 1499 tr = self.transaction()
1500 1500
1501 1501 # write changelog and manifest data to temp files so
1502 1502 # concurrent readers will not see inconsistent view
1503 1503 cl = None
1504 1504 try:
1505 1505 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1506 1506
1507 1507 oldheads = len(cl.heads())
1508 1508
1509 1509 # pull off the changeset group
1510 1510 self.ui.status(_("adding changesets\n"))
1511 1511 cor = cl.count() - 1
1512 1512 chunkiter = changegroup.chunkiter(source)
1513 1513 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1514 1514 raise util.Abort(_("received changelog group is empty"))
1515 1515 cnr = cl.count() - 1
1516 1516 changesets = cnr - cor
1517 1517
1518 1518 mf = None
1519 1519 try:
1520 1520 mf = appendfile.appendmanifest(self.opener,
1521 1521 self.manifest.version)
1522 1522
1523 1523 # pull off the manifest group
1524 1524 self.ui.status(_("adding manifests\n"))
1525 1525 chunkiter = changegroup.chunkiter(source)
1526 1526 # no need to check for empty manifest group here:
1527 1527 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1528 1528 # no new manifest will be created and the manifest group will
1529 1529 # be empty during the pull
1530 1530 mf.addgroup(chunkiter, revmap, tr)
1531 1531
1532 1532 # process the files
1533 1533 self.ui.status(_("adding file changes\n"))
1534 1534 while 1:
1535 1535 f = changegroup.getchunk(source)
1536 1536 if not f:
1537 1537 break
1538 1538 self.ui.debug(_("adding %s revisions\n") % f)
1539 1539 fl = self.file(f)
1540 1540 o = fl.count()
1541 1541 chunkiter = changegroup.chunkiter(source)
1542 1542 if fl.addgroup(chunkiter, revmap, tr) is None:
1543 1543 raise util.Abort(_("received file revlog group is empty"))
1544 1544 revisions += fl.count() - o
1545 1545 files += 1
1546 1546
1547 1547 # write order here is important so concurrent readers will see
1548 1548 # consistent view of repo
1549 1549 mf.writedata()
1550 1550 finally:
1551 1551 if mf:
1552 1552 mf.cleanup()
1553 1553 cl.writedata()
1554 1554 finally:
1555 1555 if cl:
1556 1556 cl.cleanup()
1557 1557
1558 1558 # make changelog and manifest see real files again
1559 1559 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1560 1560 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1561 1561 self.changelog.checkinlinesize(tr)
1562 1562 self.manifest.checkinlinesize(tr)
1563 1563
1564 1564 newheads = len(self.changelog.heads())
1565 1565 heads = ""
1566 1566 if oldheads and newheads > oldheads:
1567 1567 heads = _(" (+%d heads)") % (newheads - oldheads)
1568 1568
1569 1569 self.ui.status(_("added %d changesets"
1570 1570 " with %d changes to %d files%s\n")
1571 1571 % (changesets, revisions, files, heads))
1572 1572
1573 1573 if changesets > 0:
1574 1574 self.hook('pretxnchangegroup', throw=True,
1575 1575 node=hex(self.changelog.node(cor+1)), source=srctype)
1576 1576
1577 1577 tr.close()
1578 1578
1579 1579 if changesets > 0:
1580 1580 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1581 1581 source=srctype)
1582 1582
1583 1583 for i in range(cor + 1, cnr + 1):
1584 1584 self.hook("incoming", node=hex(self.changelog.node(i)),
1585 1585 source=srctype)
1586 1586
1587 1587 return newheads - oldheads + 1
1588 1588
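addchangegroup() above consumes the changegroup stream in a fixed order: one group of changelog chunks, one group of manifest chunks, then for every changed file its name followed by a group of filelog chunks, with an empty name ending the stream. The sketch below walks that layout over a plain Python list (None marks the end of a group); it stands in for changegroup.chunkiter()/getchunk(), not the real wire encoding.

    def read_changegroup(stream):
        def group(it):
            for item in it:
                if item is None:        # end-of-group marker
                    return
                yield item

        it = iter(stream)
        changesets = list(group(it))
        manifests = list(group(it))
        files = {}
        for fname in group(it):         # the terminating marker ends this loop
            files[fname] = list(group(it))
        return changesets, manifests, files

    demo = ["c1", "c2", None, "m1", None, "a.txt", "f1", None, None]
    print(read_changegroup(demo))
    # (['c1', 'c2'], ['m1'], {'a.txt': ['f1']})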
1589 1589 def update(self, node, allow=False, force=False, choose=None,
1590 1590 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1591 1591 pl = self.dirstate.parents()
1592 1592 if not force and pl[1] != nullid:
1593 1593 raise util.Abort(_("outstanding uncommitted merges"))
1594 1594
1595 1595 err = False
1596 1596
1597 1597 p1, p2 = pl[0], node
1598 1598 pa = self.changelog.ancestor(p1, p2)
1599 1599 m1n = self.changelog.read(p1)[0]
1600 1600 m2n = self.changelog.read(p2)[0]
1601 1601 man = self.manifest.ancestor(m1n, m2n)
1602 1602 m1 = self.manifest.read(m1n)
1603 1603 mf1 = self.manifest.readflags(m1n)
1604 1604 m2 = self.manifest.read(m2n).copy()
1605 1605 mf2 = self.manifest.readflags(m2n)
1606 1606 ma = self.manifest.read(man)
1607 1607 mfa = self.manifest.readflags(man)
1608 1608
1609 1609 modified, added, removed, deleted, unknown = self.changes()
1610 1610
1611 1611 # is this a jump, or a merge? i.e. is there a linear path
1612 1612 # from p1 to p2?
1613 1613 linear_path = (pa == p1 or pa == p2)
1614 1614
1615 1615 if allow and linear_path:
1616 1616 raise util.Abort(_("there is nothing to merge, "
1617 1617 "just use 'hg update'"))
1618 1618 if allow and not forcemerge:
1619 1619 if modified or added or removed:
1620 1620 raise util.Abort(_("outstanding uncommitted changes"))
1621 1621
1622 1622 if not forcemerge and not force:
1623 1623 for f in unknown:
1624 1624 if f in m2:
1625 1625 t1 = self.wread(f)
1626 1626 t2 = self.file(f).read(m2[f])
1627 1627 if cmp(t1, t2) != 0:
1628 1628 raise util.Abort(_("'%s' already exists in the working"
1629 1629 " dir and differs from remote") % f)
1630 1630
1631 1631 # resolve the manifest to determine which files
1632 1632 # we care about merging
1633 1633 self.ui.note(_("resolving manifests\n"))
1634 1634 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1635 1635 (force, allow, moddirstate, linear_path))
1636 1636 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1637 1637 (short(man), short(m1n), short(m2n)))
1638 1638
1639 1639 merge = {}
1640 1640 get = {}
1641 1641 remove = []
1642 1642
1643 1643 # construct a working dir manifest
1644 1644 mw = m1.copy()
1645 1645 mfw = mf1.copy()
1646 1646 umap = dict.fromkeys(unknown)
1647 1647
1648 1648 for f in added + modified + unknown:
1649 1649 mw[f] = ""
1650 1650 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1651 1651
1652 1652 if moddirstate and not wlock:
1653 1653 wlock = self.wlock()
1654 1654
1655 1655 for f in deleted + removed:
1656 1656 if f in mw:
1657 1657 del mw[f]
1658 1658
1659 1659 # If we're jumping between revisions (as opposed to merging),
1660 1660 # and if neither the working directory nor the target rev has
1661 1661 # the file, then we need to remove it from the dirstate, to
1662 1662 # prevent the dirstate from listing the file when it is no
1663 1663 # longer in the manifest.
1664 1664 if moddirstate and linear_path and f not in m2:
1665 1665 self.dirstate.forget((f,))
1666 1666
1667 1667 # Compare manifests
1668 1668 for f, n in mw.iteritems():
1669 1669 if choose and not choose(f):
1670 1670 continue
1671 1671 if f in m2:
1672 1672 s = 0
1673 1673
1674 1674 # is the wfile new since m1, and does it match m2?
1675 1675 if f not in m1:
1676 1676 t1 = self.wread(f)
1677 1677 t2 = self.file(f).read(m2[f])
1678 1678 if cmp(t1, t2) == 0:
1679 1679 n = m2[f]
1680 1680 del t1, t2
1681 1681
1682 1682 # are files different?
1683 1683 if n != m2[f]:
1684 1684 a = ma.get(f, nullid)
1685 1685 # are both different from the ancestor?
1686 1686 if n != a and m2[f] != a:
1687 1687 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1688 1688 # merge executable bits
1689 1689 # "if we changed or they changed, change in merge"
1690 1690 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1691 1691 mode = ((a^b) | (a^c)) ^ a
1692 1692 merge[f] = (m1.get(f, nullid), m2[f], mode)
1693 1693 s = 1
1694 1694 # are we clobbering?
1695 1695 # is remote's version newer?
1696 1696 # or are we going back in time?
1697 1697 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1698 1698 self.ui.debug(_(" remote %s is newer, get\n") % f)
1699 1699 get[f] = m2[f]
1700 1700 s = 1
1701 1701 elif f in umap or f in added:
1702 1702 # this unknown file is the same as the checkout
1703 1703 # we need to reset the dirstate if the file was added
1704 1704 get[f] = m2[f]
1705 1705
1706 1706 if not s and mfw[f] != mf2[f]:
1707 1707 if force:
1708 1708 self.ui.debug(_(" updating permissions for %s\n") % f)
1709 1709 util.set_exec(self.wjoin(f), mf2[f])
1710 1710 else:
1711 1711 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1712 1712 mode = ((a^b) | (a^c)) ^ a
1713 1713 if mode != b:
1714 1714 self.ui.debug(_(" updating permissions for %s\n")
1715 1715 % f)
1716 1716 util.set_exec(self.wjoin(f), mode)
1717 1717 del m2[f]
1718 1718 elif f in ma:
1719 1719 if n != ma[f]:
1720 1720 r = _("d")
1721 1721 if not force and (linear_path or allow):
1722 1722 r = self.ui.prompt(
1723 1723 (_(" local changed %s which remote deleted\n") % f) +
1724 1724 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1725 1725 if r == _("d"):
1726 1726 remove.append(f)
1727 1727 else:
1728 1728 self.ui.debug(_("other deleted %s\n") % f)
1729 1729 remove.append(f) # other deleted it
1730 1730 else:
1731 1731 # file is created on branch or in working directory
1732 1732 if force and f not in umap:
1733 1733 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1734 1734 remove.append(f)
1735 1735 elif n == m1.get(f, nullid): # same as parent
1736 1736 if p2 == pa: # going backwards?
1737 1737 self.ui.debug(_("remote deleted %s\n") % f)
1738 1738 remove.append(f)
1739 1739 else:
1740 1740 self.ui.debug(_("local modified %s, keeping\n") % f)
1741 1741 else:
1742 1742 self.ui.debug(_("working dir created %s, keeping\n") % f)
1743 1743
1744 1744 for f, n in m2.iteritems():
1745 1745 if choose and not choose(f):
1746 1746 continue
1747 1747 if f[0] == "/":
1748 1748 continue
1749 1749 if f in ma and n != ma[f]:
1750 1750 r = _("k")
1751 1751 if not force and (linear_path or allow):
1752 1752 r = self.ui.prompt(
1753 1753 (_("remote changed %s which local deleted\n") % f) +
1754 1754 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1755 1755 if r == _("k"):
1756 1756 get[f] = n
1757 1757 elif f not in ma:
1758 1758 self.ui.debug(_("remote created %s\n") % f)
1759 1759 get[f] = n
1760 1760 else:
1761 1761 if force or p2 == pa: # going backwards?
1762 1762 self.ui.debug(_("local deleted %s, recreating\n") % f)
1763 1763 get[f] = n
1764 1764 else:
1765 1765 self.ui.debug(_("local deleted %s\n") % f)
1766 1766
1767 1767 del mw, m1, m2, ma
1768 1768
1769 1769 if force:
1770 1770 for f in merge:
1771 1771 get[f] = merge[f][1]
1772 1772 merge = {}
1773 1773
1774 1774 if linear_path or force:
1775 1775 # we don't need to do any magic, just jump to the new rev
1776 1776 branch_merge = False
1777 1777 p1, p2 = p2, nullid
1778 1778 else:
1779 1779 if not allow:
1780 1780 self.ui.status(_("this update spans a branch"
1781 1781 " affecting the following files:\n"))
1782 1782 fl = merge.keys() + get.keys()
1783 1783 fl.sort()
1784 1784 for f in fl:
1785 1785 cf = ""
1786 1786 if f in merge:
1787 1787 cf = _(" (resolve)")
1788 1788 self.ui.status(" %s%s\n" % (f, cf))
1789 1789 self.ui.warn(_("aborting update spanning branches!\n"))
1790 1790 self.ui.status(_("(use 'hg merge' to merge across branches"
1791 1791 " or 'hg update -C' to lose changes)\n"))
1792 1792 return 1
1793 1793 branch_merge = True
1794 1794
1795 1795 xp1 = hex(p1)
1796 1796 xp2 = hex(p2)
1797 1797 if p2 == nullid: xxp2 = ''
1798 1798 else: xxp2 = xp2
1799 1799
1800 1800 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1801 1801
1802 1802 # get the files we don't need to change
1803 1803 files = get.keys()
1804 1804 files.sort()
1805 1805 for f in files:
1806 1806 if f[0] == "/":
1807 1807 continue
1808 1808 self.ui.note(_("getting %s\n") % f)
1809 1809 t = self.file(f).read(get[f])
1810 1810 self.wwrite(f, t)
1811 1811 util.set_exec(self.wjoin(f), mf2[f])
1812 1812 if moddirstate:
1813 1813 if branch_merge:
1814 1814 self.dirstate.update([f], 'n', st_mtime=-1)
1815 1815 else:
1816 1816 self.dirstate.update([f], 'n')
1817 1817
1818 1818 # merge the tricky bits
1819 1819 failedmerge = []
1820 1820 files = merge.keys()
1821 1821 files.sort()
1822 1822 for f in files:
1823 1823 self.ui.status(_("merging %s\n") % f)
1824 1824 my, other, flag = merge[f]
1825 1825 ret = self.merge3(f, my, other, xp1, xp2)
1826 1826 if ret:
1827 1827 err = True
1828 1828 failedmerge.append(f)
1829 1829 util.set_exec(self.wjoin(f), flag)
1830 1830 if moddirstate:
1831 1831 if branch_merge:
1832 1832 # We've done a branch merge, mark this file as merged
1833 1833 # so that we properly record the merger later
1834 1834 self.dirstate.update([f], 'm')
1835 1835 else:
1836 1836 # We've update-merged a locally modified file, so
1837 1837 # we set the dirstate to emulate a normal checkout
1838 1838 # of that file some time in the past. Thus our
1839 1839 # merge will appear as a normal local file
1840 1840 # modification.
1841 1841 f_len = len(self.file(f).read(other))
1842 1842 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1843 1843
1844 1844 remove.sort()
1845 1845 for f in remove:
1846 1846 self.ui.note(_("removing %s\n") % f)
1847 1847 util.audit_path(f)
1848 1848 try:
1849 1849 util.unlink(self.wjoin(f))
1850 1850 except OSError, inst:
1851 1851 if inst.errno != errno.ENOENT:
1852 1852 self.ui.warn(_("update failed to remove %s: %s!\n") %
1853 1853 (f, inst.strerror))
1854 1854 if moddirstate:
1855 1855 if branch_merge:
1856 1856 self.dirstate.update(remove, 'r')
1857 1857 else:
1858 1858 self.dirstate.forget(remove)
1859 1859
1860 1860 if moddirstate:
1861 1861 self.dirstate.setparents(p1, p2)
1862 1862
1863 1863 if show_stats:
1864 1864 stats = ((len(get), _("updated")),
1865 1865 (len(merge) - len(failedmerge), _("merged")),
1866 1866 (len(remove), _("removed")),
1867 1867 (len(failedmerge), _("unresolved")))
1868 1868 note = ", ".join([_("%d files %s") % s for s in stats])
1869 1869 self.ui.status("%s\n" % note)
1870 1870 if moddirstate:
1871 1871 if branch_merge:
1872 1872 if failedmerge:
1873 1873 self.ui.status(_("There are unresolved merges,"
1874 1874 " you can redo the full merge using:\n"
1875 1875 " hg update -C %s\n"
1876 1876 " hg merge %s\n"
1877 1877 % (self.changelog.rev(p1),
1878 1878 self.changelog.rev(p2))))
1879 1879 else:
1880 1880 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1881 1881 elif failedmerge:
1882 1882 self.ui.status(_("There are unresolved merges with"
1883 1883 " locally modified files.\n"))
1884 1884
1885 1885 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1886 1886 return err
1887 1887
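update() merges the executable bit with the expression ((a^b) | (a^c)) ^ a, where a is the ancestor's flag, b the working copy's and c the remote's. A worked example makes the comment "if we changed or they changed, change in merge" concrete:

    def merge_exec_bit(a, b, c):
        # (a^b) | (a^c) is 1 exactly when either side changed the bit;
        # xor-ing with a then flips the ancestor value in that case.
        return ((a ^ b) | (a ^ c)) ^ a

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                print(a, b, c, "->", merge_exec_bit(a, b, c))
    # e.g. 0 1 0 -> 1  (we set the bit, it stays set)
    #      1 1 0 -> 0  (they cleared it, it ends up cleared)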
1888 1888 def merge3(self, fn, my, other, p1, p2):
1889 1889 """perform a 3-way merge in the working directory"""
1890 1890
1891 1891 def temp(prefix, node):
1892 1892 pre = "%s~%s." % (os.path.basename(fn), prefix)
1893 1893 (fd, name) = tempfile.mkstemp(prefix=pre)
1894 1894 f = os.fdopen(fd, "wb")
1895 1895 self.wwrite(fn, fl.read(node), f)
1896 1896 f.close()
1897 1897 return name
1898 1898
1899 1899 fl = self.file(fn)
1900 1900 base = fl.ancestor(my, other)
1901 1901 a = self.wjoin(fn)
1902 1902 b = temp("base", base)
1903 1903 c = temp("other", other)
1904 1904
1905 1905 self.ui.note(_("resolving %s\n") % fn)
1906 1906 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1907 1907 (fn, short(my), short(other), short(base)))
1908 1908
1909 1909 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1910 1910 or "hgmerge")
1911 1911 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1912 1912 environ={'HG_FILE': fn,
1913 1913 'HG_MY_NODE': p1,
1914 1914 'HG_OTHER_NODE': p2,
1915 1915 'HG_FILE_MY_NODE': hex(my),
1916 1916 'HG_FILE_OTHER_NODE': hex(other),
1917 1917 'HG_FILE_BASE_NODE': hex(base)})
1918 1918 if r:
1919 1919 self.ui.warn(_("merging %s failed!\n") % fn)
1920 1920
1921 1921 os.unlink(b)
1922 1922 os.unlink(c)
1923 1923 return r
1924 1924
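merge3() above hands the three versions to an external program: the working copy in place, the ancestor and the other version in temporary files. A self-contained sketch of that pattern follows; the tool name "merge" (RCS) is only an assumed example, and the real code instead builds a shell command from $HGMERGE / ui.merge via util.system and exports several HG_* variables.

    import os, subprocess, tempfile

    def external_merge3(local_path, base_text, other_text, tool="merge"):
        def temp(prefix, data):
            pre = "%s~%s." % (os.path.basename(local_path), prefix)
            fd, name = tempfile.mkstemp(prefix=pre)
            with os.fdopen(fd, "wb") as f:
                f.write(data)
            return name

        b = temp("base", base_text)      # ancestor version
        c = temp("other", other_text)    # other parent's version
        try:
            # a non-zero exit status means the merge left conflicts
            return subprocess.call([tool, local_path, b, c])
        finally:
            os.unlink(b)
            os.unlink(c)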
1925 1925 def verify(self):
1926 1926 filelinkrevs = {}
1927 1927 filenodes = {}
1928 1928 changesets = revisions = files = 0
1929 1929 errors = [0]
1930 1930 warnings = [0]
1931 1931 neededmanifests = {}
1932 1932
1933 1933 def err(msg):
1934 1934 self.ui.warn(msg + "\n")
1935 1935 errors[0] += 1
1936 1936
1937 1937 def warn(msg):
1938 1938 self.ui.warn(msg + "\n")
1939 1939 warnings[0] += 1
1940 1940
1941 1941 def checksize(obj, name):
1942 1942 d = obj.checksize()
1943 1943 if d[0]:
1944 1944 err(_("%s data length off by %d bytes") % (name, d[0]))
1945 1945 if d[1]:
1946 1946 err(_("%s index contains %d extra bytes") % (name, d[1]))
1947 1947
1948 1948 def checkversion(obj, name):
1949 1949 if obj.version != revlog.REVLOGV0:
1950 1950 if not revlogv1:
1951 1951 warn(_("warning: `%s' uses revlog format 1") % name)
1952 1952 elif revlogv1:
1953 1953 warn(_("warning: `%s' uses revlog format 0") % name)
1954 1954
1955 1955 revlogv1 = self.revlogversion != revlog.REVLOGV0
1956 1956 if self.ui.verbose or revlogv1 != self.revlogv1:
1957 1957 self.ui.status(_("repository uses revlog format %d\n") %
1958 1958 (revlogv1 and 1 or 0))
1959 1959
1960 1960 seen = {}
1961 1961 self.ui.status(_("checking changesets\n"))
1962 1962 checksize(self.changelog, "changelog")
1963 1963
1964 1964 for i in range(self.changelog.count()):
1965 1965 changesets += 1
1966 1966 n = self.changelog.node(i)
1967 1967 l = self.changelog.linkrev(n)
1968 1968 if l != i:
1969 1969 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1970 1970 if n in seen:
1971 1971 err(_("duplicate changeset at revision %d") % i)
1972 1972 seen[n] = 1
1973 1973
1974 1974 for p in self.changelog.parents(n):
1975 1975 if p not in self.changelog.nodemap:
1976 1976 err(_("changeset %s has unknown parent %s") %
1977 1977 (short(n), short(p)))
1978 1978 try:
1979 1979 changes = self.changelog.read(n)
1980 1980 except KeyboardInterrupt:
1981 1981 self.ui.warn(_("interrupted"))
1982 1982 raise
1983 1983 except Exception, inst:
1984 1984 err(_("unpacking changeset %s: %s") % (short(n), inst))
1985 1985 continue
1986 1986
1987 1987 neededmanifests[changes[0]] = n
1988 1988
1989 1989 for f in changes[3]:
1990 1990 filelinkrevs.setdefault(f, []).append(i)
1991 1991
1992 1992 seen = {}
1993 1993 self.ui.status(_("checking manifests\n"))
1994 1994 checkversion(self.manifest, "manifest")
1995 1995 checksize(self.manifest, "manifest")
1996 1996
1997 1997 for i in range(self.manifest.count()):
1998 1998 n = self.manifest.node(i)
1999 1999 l = self.manifest.linkrev(n)
2000 2000
2001 2001 if l < 0 or l >= self.changelog.count():
2002 2002 err(_("bad manifest link (%d) at revision %d") % (l, i))
2003 2003
2004 2004 if n in neededmanifests:
2005 2005 del neededmanifests[n]
2006 2006
2007 2007 if n in seen:
2008 2008 err(_("duplicate manifest at revision %d") % i)
2009 2009
2010 2010 seen[n] = 1
2011 2011
2012 2012 for p in self.manifest.parents(n):
2013 2013 if p not in self.manifest.nodemap:
2014 2014 err(_("manifest %s has unknown parent %s") %
2015 2015 (short(n), short(p)))
2016 2016
2017 2017 try:
2018 2018 delta = mdiff.patchtext(self.manifest.delta(n))
2019 2019 except KeyboardInterrupt:
2020 2020 self.ui.warn(_("interrupted"))
2021 2021 raise
2022 2022 except Exception, inst:
2023 2023 err(_("unpacking manifest %s: %s") % (short(n), inst))
2024 2024 continue
2025 2025
2026 2026 try:
2027 2027 ff = [ l.split('\0') for l in delta.splitlines() ]
2028 2028 for f, fn in ff:
2029 2029 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2030 2030 except (ValueError, TypeError), inst:
2031 2031 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2032 2032
2033 2033 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2034 2034
2035 2035 for m, c in neededmanifests.items():
2036 2036 err(_("Changeset %s refers to unknown manifest %s") %
2037 2037 (short(m), short(c)))
2038 2038 del neededmanifests
2039 2039
2040 2040 for f in filenodes:
2041 2041 if f not in filelinkrevs:
2042 2042 err(_("file %s in manifest but not in changesets") % f)
2043 2043
2044 2044 for f in filelinkrevs:
2045 2045 if f not in filenodes:
2046 2046 err(_("file %s in changeset but not in manifest") % f)
2047 2047
2048 2048 self.ui.status(_("checking files\n"))
2049 2049 ff = filenodes.keys()
2050 2050 ff.sort()
2051 2051 for f in ff:
2052 2052 if f == "/dev/null":
2053 2053 continue
2054 2054 files += 1
2055 2055 if not f:
2056 2056 err(_("file without name in manifest %s") % short(n))
2057 2057 continue
2058 2058 fl = self.file(f)
2059 2059 checkversion(fl, f)
2060 2060 checksize(fl, f)
2061 2061
2062 2062 nodes = {nullid: 1}
2063 2063 seen = {}
2064 2064 for i in range(fl.count()):
2065 2065 revisions += 1
2066 2066 n = fl.node(i)
2067 2067
2068 2068 if n in seen:
2069 2069 err(_("%s: duplicate revision %d") % (f, i))
2070 2070 if n not in filenodes[f]:
2071 2071 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2072 2072 else:
2073 2073 del filenodes[f][n]
2074 2074
2075 2075 flr = fl.linkrev(n)
2076 2076 if flr not in filelinkrevs.get(f, []):
2077 2077 err(_("%s:%s points to unexpected changeset %d")
2078 2078 % (f, short(n), flr))
2079 2079 else:
2080 2080 filelinkrevs[f].remove(flr)
2081 2081
2082 2082 # verify contents
2083 2083 try:
2084 2084 t = fl.read(n)
2085 2085 except KeyboardInterrupt:
2086 2086 self.ui.warn(_("interrupted"))
2087 2087 raise
2088 2088 except Exception, inst:
2089 2089 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2090 2090
2091 2091 # verify parents
2092 2092 (p1, p2) = fl.parents(n)
2093 2093 if p1 not in nodes:
2094 2094 err(_("file %s:%s unknown parent 1 %s") %
2095 2095 (f, short(n), short(p1)))
2096 2096 if p2 not in nodes:
2097 2097 err(_("file %s:%s unknown parent 2 %s") %
2098 2098 (f, short(n), short(p2)))
2099 2099 nodes[n] = 1
2100 2100
2101 2101 # cross-check
2102 2102 for node in filenodes[f]:
2103 2103 err(_("node %s in manifests not in %s") % (hex(node), f))
2104 2104
2105 2105 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2106 2106 (files, changesets, revisions))
2107 2107
2108 2108 if warnings[0]:
2109 2109 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2110 2110 if errors[0]:
2111 2111 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2112 2112 return 1
2113 2113
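One of the cross-checks verify() performs is that every filelog revision links back to a changeset that really lists the file. The same check on plain dicts, with both arguments as hypothetical pre-collected data rather than revlogs:

    def crosscheck_linkrevs(filelinkrevs, filelog_links):
        # filelinkrevs  -- filename -> changelog revs that claim to touch it
        # filelog_links -- filename -> link revs recorded in the filelog
        problems = []
        for f, links in filelog_links.items():
            expected = list(filelinkrevs.get(f, []))
            for flr in links:
                if flr in expected:
                    expected.remove(flr)
                else:
                    problems.append("%s: filelog rev links to unexpected"
                                    " changeset %d" % (f, flr))
        return problems

    print(crosscheck_linkrevs({"a.txt": [0, 2]}, {"a.txt": [0, 2]}))  # []
    print(crosscheck_linkrevs({"a.txt": [0]},    {"a.txt": [0, 3]}))  # 1 problem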
2114 2114 # used to avoid circular references so destructors work
2115 2115 def aftertrans(base):
2116 2116 p = base
2117 2117 def a():
2118 2118 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2119 2119 util.rename(os.path.join(p, "journal.dirstate"),
2120 2120 os.path.join(p, "undo.dirstate"))
2121 2121 return a
2122 2122
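aftertrans() captures only the repository path, not the repository object, so the callback it returns creates no reference cycle and destructors still run; when called after a successful transaction it keeps the journal around under the "undo" name so the transaction can later be rolled back. A runnable sketch of the same closure pattern (simplified to a single file; the real helper also renames journal.dirstate):

    import os, tempfile

    def make_aftertrans(base):
        def after():
            # preserve the transaction journal so it can be rolled back later
            os.rename(os.path.join(base, "journal"),
                      os.path.join(base, "undo"))
        return after

    d = tempfile.mkdtemp()
    open(os.path.join(d, "journal"), "w").close()
    make_aftertrans(d)()          # transaction finished: journal -> undo
    print(os.listdir(d))          # ['undo']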