Force editor to appear for an 'hg backout' if a message or logfile aren't...
john.levon@sun.com
r2267:d812d91c default
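This changeset makes 'hg backout' fall back to the interactive editor when neither a commit message (-m/--message) nor a logfile (-l/--logfile) is given: backout() now sets a force_editor flag on the options it passes to commit(), and commit() forwards that flag to repo.commit(). A minimal, hypothetical sketch of that flow in plain Python (the commit() stand-in below is illustrative, not Mercurial's real repo.commit API):

def backout(opts, do_commit):
    commit_opts = opts.copy()
    if not commit_opts.get('message') and not commit_opts.get('logfile'):
        # Neither -m/--message nor -l/--logfile given: pre-fill a message
        # and ask commit() to open the editor anyway.
        commit_opts['message'] = "Backed out changeset <node>"
        commit_opts['force_editor'] = True
    do_commit(commit_opts)

def commit(opts):
    # Stand-in for repo.commit(..., force_editor=...): open the editor when forced.
    if opts.get('force_editor') or not (opts.get('message') or opts.get('logfile')):
        print("would start $EDITOR, pre-filled with: %r" % opts.get('message'))
    else:
        print("committing with message: %r" % opts.get('message'))

backout({'message': '', 'logfile': ''}, commit)           # editor is forced
backout({'message': 'fix typo', 'logfile': ''}, commit)   # message used as-is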
@@ -1,3438 +1,3444 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival changegroup")
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted, unknown = repo.changes()
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def filterfiles(filters, files):
28 28 l = [x for x in files if x in filters]
29 29
30 30 for t in filters:
31 31 if t and t[-1] != "/":
32 32 t += "/"
33 33 l += [x for x in files if x.startswith(t)]
34 34 return l
35 35
36 36 def relpath(repo, args):
37 37 cwd = repo.getcwd()
38 38 if cwd:
39 39 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 40 return args
41 41
42 42 def matchpats(repo, pats=[], opts={}, head=''):
43 43 cwd = repo.getcwd()
44 44 if not pats and cwd:
45 45 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
46 46 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
47 47 cwd = ''
48 48 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
49 49 opts.get('exclude'), head)
50 50
51 51 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
52 52 files, matchfn, anypats = matchpats(repo, pats, opts, head)
53 53 exact = dict(zip(files, files))
54 54 def walk():
55 55 for src, fn in repo.walk(node=node, files=files, match=matchfn,
56 56 badmatch=badmatch):
57 57 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
58 58 return files, matchfn, walk()
59 59
60 60 def walk(repo, pats, opts, node=None, head='', badmatch=None):
61 61 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
62 62 for r in results:
63 63 yield r
64 64
65 65 def walkchangerevs(ui, repo, pats, opts):
66 66 '''Iterate over files and the revs they changed in.
67 67
68 68 Callers most commonly need to iterate backwards over the history
69 69 they are interested in. Doing so has awful (quadratic-looking)
70 70 performance, so we use iterators in a "windowed" way.
71 71
72 72 We walk a window of revisions in the desired order. Within the
73 73 window, we first walk forwards to gather data, then in the desired
74 74 order (usually backwards) to display it.
75 75
76 76 This function returns an (iterator, getchange, matchfn) tuple. The
77 77 getchange function returns the changelog entry for a numeric
78 78 revision. The iterator yields 3-tuples. They will be of one of
79 79 the following forms:
80 80
81 81 "window", incrementing, lastrev: stepping through a window,
82 82 positive if walking forwards through revs, last rev in the
83 83 sequence iterated over - use to reset state for the current window
84 84
85 85 "add", rev, fns: out-of-order traversal of the given file names
86 86 fns, which changed during revision rev - use to gather data for
87 87 possible display
88 88
89 89 "iter", rev, None: in-order traversal of the revs earlier iterated
90 90 over with "add" - use to display data'''
91 91
92 92 def increasing_windows(start, end, windowsize=8, sizelimit=512):
93 93 if start < end:
94 94 while start < end:
95 95 yield start, min(windowsize, end-start)
96 96 start += windowsize
97 97 if windowsize < sizelimit:
98 98 windowsize *= 2
99 99 else:
100 100 while start > end:
101 101 yield start, min(windowsize, start-end-1)
102 102 start -= windowsize
103 103 if windowsize < sizelimit:
104 104 windowsize *= 2
105 105
106 106
107 107 files, matchfn, anypats = matchpats(repo, pats, opts)
108 108
109 109 if repo.changelog.count() == 0:
110 110 return [], False, matchfn
111 111
112 112 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
113 113 wanted = {}
114 114 slowpath = anypats
115 115 fncache = {}
116 116
117 117 chcache = {}
118 118 def getchange(rev):
119 119 ch = chcache.get(rev)
120 120 if ch is None:
121 121 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
122 122 return ch
123 123
124 124 if not slowpath and not files:
125 125 # No files, no patterns. Display all revs.
126 126 wanted = dict(zip(revs, revs))
127 127 if not slowpath:
128 128 # Only files, no patterns. Check the history of each file.
129 129 def filerevgen(filelog):
130 130 for i, window in increasing_windows(filelog.count()-1, -1):
131 131 revs = []
132 132 for j in xrange(i - window, i + 1):
133 133 revs.append(filelog.linkrev(filelog.node(j)))
134 134 revs.reverse()
135 135 for rev in revs:
136 136 yield rev
137 137
138 138 minrev, maxrev = min(revs), max(revs)
139 139 for file_ in files:
140 140 filelog = repo.file(file_)
141 141 # A zero count may be a directory or deleted file, so
142 142 # try to find matching entries on the slow path.
143 143 if filelog.count() == 0:
144 144 slowpath = True
145 145 break
146 146 for rev in filerevgen(filelog):
147 147 if rev <= maxrev:
148 148 if rev < minrev:
149 149 break
150 150 fncache.setdefault(rev, [])
151 151 fncache[rev].append(file_)
152 152 wanted[rev] = 1
153 153 if slowpath:
154 154 # The slow path checks files modified in every changeset.
155 155 def changerevgen():
156 156 for i, window in increasing_windows(repo.changelog.count()-1, -1):
157 157 for j in xrange(i - window, i + 1):
158 158 yield j, getchange(j)[3]
159 159
160 160 for rev, changefiles in changerevgen():
161 161 matches = filter(matchfn, changefiles)
162 162 if matches:
163 163 fncache[rev] = matches
164 164 wanted[rev] = 1
165 165
166 166 def iterate():
167 167 for i, window in increasing_windows(0, len(revs)):
168 168 yield 'window', revs[0] < revs[-1], revs[-1]
169 169 nrevs = [rev for rev in revs[i:i+window]
170 170 if rev in wanted]
171 171 srevs = list(nrevs)
172 172 srevs.sort()
173 173 for rev in srevs:
174 174 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
175 175 yield 'add', rev, fns
176 176 for rev in nrevs:
177 177 yield 'iter', rev, None
178 178 return iterate(), getchange, matchfn
179 179
180 180 revrangesep = ':'
181 181
182 182 def revrange(ui, repo, revs, revlog=None):
183 183 """Yield revision as strings from a list of revision specifications."""
184 184 if revlog is None:
185 185 revlog = repo.changelog
186 186 revcount = revlog.count()
187 187 def fix(val, defval):
188 188 if not val:
189 189 return defval
190 190 try:
191 191 num = int(val)
192 192 if str(num) != val:
193 193 raise ValueError
194 194 if num < 0:
195 195 num += revcount
196 196 if num < 0:
197 197 num = 0
198 198 elif num >= revcount:
199 199 raise ValueError
200 200 except ValueError:
201 201 try:
202 202 num = repo.changelog.rev(repo.lookup(val))
203 203 except KeyError:
204 204 try:
205 205 num = revlog.rev(revlog.lookup(val))
206 206 except KeyError:
207 207 raise util.Abort(_('invalid revision identifier %s'), val)
208 208 return num
209 209 seen = {}
210 210 for spec in revs:
211 211 if spec.find(revrangesep) >= 0:
212 212 start, end = spec.split(revrangesep, 1)
213 213 start = fix(start, 0)
214 214 end = fix(end, revcount - 1)
215 215 step = start > end and -1 or 1
216 216 for rev in xrange(start, end+step, step):
217 217 if rev in seen:
218 218 continue
219 219 seen[rev] = 1
220 220 yield str(rev)
221 221 else:
222 222 rev = fix(spec, None)
223 223 if rev in seen:
224 224 continue
225 225 seen[rev] = 1
226 226 yield str(rev)
227 227
228 228 def make_filename(repo, r, pat, node=None,
229 229 total=None, seqno=None, revwidth=None, pathname=None):
230 230 node_expander = {
231 231 'H': lambda: hex(node),
232 232 'R': lambda: str(r.rev(node)),
233 233 'h': lambda: short(node),
234 234 }
235 235 expander = {
236 236 '%': lambda: '%',
237 237 'b': lambda: os.path.basename(repo.root),
238 238 }
239 239
240 240 try:
241 241 if node:
242 242 expander.update(node_expander)
243 243 if node and revwidth is not None:
244 244 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
245 245 if total is not None:
246 246 expander['N'] = lambda: str(total)
247 247 if seqno is not None:
248 248 expander['n'] = lambda: str(seqno)
249 249 if total is not None and seqno is not None:
250 250 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
251 251 if pathname is not None:
252 252 expander['s'] = lambda: os.path.basename(pathname)
253 253 expander['d'] = lambda: os.path.dirname(pathname) or '.'
254 254 expander['p'] = lambda: pathname
255 255
256 256 newname = []
257 257 patlen = len(pat)
258 258 i = 0
259 259 while i < patlen:
260 260 c = pat[i]
261 261 if c == '%':
262 262 i += 1
263 263 c = pat[i]
264 264 c = expander[c]()
265 265 newname.append(c)
266 266 i += 1
267 267 return ''.join(newname)
268 268 except KeyError, inst:
269 269 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
270 270 inst.args[0])
271 271
272 272 def make_file(repo, r, pat, node=None,
273 273 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
274 274 if not pat or pat == '-':
275 275 return 'w' in mode and sys.stdout or sys.stdin
276 276 if hasattr(pat, 'write') and 'w' in mode:
277 277 return pat
278 278 if hasattr(pat, 'read') and 'r' in mode:
279 279 return pat
280 280 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
281 281 pathname),
282 282 mode)
283 283
284 284 def write_bundle(cg, filename=None, compress=True):
285 285 """Write a bundle file and return its filename.
286 286
287 287 Existing files will not be overwritten.
288 288 If no filename is specified, a temporary file is created.
289 289 bz2 compression can be turned off.
290 290 The bundle file will be deleted in case of errors.
291 291 """
292 292 class nocompress(object):
293 293 def compress(self, x):
294 294 return x
295 295 def flush(self):
296 296 return ""
297 297
298 298 fh = None
299 299 cleanup = None
300 300 try:
301 301 if filename:
302 302 if os.path.exists(filename):
303 303 raise util.Abort(_("file '%s' already exists"), filename)
304 304 fh = open(filename, "wb")
305 305 else:
306 306 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
307 307 fh = os.fdopen(fd, "wb")
308 308 cleanup = filename
309 309
310 310 if compress:
311 311 fh.write("HG10")
312 312 z = bz2.BZ2Compressor(9)
313 313 else:
314 314 fh.write("HG10UN")
315 315 z = nocompress()
316 316 # parse the changegroup data, otherwise we will block
317 317 # in case of sshrepo because we don't know the end of the stream
318 318
319 319 # an empty chunkiter is the end of the changegroup
320 320 empty = False
321 321 while not empty:
322 322 empty = True
323 323 for chunk in changegroup.chunkiter(cg):
324 324 empty = False
325 325 fh.write(z.compress(changegroup.genchunk(chunk)))
326 326 fh.write(z.compress(changegroup.closechunk()))
327 327 fh.write(z.flush())
328 328 cleanup = None
329 329 return filename
330 330 finally:
331 331 if fh is not None:
332 332 fh.close()
333 333 if cleanup is not None:
334 334 os.unlink(cleanup)
335 335
336 336 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
337 337 changes=None, text=False, opts={}):
338 338 if not node1:
339 339 node1 = repo.dirstate.parents()[0]
340 340 # reading the data for node1 early allows it to play nicely
341 341 # with repo.changes and the revlog cache.
342 342 change = repo.changelog.read(node1)
343 343 mmap = repo.manifest.read(change[0])
344 344 date1 = util.datestr(change[2])
345 345
346 346 if not changes:
347 347 changes = repo.changes(node1, node2, files, match=match)
348 348 modified, added, removed, deleted, unknown = changes
349 349 if files:
350 350 modified, added, removed = map(lambda x: filterfiles(files, x),
351 351 (modified, added, removed))
352 352
353 353 if not modified and not added and not removed:
354 354 return
355 355
356 356 if node2:
357 357 change = repo.changelog.read(node2)
358 358 mmap2 = repo.manifest.read(change[0])
359 359 date2 = util.datestr(change[2])
360 360 def read(f):
361 361 return repo.file(f).read(mmap2[f])
362 362 else:
363 363 date2 = util.datestr()
364 364 def read(f):
365 365 return repo.wread(f)
366 366
367 367 if ui.quiet:
368 368 r = None
369 369 else:
370 370 hexfunc = ui.verbose and hex or short
371 371 r = [hexfunc(node) for node in [node1, node2] if node]
372 372
373 373 diffopts = ui.diffopts()
374 374 showfunc = opts.get('show_function') or diffopts['showfunc']
375 375 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
376 376 for f in modified:
377 377 to = None
378 378 if f in mmap:
379 379 to = repo.file(f).read(mmap[f])
380 380 tn = read(f)
381 381 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
382 382 showfunc=showfunc, ignorews=ignorews))
383 383 for f in added:
384 384 to = None
385 385 tn = read(f)
386 386 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
387 387 showfunc=showfunc, ignorews=ignorews))
388 388 for f in removed:
389 389 to = repo.file(f).read(mmap[f])
390 390 tn = None
391 391 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
392 392 showfunc=showfunc, ignorews=ignorews))
393 393
394 394 def trimuser(ui, name, rev, revcache):
395 395 """trim the name of the user who committed a change"""
396 396 user = revcache.get(rev)
397 397 if user is None:
398 398 user = revcache[rev] = ui.shortuser(name)
399 399 return user
400 400
401 401 class changeset_printer(object):
402 402 '''show changeset information when templating is not requested.'''
403 403
404 404 def __init__(self, ui, repo):
405 405 self.ui = ui
406 406 self.repo = repo
407 407
408 408 def show(self, rev=0, changenode=None, brinfo=None):
409 409 '''show a single changeset or file revision'''
410 410 log = self.repo.changelog
411 411 if changenode is None:
412 412 changenode = log.node(rev)
413 413 elif not rev:
414 414 rev = log.rev(changenode)
415 415
416 416 if self.ui.quiet:
417 417 self.ui.write("%d:%s\n" % (rev, short(changenode)))
418 418 return
419 419
420 420 changes = log.read(changenode)
421 421 date = util.datestr(changes[2])
422 422
423 423 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
424 424 for p in log.parents(changenode)
425 425 if self.ui.debugflag or p != nullid]
426 426 if (not self.ui.debugflag and len(parents) == 1 and
427 427 parents[0][0] == rev-1):
428 428 parents = []
429 429
430 430 if self.ui.verbose:
431 431 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
432 432 else:
433 433 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
434 434
435 435 for tag in self.repo.nodetags(changenode):
436 436 self.ui.status(_("tag: %s\n") % tag)
437 437 for parent in parents:
438 438 self.ui.write(_("parent: %d:%s\n") % parent)
439 439
440 440 if brinfo and changenode in brinfo:
441 441 br = brinfo[changenode]
442 442 self.ui.write(_("branch: %s\n") % " ".join(br))
443 443
444 444 self.ui.debug(_("manifest: %d:%s\n") %
445 445 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
446 446 self.ui.status(_("user: %s\n") % changes[1])
447 447 self.ui.status(_("date: %s\n") % date)
448 448
449 449 if self.ui.debugflag:
450 450 files = self.repo.changes(log.parents(changenode)[0], changenode)
451 451 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
452 452 files):
453 453 if value:
454 454 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
455 455 else:
456 456 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
457 457
458 458 description = changes[4].strip()
459 459 if description:
460 460 if self.ui.verbose:
461 461 self.ui.status(_("description:\n"))
462 462 self.ui.status(description)
463 463 self.ui.status("\n\n")
464 464 else:
465 465 self.ui.status(_("summary: %s\n") %
466 466 description.splitlines()[0])
467 467 self.ui.status("\n")
468 468
469 469 def show_changeset(ui, repo, opts):
470 470 '''show one changeset. uses template or regular display. caller
471 471 can pass in 'style' and 'template' options in opts.'''
472 472
473 473 tmpl = opts.get('template')
474 474 if tmpl:
475 475 tmpl = templater.parsestring(tmpl, quoted=False)
476 476 else:
477 477 tmpl = ui.config('ui', 'logtemplate')
478 478 if tmpl: tmpl = templater.parsestring(tmpl)
479 479 mapfile = opts.get('style') or ui.config('ui', 'style')
480 480 if tmpl or mapfile:
481 481 if mapfile:
482 482 if not os.path.isfile(mapfile):
483 483 mapname = templater.templatepath('map-cmdline.' + mapfile)
484 484 if not mapname: mapname = templater.templatepath(mapfile)
485 485 if mapname: mapfile = mapname
486 486 try:
487 487 t = templater.changeset_templater(ui, repo, mapfile)
488 488 except SyntaxError, inst:
489 489 raise util.Abort(inst.args[0])
490 490 if tmpl: t.use_template(tmpl)
491 491 return t
492 492 return changeset_printer(ui, repo)
493 493
494 494 def show_version(ui):
495 495 """output version and copyright information"""
496 496 ui.write(_("Mercurial Distributed SCM (version %s)\n")
497 497 % version.get_version())
498 498 ui.status(_(
499 499 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
500 500 "This is free software; see the source for copying conditions. "
501 501 "There is NO\nwarranty; "
502 502 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
503 503 ))
504 504
505 505 def help_(ui, cmd=None, with_version=False):
506 506 """show help for a given command or all commands"""
507 507 option_lists = []
508 508 if cmd and cmd != 'shortlist':
509 509 if with_version:
510 510 show_version(ui)
511 511 ui.write('\n')
512 512 aliases, i = find(cmd)
513 513 # synopsis
514 514 ui.write("%s\n\n" % i[2])
515 515
516 516 # description
517 517 doc = i[0].__doc__
518 518 if not doc:
519 519 doc = _("(No help text available)")
520 520 if ui.quiet:
521 521 doc = doc.splitlines(0)[0]
522 522 ui.write("%s\n" % doc.rstrip())
523 523
524 524 if not ui.quiet:
525 525 # aliases
526 526 if len(aliases) > 1:
527 527 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
528 528
529 529 # options
530 530 if i[1]:
531 531 option_lists.append(("options", i[1]))
532 532
533 533 else:
534 534 # program name
535 535 if ui.verbose or with_version:
536 536 show_version(ui)
537 537 else:
538 538 ui.status(_("Mercurial Distributed SCM\n"))
539 539 ui.status('\n')
540 540
541 541 # list of commands
542 542 if cmd == "shortlist":
543 543 ui.status(_('basic commands (use "hg help" '
544 544 'for the full list or option "-v" for details):\n\n'))
545 545 elif ui.verbose:
546 546 ui.status(_('list of commands:\n\n'))
547 547 else:
548 548 ui.status(_('list of commands (use "hg help -v" '
549 549 'to show aliases and global options):\n\n'))
550 550
551 551 h = {}
552 552 cmds = {}
553 553 for c, e in table.items():
554 554 f = c.split("|")[0]
555 555 if cmd == "shortlist" and not f.startswith("^"):
556 556 continue
557 557 f = f.lstrip("^")
558 558 if not ui.debugflag and f.startswith("debug"):
559 559 continue
560 560 doc = e[0].__doc__
561 561 if not doc:
562 562 doc = _("(No help text available)")
563 563 h[f] = doc.splitlines(0)[0].rstrip()
564 564 cmds[f] = c.lstrip("^")
565 565
566 566 fns = h.keys()
567 567 fns.sort()
568 568 m = max(map(len, fns))
569 569 for f in fns:
570 570 if ui.verbose:
571 571 commands = cmds[f].replace("|",", ")
572 572 ui.write(" %s:\n %s\n"%(commands, h[f]))
573 573 else:
574 574 ui.write(' %-*s %s\n' % (m, f, h[f]))
575 575
576 576 # global options
577 577 if ui.verbose:
578 578 option_lists.append(("global options", globalopts))
579 579
580 580 # list all option lists
581 581 opt_output = []
582 582 for title, options in option_lists:
583 583 opt_output.append(("\n%s:\n" % title, None))
584 584 for shortopt, longopt, default, desc in options:
585 585 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
586 586 longopt and " --%s" % longopt),
587 587 "%s%s" % (desc,
588 588 default
589 589 and _(" (default: %s)") % default
590 590 or "")))
591 591
592 592 if opt_output:
593 593 opts_len = max([len(line[0]) for line in opt_output if line[1]])
594 594 for first, second in opt_output:
595 595 if second:
596 596 ui.write(" %-*s %s\n" % (opts_len, first, second))
597 597 else:
598 598 ui.write("%s\n" % first)
599 599
600 600 # Commands start here, listed alphabetically
601 601
602 602 def add(ui, repo, *pats, **opts):
603 603 """add the specified files on the next commit
604 604
605 605 Schedule files to be version controlled and added to the repository.
606 606
607 607 The files will be added to the repository at the next commit.
608 608
609 609 If no names are given, add all files in the repository.
610 610 """
611 611
612 612 names = []
613 613 for src, abs, rel, exact in walk(repo, pats, opts):
614 614 if exact:
615 615 if ui.verbose:
616 616 ui.status(_('adding %s\n') % rel)
617 617 names.append(abs)
618 618 elif repo.dirstate.state(abs) == '?':
619 619 ui.status(_('adding %s\n') % rel)
620 620 names.append(abs)
621 621 repo.add(names)
622 622
623 623 def addremove(ui, repo, *pats, **opts):
624 624 """add all new files, delete all missing files (DEPRECATED)
625 625
626 626 (DEPRECATED)
627 627 Add all new files and remove all missing files from the repository.
628 628
629 629 New files are ignored if they match any of the patterns in .hgignore. As
630 630 with add, these changes take effect at the next commit.
631 631
632 632 This command is now deprecated and will be removed in a future
633 633 release. Please use add and remove --after instead.
634 634 """
635 635 ui.warn(_('(the addremove command is deprecated; use add and remove '
636 636 '--after instead)\n'))
637 637 return addremove_lock(ui, repo, pats, opts)
638 638
639 639 def addremove_lock(ui, repo, pats, opts, wlock=None):
640 640 add, remove = [], []
641 641 for src, abs, rel, exact in walk(repo, pats, opts):
642 642 if src == 'f' and repo.dirstate.state(abs) == '?':
643 643 add.append(abs)
644 644 if ui.verbose or not exact:
645 645 ui.status(_('adding %s\n') % ((pats and rel) or abs))
646 646 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
647 647 remove.append(abs)
648 648 if ui.verbose or not exact:
649 649 ui.status(_('removing %s\n') % ((pats and rel) or abs))
650 650 repo.add(add, wlock=wlock)
651 651 repo.remove(remove, wlock=wlock)
652 652
653 653 def annotate(ui, repo, *pats, **opts):
654 654 """show changeset information per file line
655 655
656 656 List changes in files, showing the revision id responsible for each line
657 657
658 658 This command is useful to discover who made a change or when a change took
659 659 place.
660 660
661 661 Without the -a option, annotate will avoid processing files it
662 662 detects as binary. With -a, annotate will generate an annotation
663 663 anyway, probably with undesirable results.
664 664 """
665 665 def getnode(rev):
666 666 return short(repo.changelog.node(rev))
667 667
668 668 ucache = {}
669 669 def getname(rev):
670 670 cl = repo.changelog.read(repo.changelog.node(rev))
671 671 return trimuser(ui, cl[1], rev, ucache)
672 672
673 673 dcache = {}
674 674 def getdate(rev):
675 675 datestr = dcache.get(rev)
676 676 if datestr is None:
677 677 cl = repo.changelog.read(repo.changelog.node(rev))
678 678 datestr = dcache[rev] = util.datestr(cl[2])
679 679 return datestr
680 680
681 681 if not pats:
682 682 raise util.Abort(_('at least one file name or pattern required'))
683 683
684 684 opmap = [['user', getname], ['number', str], ['changeset', getnode],
685 685 ['date', getdate]]
686 686 if not opts['user'] and not opts['changeset'] and not opts['date']:
687 687 opts['number'] = 1
688 688
689 689 if opts['rev']:
690 690 node = repo.changelog.lookup(opts['rev'])
691 691 else:
692 692 node = repo.dirstate.parents()[0]
693 693 change = repo.changelog.read(node)
694 694 mmap = repo.manifest.read(change[0])
695 695
696 696 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
697 697 f = repo.file(abs)
698 698 if not opts['text'] and util.binary(f.read(mmap[abs])):
699 699 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
700 700 continue
701 701
702 702 lines = f.annotate(mmap[abs])
703 703 pieces = []
704 704
705 705 for o, f in opmap:
706 706 if opts[o]:
707 707 l = [f(n) for n, dummy in lines]
708 708 if l:
709 709 m = max(map(len, l))
710 710 pieces.append(["%*s" % (m, x) for x in l])
711 711
712 712 if pieces:
713 713 for p, l in zip(zip(*pieces), lines):
714 714 ui.write("%s: %s" % (" ".join(p), l[1]))
715 715
716 716 def archive(ui, repo, dest, **opts):
717 717 '''create unversioned archive of a repository revision
718 718
719 719 By default, the revision used is the parent of the working
720 720 directory; use "-r" to specify a different revision.
721 721
722 722 To specify the type of archive to create, use "-t". Valid
723 723 types are:
724 724
725 725 "files" (default): a directory full of files
726 726 "tar": tar archive, uncompressed
727 727 "tbz2": tar archive, compressed using bzip2
728 728 "tgz": tar archive, compressed using gzip
729 729 "uzip": zip archive, uncompressed
730 730 "zip": zip archive, compressed using deflate
731 731
732 732 The exact name of the destination archive or directory is given
733 733 using a format string; see "hg help export" for details.
734 734
735 735 Each member added to an archive file has a directory prefix
736 736 prepended. Use "-p" to specify a format string for the prefix.
737 737 The default is the basename of the archive, with suffixes removed.
738 738 '''
739 739
740 740 if opts['rev']:
741 741 node = repo.lookup(opts['rev'])
742 742 else:
743 743 node, p2 = repo.dirstate.parents()
744 744 if p2 != nullid:
745 745 raise util.Abort(_('uncommitted merge - please provide a '
746 746 'specific revision'))
747 747
748 748 dest = make_filename(repo, repo.changelog, dest, node)
749 749 prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
750 750 if os.path.realpath(dest) == repo.root:
751 751 raise util.Abort(_('repository root cannot be destination'))
752 752 dummy, matchfn, dummy = matchpats(repo, [], opts)
753 753 archival.archive(repo, dest, node, opts.get('type') or 'files',
754 754 not opts['no_decode'], matchfn, prefix)
755 755
756 756 def backout(ui, repo, rev, **opts):
757 757 '''reverse effect of earlier changeset
758 758
759 759 Commit the backed out changes as a new changeset. The new
760 760 changeset is a child of the backed out changeset.
761 761
762 762 If you back out a changeset other than the tip, a new head is
763 763 created. This head is the parent of the working directory. If
764 764 you back out an old changeset, your working directory will appear
765 765 old after the backout. You should merge the backout changeset
766 766 with another head.
767 767
768 768 The --merge option remembers the parent of the working directory
769 769 before starting the backout, then merges the new head with that
770 770 changeset afterwards. This saves you from doing the merge by
771 771 hand. The result of this merge is not committed, as for a normal
772 772 merge.'''
773 773
774 774 bail_if_changed(repo)
775 775 op1, op2 = repo.dirstate.parents()
776 776 if op2 != nullid:
777 777 raise util.Abort(_('outstanding uncommitted merge'))
778 778 node = repo.lookup(rev)
779 779 parent, p2 = repo.changelog.parents(node)
780 780 if parent == nullid:
781 781 raise util.Abort(_('cannot back out a change with no parents'))
782 782 if p2 != nullid:
783 783 raise util.Abort(_('cannot back out a merge'))
784 784 repo.update(node, force=True, show_stats=False)
785 785 revert_opts = opts.copy()
786 786 revert_opts['rev'] = hex(parent)
787 787 revert(ui, repo, **revert_opts)
788 788 commit_opts = opts.copy()
789 789 commit_opts['addremove'] = False
790 790 if not commit_opts['message'] and not commit_opts['logfile']:
791 791 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
792 commit_opts['force_editor'] = True
792 793 commit(ui, repo, **commit_opts)
793 794 def nice(node):
794 795 return '%d:%s' % (repo.changelog.rev(node), short(node))
795 796 ui.status(_('changeset %s backs out changeset %s\n') %
796 797 (nice(repo.changelog.tip()), nice(node)))
797 798 if opts['merge'] and op1 != node:
798 799 ui.status(_('merging with changeset %s\n') % nice(op1))
799 800 update(ui, repo, hex(op1), **opts)
800 801
801 802 def bundle(ui, repo, fname, dest="default-push", **opts):
802 803 """create a changegroup file
803 804
804 805 Generate a compressed changegroup file collecting all changesets
805 806 not found in the other repository.
806 807
807 808 This file can then be transferred using conventional means and
808 809 applied to another repository with the unbundle command. This is
809 810 useful when native push and pull are not available or when
810 811 exporting an entire repository is undesirable. The standard file
811 812 extension is ".hg".
812 813
813 814 Unlike import/export, this exactly preserves all changeset
814 815 contents including permissions, rename data, and revision history.
815 816 """
816 817 dest = ui.expandpath(dest)
817 818 other = hg.repository(ui, dest)
818 819 o = repo.findoutgoing(other, force=opts['force'])
819 820 cg = repo.changegroup(o, 'bundle')
820 821 write_bundle(cg, fname)
821 822
822 823 def cat(ui, repo, file1, *pats, **opts):
823 824 """output the latest or given revisions of files
824 825
825 826 Print the specified files as they were at the given revision.
826 827 If no revision is given then the tip is used.
827 828
828 829 Output may be to a file, in which case the name of the file is
829 830 given using a format string. The formatting rules are the same as
830 831 for the export command, with the following additions:
831 832
832 833 %s basename of file being printed
833 834 %d dirname of file being printed, or '.' if in repo root
834 835 %p root-relative path name of file being printed
835 836 """
836 837 mf = {}
837 838 rev = opts['rev']
838 839 if rev:
839 840 node = repo.lookup(rev)
840 841 else:
841 842 node = repo.changelog.tip()
842 843 change = repo.changelog.read(node)
843 844 mf = repo.manifest.read(change[0])
844 845 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
845 846 r = repo.file(abs)
846 847 n = mf[abs]
847 848 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
848 849 fp.write(r.read(n))
849 850
850 851 def clone(ui, source, dest=None, **opts):
851 852 """make a copy of an existing repository
852 853
853 854 Create a copy of an existing repository in a new directory.
854 855
855 856 If no destination directory name is specified, it defaults to the
856 857 basename of the source.
857 858
858 859 The location of the source is added to the new repository's
859 860 .hg/hgrc file, as the default to be used for future pulls.
860 861
861 862 For efficiency, hardlinks are used for cloning whenever the source
862 863 and destination are on the same filesystem. Some filesystems,
863 864 such as AFS, implement hardlinking incorrectly, but do not report
864 865 errors. In these cases, use the --pull option to avoid
865 866 hardlinking.
866 867
867 868 See pull for valid source format details.
868 869 """
869 870 if dest is None:
870 871 dest = os.path.basename(os.path.normpath(source))
871 872
872 873 if os.path.exists(dest):
873 874 raise util.Abort(_("destination '%s' already exists"), dest)
874 875
875 876 dest = os.path.realpath(dest)
876 877
877 878 class Dircleanup(object):
878 879 def __init__(self, dir_):
879 880 self.rmtree = shutil.rmtree
880 881 self.dir_ = dir_
881 882 os.mkdir(dir_)
882 883 def close(self):
883 884 self.dir_ = None
884 885 def __del__(self):
885 886 if self.dir_:
886 887 self.rmtree(self.dir_, True)
887 888
888 889 if opts['ssh']:
889 890 ui.setconfig("ui", "ssh", opts['ssh'])
890 891 if opts['remotecmd']:
891 892 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
892 893
893 894 source = ui.expandpath(source)
894 895
895 896 d = Dircleanup(dest)
896 897 abspath = source
897 898 other = hg.repository(ui, source)
898 899
899 900 copy = False
900 901 if other.dev() != -1:
901 902 abspath = os.path.abspath(source)
902 903 if not opts['pull'] and not opts['rev']:
903 904 copy = True
904 905
905 906 if copy:
906 907 try:
907 908 # we use a lock here because if we race with commit, we
908 909 # can end up with extra data in the cloned revlogs that's
909 910 # not pointed to by changesets, thus causing verify to
910 911 # fail
911 912 l1 = other.lock()
912 913 except lock.LockException:
913 914 copy = False
914 915
915 916 if copy:
916 917 # we lock here to avoid premature writing to the target
917 918 os.mkdir(os.path.join(dest, ".hg"))
918 919 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
919 920
920 921 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
921 922 for f in files.split():
922 923 src = os.path.join(source, ".hg", f)
923 924 dst = os.path.join(dest, ".hg", f)
924 925 try:
925 926 util.copyfiles(src, dst)
926 927 except OSError, inst:
927 928 if inst.errno != errno.ENOENT:
928 929 raise
929 930
930 931 repo = hg.repository(ui, dest)
931 932
932 933 else:
933 934 revs = None
934 935 if opts['rev']:
935 936 if not other.local():
936 937 error = _("clone -r not supported yet for remote repositories.")
937 938 raise util.Abort(error)
938 939 else:
939 940 revs = [other.lookup(rev) for rev in opts['rev']]
940 941 repo = hg.repository(ui, dest, create=1)
941 942 repo.pull(other, heads = revs)
942 943
943 944 f = repo.opener("hgrc", "w", text=True)
944 945 f.write("[paths]\n")
945 946 f.write("default = %s\n" % abspath)
946 947 f.close()
947 948
948 949 if not opts['noupdate']:
949 950 update(repo.ui, repo)
950 951
951 952 d.close()
952 953
953 954 def commit(ui, repo, *pats, **opts):
954 955 """commit the specified files or all outstanding changes
955 956
956 957 Commit changes to the given files into the repository.
957 958
958 959 If a list of files is omitted, all changes reported by "hg status"
959 960 will be committed.
960 961
961 962 If no commit message is specified, the editor configured in your hgrc
962 963 or in the EDITOR environment variable is started to enter a message.
963 964 """
964 965 message = opts['message']
965 966 logfile = opts['logfile']
966 967
967 968 if message and logfile:
968 969 raise util.Abort(_('options --message and --logfile are mutually '
969 970 'exclusive'))
970 971 if not message and logfile:
971 972 try:
972 973 if logfile == '-':
973 974 message = sys.stdin.read()
974 975 else:
975 976 message = open(logfile).read()
976 977 except IOError, inst:
977 978 raise util.Abort(_("can't read commit message '%s': %s") %
978 979 (logfile, inst.strerror))
979 980
980 981 if opts['addremove']:
981 982 addremove_lock(ui, repo, pats, opts)
982 983 fns, match, anypats = matchpats(repo, pats, opts)
983 984 if pats:
984 985 modified, added, removed, deleted, unknown = (
985 986 repo.changes(files=fns, match=match))
986 987 files = modified + added + removed
987 988 else:
988 989 files = []
989 990 try:
990 repo.commit(files, message, opts['user'], opts['date'], match)
991 try:
992 force_editor = opts['force_editor']
993 except KeyError:
994 force_editor = False
995 repo.commit(files, message, opts['user'], opts['date'], match,
996 force_editor=force_editor)
991 997 except ValueError, inst:
992 998 raise util.Abort(str(inst))
993 999
994 1000 def docopy(ui, repo, pats, opts, wlock):
995 1001 # called with the repo lock held
996 1002 cwd = repo.getcwd()
997 1003 errors = 0
998 1004 copied = []
999 1005 targets = {}
1000 1006
1001 1007 def okaytocopy(abs, rel, exact):
1002 1008 reasons = {'?': _('is not managed'),
1003 1009 'a': _('has been marked for add'),
1004 1010 'r': _('has been marked for remove')}
1005 1011 state = repo.dirstate.state(abs)
1006 1012 reason = reasons.get(state)
1007 1013 if reason:
1008 1014 if state == 'a':
1009 1015 origsrc = repo.dirstate.copied(abs)
1010 1016 if origsrc is not None:
1011 1017 return origsrc
1012 1018 if exact:
1013 1019 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1014 1020 else:
1015 1021 return abs
1016 1022
1017 1023 def copy(origsrc, abssrc, relsrc, target, exact):
1018 1024 abstarget = util.canonpath(repo.root, cwd, target)
1019 1025 reltarget = util.pathto(cwd, abstarget)
1020 1026 prevsrc = targets.get(abstarget)
1021 1027 if prevsrc is not None:
1022 1028 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1023 1029 (reltarget, abssrc, prevsrc))
1024 1030 return
1025 1031 if (not opts['after'] and os.path.exists(reltarget) or
1026 1032 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1027 1033 if not opts['force']:
1028 1034 ui.warn(_('%s: not overwriting - file exists\n') %
1029 1035 reltarget)
1030 1036 return
1031 1037 if not opts['after']:
1032 1038 os.unlink(reltarget)
1033 1039 if opts['after']:
1034 1040 if not os.path.exists(reltarget):
1035 1041 return
1036 1042 else:
1037 1043 targetdir = os.path.dirname(reltarget) or '.'
1038 1044 if not os.path.isdir(targetdir):
1039 1045 os.makedirs(targetdir)
1040 1046 try:
1041 1047 restore = repo.dirstate.state(abstarget) == 'r'
1042 1048 if restore:
1043 1049 repo.undelete([abstarget], wlock)
1044 1050 try:
1045 1051 shutil.copyfile(relsrc, reltarget)
1046 1052 shutil.copymode(relsrc, reltarget)
1047 1053 restore = False
1048 1054 finally:
1049 1055 if restore:
1050 1056 repo.remove([abstarget], wlock)
1051 1057 except shutil.Error, inst:
1052 1058 raise util.Abort(str(inst))
1053 1059 except IOError, inst:
1054 1060 if inst.errno == errno.ENOENT:
1055 1061 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1056 1062 else:
1057 1063 ui.warn(_('%s: cannot copy - %s\n') %
1058 1064 (relsrc, inst.strerror))
1059 1065 errors += 1
1060 1066 return
1061 1067 if ui.verbose or not exact:
1062 1068 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1063 1069 targets[abstarget] = abssrc
1064 1070 if abstarget != origsrc:
1065 1071 repo.copy(origsrc, abstarget, wlock)
1066 1072 copied.append((abssrc, relsrc, exact))
1067 1073
1068 1074 def targetpathfn(pat, dest, srcs):
1069 1075 if os.path.isdir(pat):
1070 1076 abspfx = util.canonpath(repo.root, cwd, pat)
1071 1077 if destdirexists:
1072 1078 striplen = len(os.path.split(abspfx)[0])
1073 1079 else:
1074 1080 striplen = len(abspfx)
1075 1081 if striplen:
1076 1082 striplen += len(os.sep)
1077 1083 res = lambda p: os.path.join(dest, p[striplen:])
1078 1084 elif destdirexists:
1079 1085 res = lambda p: os.path.join(dest, os.path.basename(p))
1080 1086 else:
1081 1087 res = lambda p: dest
1082 1088 return res
1083 1089
1084 1090 def targetpathafterfn(pat, dest, srcs):
1085 1091 if util.patkind(pat, None)[0]:
1086 1092 # a mercurial pattern
1087 1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1088 1094 else:
1089 1095 abspfx = util.canonpath(repo.root, cwd, pat)
1090 1096 if len(abspfx) < len(srcs[0][0]):
1091 1097 # A directory. Either the target path contains the last
1092 1098 # component of the source path or it does not.
1093 1099 def evalpath(striplen):
1094 1100 score = 0
1095 1101 for s in srcs:
1096 1102 t = os.path.join(dest, s[0][striplen:])
1097 1103 if os.path.exists(t):
1098 1104 score += 1
1099 1105 return score
1100 1106
1101 1107 striplen = len(abspfx)
1102 1108 if striplen:
1103 1109 striplen += len(os.sep)
1104 1110 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1105 1111 score = evalpath(striplen)
1106 1112 striplen1 = len(os.path.split(abspfx)[0])
1107 1113 if striplen1:
1108 1114 striplen1 += len(os.sep)
1109 1115 if evalpath(striplen1) > score:
1110 1116 striplen = striplen1
1111 1117 res = lambda p: os.path.join(dest, p[striplen:])
1112 1118 else:
1113 1119 # a file
1114 1120 if destdirexists:
1115 1121 res = lambda p: os.path.join(dest, os.path.basename(p))
1116 1122 else:
1117 1123 res = lambda p: dest
1118 1124 return res
1119 1125
1120 1126
1121 1127 pats = list(pats)
1122 1128 if not pats:
1123 1129 raise util.Abort(_('no source or destination specified'))
1124 1130 if len(pats) == 1:
1125 1131 raise util.Abort(_('no destination specified'))
1126 1132 dest = pats.pop()
1127 1133 destdirexists = os.path.isdir(dest)
1128 1134 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1129 1135 raise util.Abort(_('with multiple sources, destination must be an '
1130 1136 'existing directory'))
1131 1137 if opts['after']:
1132 1138 tfn = targetpathafterfn
1133 1139 else:
1134 1140 tfn = targetpathfn
1135 1141 copylist = []
1136 1142 for pat in pats:
1137 1143 srcs = []
1138 1144 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1139 1145 origsrc = okaytocopy(abssrc, relsrc, exact)
1140 1146 if origsrc:
1141 1147 srcs.append((origsrc, abssrc, relsrc, exact))
1142 1148 if not srcs:
1143 1149 continue
1144 1150 copylist.append((tfn(pat, dest, srcs), srcs))
1145 1151 if not copylist:
1146 1152 raise util.Abort(_('no files to copy'))
1147 1153
1148 1154 for targetpath, srcs in copylist:
1149 1155 for origsrc, abssrc, relsrc, exact in srcs:
1150 1156 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1151 1157
1152 1158 if errors:
1153 1159 ui.warn(_('(consider using --after)\n'))
1154 1160 return errors, copied
1155 1161
1156 1162 def copy(ui, repo, *pats, **opts):
1157 1163 """mark files as copied for the next commit
1158 1164
1159 1165 Mark dest as having copies of source files. If dest is a
1160 1166 directory, copies are put in that directory. If dest is a file,
1161 1167 there can only be one source.
1162 1168
1163 1169 By default, this command copies the contents of files as they
1164 1170 stand in the working directory. If invoked with --after, the
1165 1171 operation is recorded, but no copying is performed.
1166 1172
1167 1173 This command takes effect in the next commit.
1168 1174
1169 1175 NOTE: This command should be treated as experimental. While it
1170 1176 should properly record copied files, this information is not yet
1171 1177 fully used by merge, nor fully reported by log.
1172 1178 """
1173 1179 wlock = repo.wlock(0)
1174 1180 errs, copied = docopy(ui, repo, pats, opts, wlock)
1175 1181 return errs
1176 1182
1177 1183 def debugancestor(ui, index, rev1, rev2):
1178 1184 """find the ancestor revision of two revisions in a given index"""
1179 1185 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1180 1186 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1181 1187 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1182 1188
1183 1189 def debugcomplete(ui, cmd='', **opts):
1184 1190 """returns the completion list associated with the given command"""
1185 1191
1186 1192 if opts['options']:
1187 1193 options = []
1188 1194 otables = [globalopts]
1189 1195 if cmd:
1190 1196 aliases, entry = find(cmd)
1191 1197 otables.append(entry[1])
1192 1198 for t in otables:
1193 1199 for o in t:
1194 1200 if o[0]:
1195 1201 options.append('-%s' % o[0])
1196 1202 options.append('--%s' % o[1])
1197 1203 ui.write("%s\n" % "\n".join(options))
1198 1204 return
1199 1205
1200 1206 clist = findpossible(cmd).keys()
1201 1207 clist.sort()
1202 1208 ui.write("%s\n" % "\n".join(clist))
1203 1209
1204 1210 def debugrebuildstate(ui, repo, rev=None):
1205 1211 """rebuild the dirstate as it would look like for the given revision"""
1206 1212 if not rev:
1207 1213 rev = repo.changelog.tip()
1208 1214 else:
1209 1215 rev = repo.lookup(rev)
1210 1216 change = repo.changelog.read(rev)
1211 1217 n = change[0]
1212 1218 files = repo.manifest.readflags(n)
1213 1219 wlock = repo.wlock()
1214 1220 repo.dirstate.rebuild(rev, files.iteritems())
1215 1221
1216 1222 def debugcheckstate(ui, repo):
1217 1223 """validate the correctness of the current dirstate"""
1218 1224 parent1, parent2 = repo.dirstate.parents()
1219 1225 repo.dirstate.read()
1220 1226 dc = repo.dirstate.map
1221 1227 keys = dc.keys()
1222 1228 keys.sort()
1223 1229 m1n = repo.changelog.read(parent1)[0]
1224 1230 m2n = repo.changelog.read(parent2)[0]
1225 1231 m1 = repo.manifest.read(m1n)
1226 1232 m2 = repo.manifest.read(m2n)
1227 1233 errors = 0
1228 1234 for f in dc:
1229 1235 state = repo.dirstate.state(f)
1230 1236 if state in "nr" and f not in m1:
1231 1237 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1232 1238 errors += 1
1233 1239 if state in "a" and f in m1:
1234 1240 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1235 1241 errors += 1
1236 1242 if state in "m" and f not in m1 and f not in m2:
1237 1243 ui.warn(_("%s in state %s, but not in either manifest\n") %
1238 1244 (f, state))
1239 1245 errors += 1
1240 1246 for f in m1:
1241 1247 state = repo.dirstate.state(f)
1242 1248 if state not in "nrm":
1243 1249 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1244 1250 errors += 1
1245 1251 if errors:
1246 1252 error = _(".hg/dirstate inconsistent with current parent's manifest")
1247 1253 raise util.Abort(error)
1248 1254
1249 1255 def debugconfig(ui, repo):
1250 1256 """show combined config settings from all hgrc files"""
1251 1257 for section, name, value in ui.walkconfig():
1252 1258 ui.write('%s.%s=%s\n' % (section, name, value))
1253 1259
1254 1260 def debugsetparents(ui, repo, rev1, rev2=None):
1255 1261 """manually set the parents of the current working directory
1256 1262
1257 1263 This is useful for writing repository conversion tools, but should
1258 1264 be used with care.
1259 1265 """
1260 1266
1261 1267 if not rev2:
1262 1268 rev2 = hex(nullid)
1263 1269
1264 1270 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1265 1271
1266 1272 def debugstate(ui, repo):
1267 1273 """show the contents of the current dirstate"""
1268 1274 repo.dirstate.read()
1269 1275 dc = repo.dirstate.map
1270 1276 keys = dc.keys()
1271 1277 keys.sort()
1272 1278 for file_ in keys:
1273 1279 ui.write("%c %3o %10d %s %s\n"
1274 1280 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1275 1281 time.strftime("%x %X",
1276 1282 time.localtime(dc[file_][3])), file_))
1277 1283 for f in repo.dirstate.copies:
1278 1284 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1279 1285
1280 1286 def debugdata(ui, file_, rev):
1281 1287 """dump the contents of a data file revision"""
1282 1288 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1283 1289 file_[:-2] + ".i", file_, 0)
1284 1290 try:
1285 1291 ui.write(r.revision(r.lookup(rev)))
1286 1292 except KeyError:
1287 1293 raise util.Abort(_('invalid revision identifier %s'), rev)
1288 1294
1289 1295 def debugindex(ui, file_):
1290 1296 """dump the contents of an index file"""
1291 1297 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1292 1298 ui.write(" rev offset length base linkrev" +
1293 1299 " nodeid p1 p2\n")
1294 1300 for i in range(r.count()):
1295 1301 node = r.node(i)
1296 1302 pp = r.parents(node)
1297 1303 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1298 1304 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1299 1305 short(node), short(pp[0]), short(pp[1])))
1300 1306
1301 1307 def debugindexdot(ui, file_):
1302 1308 """dump an index DAG as a .dot file"""
1303 1309 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1304 1310 ui.write("digraph G {\n")
1305 1311 for i in range(r.count()):
1306 1312 e = r.index[i]
1307 1313 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1308 1314 if e[5] != nullid:
1309 1315 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1310 1316 ui.write("}\n")
1311 1317
1312 1318 def debugrename(ui, repo, file, rev=None):
1313 1319 """dump rename information"""
1314 1320 r = repo.file(relpath(repo, [file])[0])
1315 1321 if rev:
1316 1322 try:
1317 1323 # assume all revision numbers are for changesets
1318 1324 n = repo.lookup(rev)
1319 1325 change = repo.changelog.read(n)
1320 1326 m = repo.manifest.read(change[0])
1321 1327 n = m[relpath(repo, [file])[0]]
1322 1328 except (hg.RepoError, KeyError):
1323 1329 n = r.lookup(rev)
1324 1330 else:
1325 1331 n = r.tip()
1326 1332 m = r.renamed(n)
1327 1333 if m:
1328 1334 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1329 1335 else:
1330 1336 ui.write(_("not renamed\n"))
1331 1337
1332 1338 def debugwalk(ui, repo, *pats, **opts):
1333 1339 """show how files match on given patterns"""
1334 1340 items = list(walk(repo, pats, opts))
1335 1341 if not items:
1336 1342 return
1337 1343 fmt = '%%s %%-%ds %%-%ds %%s' % (
1338 1344 max([len(abs) for (src, abs, rel, exact) in items]),
1339 1345 max([len(rel) for (src, abs, rel, exact) in items]))
1340 1346 for src, abs, rel, exact in items:
1341 1347 line = fmt % (src, abs, rel, exact and 'exact' or '')
1342 1348 ui.write("%s\n" % line.rstrip())
1343 1349
1344 1350 def diff(ui, repo, *pats, **opts):
1345 1351 """diff repository (or selected files)
1346 1352
1347 1353 Show differences between revisions for the specified files.
1348 1354
1349 1355 Differences between files are shown using the unified diff format.
1350 1356
1351 1357 When two revision arguments are given, then changes are shown
1352 1358 between those revisions. If only one revision is specified then
1353 1359 that revision is compared to the working directory, and, when no
1354 1360 revisions are specified, the working directory files are compared
1355 1361 to its parent.
1356 1362
1357 1363 Without the -a option, diff will avoid generating diffs of files
1358 1364 it detects as binary. With -a, diff will generate a diff anyway,
1359 1365 probably with undesirable results.
1360 1366 """
1361 1367 node1, node2 = None, None
1362 1368 revs = [repo.lookup(x) for x in opts['rev']]
1363 1369
1364 1370 if len(revs) > 0:
1365 1371 node1 = revs[0]
1366 1372 if len(revs) > 1:
1367 1373 node2 = revs[1]
1368 1374 if len(revs) > 2:
1369 1375 raise util.Abort(_("too many revisions to diff"))
1370 1376
1371 1377 fns, matchfn, anypats = matchpats(repo, pats, opts)
1372 1378
1373 1379 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1374 1380 text=opts['text'], opts=opts)
1375 1381
1376 1382 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1377 1383 node = repo.lookup(changeset)
1378 1384 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1379 1385 if opts['switch_parent']:
1380 1386 parents.reverse()
1381 1387 prev = (parents and parents[0]) or nullid
1382 1388 change = repo.changelog.read(node)
1383 1389
1384 1390 fp = make_file(repo, repo.changelog, opts['output'],
1385 1391 node=node, total=total, seqno=seqno,
1386 1392 revwidth=revwidth)
1387 1393 if fp != sys.stdout:
1388 1394 ui.note("%s\n" % fp.name)
1389 1395
1390 1396 fp.write("# HG changeset patch\n")
1391 1397 fp.write("# User %s\n" % change[1])
1392 1398 fp.write("# Node ID %s\n" % hex(node))
1393 1399 fp.write("# Parent %s\n" % hex(prev))
1394 1400 if len(parents) > 1:
1395 1401 fp.write("# Parent %s\n" % hex(parents[1]))
1396 1402 fp.write(change[4].rstrip())
1397 1403 fp.write("\n\n")
1398 1404
1399 1405 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1400 1406 if fp != sys.stdout:
1401 1407 fp.close()
1402 1408
1403 1409 def export(ui, repo, *changesets, **opts):
1404 1410 """dump the header and diffs for one or more changesets
1405 1411
1406 1412 Print the changeset header and diffs for one or more revisions.
1407 1413
1408 1414 The information shown in the changeset header is: author,
1409 1415 changeset hash, parent and commit comment.
1410 1416
1411 1417 Output may be to a file, in which case the name of the file is
1412 1418 given using a format string. The formatting rules are as follows:
1413 1419
1414 1420 %% literal "%" character
1415 1421 %H changeset hash (40 bytes of hexadecimal)
1416 1422 %N number of patches being generated
1417 1423 %R changeset revision number
1418 1424 %b basename of the exporting repository
1419 1425 %h short-form changeset hash (12 bytes of hexadecimal)
1420 1426 %n zero-padded sequence number, starting at 1
1421 1427 %r zero-padded changeset revision number
1422 1428
1423 1429 Without the -a option, export will avoid generating diffs of files
1424 1430 it detects as binary. With -a, export will generate a diff anyway,
1425 1431 probably with undesirable results.
1426 1432
1427 1433 With the --switch-parent option, the diff will be against the second
1428 1434 parent. It can be useful to review a merge.
1429 1435 """
1430 1436 if not changesets:
1431 1437 raise util.Abort(_("export requires at least one changeset"))
1432 1438 seqno = 0
1433 1439 revs = list(revrange(ui, repo, changesets))
1434 1440 total = len(revs)
1435 1441 revwidth = max(map(len, revs))
1436 1442 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1437 1443 ui.note(msg)
1438 1444 for cset in revs:
1439 1445 seqno += 1
1440 1446 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1441 1447
1442 1448 def forget(ui, repo, *pats, **opts):
1443 1449 """don't add the specified files on the next commit (DEPRECATED)
1444 1450
1445 1451 (DEPRECATED)
1446 1452 Undo an 'hg add' scheduled for the next commit.
1447 1453
1448 1454 This command is now deprecated and will be removed in a future
1449 1455 release. Please use revert instead.
1450 1456 """
1451 1457 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1452 1458 forget = []
1453 1459 for src, abs, rel, exact in walk(repo, pats, opts):
1454 1460 if repo.dirstate.state(abs) == 'a':
1455 1461 forget.append(abs)
1456 1462 if ui.verbose or not exact:
1457 1463 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1458 1464 repo.forget(forget)
1459 1465
1460 1466 def grep(ui, repo, pattern, *pats, **opts):
1461 1467 """search for a pattern in specified files and revisions
1462 1468
1463 1469 Search revisions of files for a regular expression.
1464 1470
1465 1471 This command behaves differently than Unix grep. It only accepts
1466 1472 Python/Perl regexps. It searches repository history, not the
1467 1473 working directory. It always prints the revision number in which
1468 1474 a match appears.
1469 1475
1470 1476 By default, grep only prints output for the first revision of a
1471 1477 file in which it finds a match. To get it to print every revision
1472 1478 that contains a change in match status ("-" for a match that
1473 1479 becomes a non-match, or "+" for a non-match that becomes a match),
1474 1480 use the --all flag.
1475 1481 """
1476 1482 reflags = 0
1477 1483 if opts['ignore_case']:
1478 1484 reflags |= re.I
1479 1485 regexp = re.compile(pattern, reflags)
1480 1486 sep, eol = ':', '\n'
1481 1487 if opts['print0']:
1482 1488 sep = eol = '\0'
1483 1489
1484 1490 fcache = {}
1485 1491 def getfile(fn):
1486 1492 if fn not in fcache:
1487 1493 fcache[fn] = repo.file(fn)
1488 1494 return fcache[fn]
1489 1495
1490 1496 def matchlines(body):
1491 1497 begin = 0
1492 1498 linenum = 0
1493 1499 while True:
1494 1500 match = regexp.search(body, begin)
1495 1501 if not match:
1496 1502 break
1497 1503 mstart, mend = match.span()
1498 1504 linenum += body.count('\n', begin, mstart) + 1
1499 1505 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1500 1506 lend = body.find('\n', mend)
1501 1507 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1502 1508 begin = lend + 1
1503 1509
1504 1510 class linestate(object):
1505 1511 def __init__(self, line, linenum, colstart, colend):
1506 1512 self.line = line
1507 1513 self.linenum = linenum
1508 1514 self.colstart = colstart
1509 1515 self.colend = colend
1510 1516 def __eq__(self, other):
1511 1517 return self.line == other.line
1512 1518 def __hash__(self):
1513 1519 return hash(self.line)
1514 1520
1515 1521 matches = {}
1516 1522 def grepbody(fn, rev, body):
1517 1523 matches[rev].setdefault(fn, {})
1518 1524 m = matches[rev][fn]
1519 1525 for lnum, cstart, cend, line in matchlines(body):
1520 1526 s = linestate(line, lnum, cstart, cend)
1521 1527 m[s] = s
1522 1528
1523 1529 # FIXME: prev isn't used, why ?
1524 1530 prev = {}
1525 1531 ucache = {}
1526 1532 def display(fn, rev, states, prevstates):
1527 1533 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1528 1534 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1529 1535 counts = {'-': 0, '+': 0}
1530 1536 filerevmatches = {}
1531 1537 for l in diff:
1532 1538 if incrementing or not opts['all']:
1533 1539 change = ((l in prevstates) and '-') or '+'
1534 1540 r = rev
1535 1541 else:
1536 1542 change = ((l in states) and '-') or '+'
1537 1543 r = prev[fn]
1538 1544 cols = [fn, str(rev)]
1539 1545 if opts['line_number']:
1540 1546 cols.append(str(l.linenum))
1541 1547 if opts['all']:
1542 1548 cols.append(change)
1543 1549 if opts['user']:
1544 1550 cols.append(trimuser(ui, getchange(rev)[1], rev,
1545 1551 ucache))
1546 1552 if opts['files_with_matches']:
1547 1553 c = (fn, rev)
1548 1554 if c in filerevmatches:
1549 1555 continue
1550 1556 filerevmatches[c] = 1
1551 1557 else:
1552 1558 cols.append(l.line)
1553 1559 ui.write(sep.join(cols), eol)
1554 1560 counts[change] += 1
1555 1561 return counts['+'], counts['-']
1556 1562
1557 1563 fstate = {}
1558 1564 skip = {}
1559 1565 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1560 1566 count = 0
1561 1567 incrementing = False
1562 1568 for st, rev, fns in changeiter:
1563 1569 if st == 'window':
1564 1570 incrementing = rev
1565 1571 matches.clear()
1566 1572 elif st == 'add':
1567 1573 change = repo.changelog.read(repo.lookup(str(rev)))
1568 1574 mf = repo.manifest.read(change[0])
1569 1575 matches[rev] = {}
1570 1576 for fn in fns:
1571 1577 if fn in skip:
1572 1578 continue
1573 1579 fstate.setdefault(fn, {})
1574 1580 try:
1575 1581 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1576 1582 except KeyError:
1577 1583 pass
1578 1584 elif st == 'iter':
1579 1585 states = matches[rev].items()
1580 1586 states.sort()
1581 1587 for fn, m in states:
1582 1588 if fn in skip:
1583 1589 continue
1584 1590 if incrementing or not opts['all'] or fstate[fn]:
1585 1591 pos, neg = display(fn, rev, m, fstate[fn])
1586 1592 count += pos + neg
1587 1593 if pos and not opts['all']:
1588 1594 skip[fn] = True
1589 1595 fstate[fn] = m
1590 1596 prev[fn] = rev
1591 1597
1592 1598 if not incrementing:
1593 1599 fstate = fstate.items()
1594 1600 fstate.sort()
1595 1601 for fn, state in fstate:
1596 1602 if fn in skip:
1597 1603 continue
1598 1604 display(fn, rev, {}, state)
1599 1605 return (count == 0 and 1) or 0
1600 1606
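# Editorial sketch of the idea behind linestate/display above (not part of
# the original source). Each revision of a file is reduced to a set of
# matching lines; the symmetric difference between consecutive revisions is
# what 'hg grep --all' reports with '+'/'-' markers:
#
#   import sets
#   before = sets.Set(['int x = 1;'])            # matches in the older rev
#   after = sets.Set(['int x = 1;', 'int y;'])   # matches in the newer rev
#   after.symmetric_difference(before)           # -> Set(['int y;']), a '+'
#
# linestate.__eq__/__hash__ compare only the line text, so identical lines in
# two revisions collapse to a single set element.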
1601 1607 def heads(ui, repo, **opts):
1602 1608 """show current repository heads
1603 1609
1604 1610 Show all repository head changesets.
1605 1611
1606 1612 Repository "heads" are changesets that don't have child
1607 1613 changesets. They are where development generally takes place and
1608 1614 are the usual targets for update and merge operations.
1609 1615 """
1610 1616 if opts['rev']:
1611 1617 heads = repo.heads(repo.lookup(opts['rev']))
1612 1618 else:
1613 1619 heads = repo.heads()
1614 1620 br = None
1615 1621 if opts['branches']:
1616 1622 br = repo.branchlookup(heads)
1617 1623 displayer = show_changeset(ui, repo, opts)
1618 1624 for n in heads:
1619 1625 displayer.show(changenode=n, brinfo=br)
1620 1626
1621 1627 def identify(ui, repo):
1622 1628 """print information about the working copy
1623 1629
1624 1630 Print a short summary of the current state of the repo.
1625 1631
1626 1632 This summary identifies the repository state using one or two parent
1627 1633 hash identifiers, followed by a "+" if there are uncommitted changes
1628 1634 in the working directory, followed by a list of tags for this revision.
1629 1635 """
1630 1636 parents = [p for p in repo.dirstate.parents() if p != nullid]
1631 1637 if not parents:
1632 1638 ui.write(_("unknown\n"))
1633 1639 return
1634 1640
1635 1641 hexfunc = ui.verbose and hex or short
1636 1642 modified, added, removed, deleted, unknown = repo.changes()
1637 1643 output = ["%s%s" %
1638 1644 ('+'.join([hexfunc(parent) for parent in parents]),
1639 1645 (modified or added or removed or deleted) and "+" or "")]
1640 1646
1641 1647 if not ui.quiet:
1642 1648 # multiple tags for a single parent separated by '/'
1643 1649 parenttags = ['/'.join(tags)
1644 1650 for tags in map(repo.nodetags, parents) if tags]
1645 1651 # tags for multiple parents separated by ' + '
1646 1652 if parenttags:
1647 1653 output.append(' + '.join(parenttags))
1648 1654
1649 1655 ui.write("%s\n" % ' '.join(output))
1650 1656
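# Editorial note: the output assembled above looks like (hash is fictitious):
#   c43d8ea6c954+ tip
# i.e. the short parent hash(es) joined with '+', a trailing '+' when the
# working directory has uncommitted changes, then the parent's tags.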
1651 1657 def import_(ui, repo, patch1, *patches, **opts):
1652 1658 """import an ordered set of patches
1653 1659
1654 1660 Import a list of patches and commit them individually.
1655 1661
1656 1662 If there are outstanding changes in the working directory, import
1657 1663 will abort unless given the -f flag.
1658 1664
1659 1665 If a patch looks like a mail message (its first line starts with
1660 1666 "From " or looks like an RFC822 header), it will not be applied
1661 1667 unless the -f option is used. The importer neither parses nor
1662 1668 discards mail headers, so use -f only to override the "mailness"
1663 1669 safety check, not to import a real mail message.
1664 1670 """
1665 1671 patches = (patch1,) + patches
1666 1672
1667 1673 if not opts['force']:
1668 1674 bail_if_changed(repo)
1669 1675
1670 1676 d = opts["base"]
1671 1677 strip = opts["strip"]
1672 1678
1673 1679 mailre = re.compile(r'(?:From |[\w-]+:)')
1674 1680
1675 1681 # attempt to detect the start of a patch
1676 1682 # (this heuristic is borrowed from quilt)
1677 1683 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1678 1684 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1679 1685 '(---|\*\*\*)[ \t])')
1680 1686
1681 1687 for patch in patches:
1682 1688 ui.status(_("applying %s\n") % patch)
1683 1689 pf = os.path.join(d, patch)
1684 1690
1685 1691 message = []
1686 1692 user = None
1687 1693 hgpatch = False
1688 1694 for line in file(pf):
1689 1695 line = line.rstrip()
1690 1696 if (not message and not hgpatch and
1691 1697 mailre.match(line) and not opts['force']):
1692 1698 if len(line) > 35:
1693 1699 line = line[:32] + '...'
1694 1700 raise util.Abort(_('first line looks like a '
1695 1701 'mail header: ') + line)
1696 1702 if diffre.match(line):
1697 1703 break
1698 1704 elif hgpatch:
1699 1705 # parse values when importing the result of an hg export
1700 1706 if line.startswith("# User "):
1701 1707 user = line[7:]
1702 1708 ui.debug(_('User: %s\n') % user)
1703 1709 elif not line.startswith("# ") and line:
1704 1710 message.append(line)
1705 1711 hgpatch = False
1706 1712 elif line == '# HG changeset patch':
1707 1713 hgpatch = True
1708 1714 message = [] # We may have collected garbage
1709 1715 else:
1710 1716 message.append(line)
1711 1717
1712 1718 # make sure message isn't empty
1713 1719 if not message:
1714 1720 message = _("imported patch %s\n") % patch
1715 1721 else:
1716 1722 message = "%s\n" % '\n'.join(message)
1717 1723 ui.debug(_('message:\n%s\n') % message)
1718 1724
1719 1725 files = util.patch(strip, pf, ui)
1720 1726
1721 1727 if len(files) > 0:
1722 1728 addremove_lock(ui, repo, files, {})
1723 1729 repo.commit(files, message, user)
1724 1730
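# Editorial sketch of what the two heuristics above accept (examples only):
#   mailre  matches lines that look like mail, e.g. "From alice@example.com"
#           or "Subject: fix the build"; such a line before any message text
#           aborts the import unless -f is given.
#   diffre  matches lines that start a patch, e.g. "diff -r 000000000000 a",
#           "Index: a", "--- a.orig" or "*** a"; everything before the first
#           such line becomes the commit message.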
1725 1731 def incoming(ui, repo, source="default", **opts):
1726 1732 """show new changesets found in source
1727 1733
1728 1734 Show new changesets found in the specified path/URL or the default
1729 1735 pull location. These are the changesets that would be pulled if a pull
1730 1736 was requested.
1731 1737
1732 1738 For remote repositories, using --bundle avoids downloading the changesets
1733 1739 twice if incoming is followed by a pull.
1734 1740
1735 1741 See pull for valid source format details.
1736 1742 """
1737 1743 source = ui.expandpath(source)
1738 1744 if opts['ssh']:
1739 1745 ui.setconfig("ui", "ssh", opts['ssh'])
1740 1746 if opts['remotecmd']:
1741 1747 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1742 1748
1743 1749 other = hg.repository(ui, source)
1744 1750 incoming = repo.findincoming(other, force=opts["force"])
1745 1751 if not incoming:
1746 1752 ui.status(_("no changes found\n"))
1747 1753 return
1748 1754
1749 1755 cleanup = None
1750 1756 try:
1751 1757 fname = opts["bundle"]
1752 1758 if fname or not other.local():
1753 1759 # create a bundle (uncompressed if other repo is not local)
1754 1760 cg = other.changegroup(incoming, "incoming")
1755 1761 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1756 1762 # keep written bundle?
1757 1763 if opts["bundle"]:
1758 1764 cleanup = None
1759 1765 if not other.local():
1760 1766 # use the created uncompressed bundlerepo
1761 1767 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1762 1768
1763 1769 o = other.changelog.nodesbetween(incoming)[0]
1764 1770 if opts['newest_first']:
1765 1771 o.reverse()
1766 1772 displayer = show_changeset(ui, other, opts)
1767 1773 for n in o:
1768 1774 parents = [p for p in other.changelog.parents(n) if p != nullid]
1769 1775 if opts['no_merges'] and len(parents) == 2:
1770 1776 continue
1771 1777 displayer.show(changenode=n)
1772 1778 if opts['patch']:
1773 1779 prev = (parents and parents[0]) or nullid
1774 1780 dodiff(ui, ui, other, prev, n)
1775 1781 ui.write("\n")
1776 1782 finally:
1777 1783 if hasattr(other, 'close'):
1778 1784 other.close()
1779 1785 if cleanup:
1780 1786 os.unlink(cleanup)
1781 1787
1782 1788 def init(ui, dest="."):
1783 1789 """create a new repository in the given directory
1784 1790
1785 1791 Initialize a new repository in the given directory. If the given
1786 1792 directory does not exist, it is created.
1787 1793
1788 1794 If no directory is given, the current directory is used.
1789 1795 """
1790 1796 if not os.path.exists(dest):
1791 1797 os.mkdir(dest)
1792 1798 hg.repository(ui, dest, create=1)
1793 1799
1794 1800 def locate(ui, repo, *pats, **opts):
1795 1801 """locate files matching specific patterns
1796 1802
1797 1803 Print all files under Mercurial control whose names match the
1798 1804 given patterns.
1799 1805
1800 1806 This command searches the current directory and its
1801 1807 subdirectories. To search an entire repository, move to the root
1802 1808 of the repository.
1803 1809
1804 1810 If no patterns are given to match, this command prints all file
1805 1811 names.
1806 1812
1807 1813 If you want to feed the output of this command into the "xargs"
1808 1814 command, use the "-0" option to both this command and "xargs".
1809 1815 This will avoid the problem of "xargs" treating single filenames
1810 1816 that contain white space as multiple filenames.
1811 1817 """
1812 1818 end = opts['print0'] and '\0' or '\n'
1813 1819 rev = opts['rev']
1814 1820 if rev:
1815 1821 node = repo.lookup(rev)
1816 1822 else:
1817 1823 node = None
1818 1824
1819 1825 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1820 1826 head='(?:.*/|)'):
1821 1827 if not node and repo.dirstate.state(abs) == '?':
1822 1828 continue
1823 1829 if opts['fullpath']:
1824 1830 ui.write(os.path.join(repo.root, abs), end)
1825 1831 else:
1826 1832 ui.write(((pats and rel) or abs), end)
1827 1833
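# Editorial example of the -0/--print0 usage suggested in the docstring above
# (a shell command line, not Python):
#   hg locate -0 | xargs -0 touch
# NUL-separated names survive file names containing whitespace, which
# newline-separated output piped to xargs would not.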
1828 1834 def log(ui, repo, *pats, **opts):
1829 1835 """show revision history of entire repository or files
1830 1836
1831 1837 Print the revision history of the specified files or the entire project.
1832 1838
1833 1839 By default this command outputs: changeset id and hash, tags,
1834 1840 non-trivial parents, user, date and time, and a summary for each
1835 1841 commit. When the -v/--verbose switch is used, the list of changed
1836 1842 files and the full commit message are shown.
1837 1843 """
1838 1844 class dui(object):
1839 1845 # Implement and delegate some ui protocol. Save hunks of
1840 1846 # output for later display in the desired order.
1841 1847 def __init__(self, ui):
1842 1848 self.ui = ui
1843 1849 self.hunk = {}
1844 1850 self.header = {}
1845 1851 def bump(self, rev):
1846 1852 self.rev = rev
1847 1853 self.hunk[rev] = []
1848 1854 self.header[rev] = []
1849 1855 def note(self, *args):
1850 1856 if self.verbose:
1851 1857 self.write(*args)
1852 1858 def status(self, *args):
1853 1859 if not self.quiet:
1854 1860 self.write(*args)
1855 1861 def write(self, *args):
1856 1862 self.hunk[self.rev].append(args)
1857 1863 def write_header(self, *args):
1858 1864 self.header[self.rev].append(args)
1859 1865 def debug(self, *args):
1860 1866 if self.debugflag:
1861 1867 self.write(*args)
1862 1868 def __getattr__(self, key):
1863 1869 return getattr(self.ui, key)
1864 1870
1865 1871 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1866 1872
1867 1873 if opts['limit']:
1868 1874 try:
1869 1875 limit = int(opts['limit'])
1870 1876 except ValueError:
1871 1877 raise util.Abort(_('limit must be a positive integer'))
1872 1878 if limit <= 0: raise util.Abort(_('limit must be positive'))
1873 1879 else:
1874 1880 limit = sys.maxint
1875 1881 count = 0
1876 1882
1877 1883 displayer = show_changeset(ui, repo, opts)
1878 1884 for st, rev, fns in changeiter:
1879 1885 if st == 'window':
1880 1886 du = dui(ui)
1881 1887 displayer.ui = du
1882 1888 elif st == 'add':
1883 1889 du.bump(rev)
1884 1890 changenode = repo.changelog.node(rev)
1885 1891 parents = [p for p in repo.changelog.parents(changenode)
1886 1892 if p != nullid]
1887 1893 if opts['no_merges'] and len(parents) == 2:
1888 1894 continue
1889 1895 if opts['only_merges'] and len(parents) != 2:
1890 1896 continue
1891 1897
1892 1898 if opts['keyword']:
1893 1899 changes = getchange(rev)
1894 1900 miss = 0
1895 1901 for k in [kw.lower() for kw in opts['keyword']]:
1896 1902 if not (k in changes[1].lower() or
1897 1903 k in changes[4].lower() or
1898 1904 k in " ".join(changes[3][:20]).lower()):
1899 1905 miss = 1
1900 1906 break
1901 1907 if miss:
1902 1908 continue
1903 1909
1904 1910 br = None
1905 1911 if opts['branches']:
1906 1912 br = repo.branchlookup([repo.changelog.node(rev)])
1907 1913
1908 1914 displayer.show(rev, brinfo=br)
1909 1915 if opts['patch']:
1910 1916 prev = (parents and parents[0]) or nullid
1911 1917 dodiff(du, du, repo, prev, changenode, match=matchfn)
1912 1918 du.write("\n\n")
1913 1919 elif st == 'iter':
1914 1920 if count == limit: break
1915 1921 if du.header[rev]:
1916 1922 for args in du.header[rev]:
1917 1923 ui.write_header(*args)
1918 1924 if du.hunk[rev]:
1919 1925 count += 1
1920 1926 for args in du.hunk[rev]:
1921 1927 ui.write(*args)
1922 1928
1923 1929 def manifest(ui, repo, rev=None):
1924 1930 """output the latest or given revision of the project manifest
1925 1931
1926 1932 Print a list of version controlled files for the given revision.
1927 1933
1928 1934 The manifest is the list of files being version controlled. If no revision
1929 1935 is given then the tip is used.
1930 1936 """
1931 1937 if rev:
1932 1938 try:
1933 1939 # assume all revision numbers are for changesets
1934 1940 n = repo.lookup(rev)
1935 1941 change = repo.changelog.read(n)
1936 1942 n = change[0]
1937 1943 except hg.RepoError:
1938 1944 n = repo.manifest.lookup(rev)
1939 1945 else:
1940 1946 n = repo.manifest.tip()
1941 1947 m = repo.manifest.read(n)
1942 1948 mf = repo.manifest.readflags(n)
1943 1949 files = m.keys()
1944 1950 files.sort()
1945 1951
1946 1952 for f in files:
1947 1953 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1948 1954
1949 1955 def merge(ui, repo, node=None, **opts):
1950 1956 """Merge working directory with another revision
1951 1957
1952 1958 Merge the contents of the current working directory and the
1953 1959 requested revision. Files that changed between either parent are
1954 1960 marked as changed for the next commit and a commit must be
1955 1961 performed before any further updates are allowed.
1956 1962 """
1957 1963 return update(ui, repo, node=node, merge=True, **opts)
1958 1964
1959 1965 def outgoing(ui, repo, dest="default-push", **opts):
1960 1966 """show changesets not found in destination
1961 1967
1962 1968 Show changesets not found in the specified destination repository or
1963 1969 the default push location. These are the changesets that would be pushed
1964 1970 if a push was requested.
1965 1971
1966 1972 See pull for valid destination format details.
1967 1973 """
1968 1974 dest = ui.expandpath(dest)
1969 1975 if opts['ssh']:
1970 1976 ui.setconfig("ui", "ssh", opts['ssh'])
1971 1977 if opts['remotecmd']:
1972 1978 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1973 1979
1974 1980 other = hg.repository(ui, dest)
1975 1981 o = repo.findoutgoing(other, force=opts['force'])
1976 1982 if not o:
1977 1983 ui.status(_("no changes found\n"))
1978 1984 return
1979 1985 o = repo.changelog.nodesbetween(o)[0]
1980 1986 if opts['newest_first']:
1981 1987 o.reverse()
1982 1988 displayer = show_changeset(ui, repo, opts)
1983 1989 for n in o:
1984 1990 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1985 1991 if opts['no_merges'] and len(parents) == 2:
1986 1992 continue
1987 1993 displayer.show(changenode=n)
1988 1994 if opts['patch']:
1989 1995 prev = (parents and parents[0]) or nullid
1990 1996 dodiff(ui, ui, repo, prev, n)
1991 1997 ui.write("\n")
1992 1998
1993 1999 def parents(ui, repo, rev=None, branches=None, **opts):
1994 2000 """show the parents of the working dir or revision
1995 2001
1996 2002 Print the working directory's parent revisions.
1997 2003 """
1998 2004 if rev:
1999 2005 p = repo.changelog.parents(repo.lookup(rev))
2000 2006 else:
2001 2007 p = repo.dirstate.parents()
2002 2008
2003 2009 br = None
2004 2010 if branches is not None:
2005 2011 br = repo.branchlookup(p)
2006 2012 displayer = show_changeset(ui, repo, opts)
2007 2013 for n in p:
2008 2014 if n != nullid:
2009 2015 displayer.show(changenode=n, brinfo=br)
2010 2016
2011 2017 def paths(ui, repo, search=None):
2012 2018 """show definition of symbolic path names
2013 2019
2014 2020 Show definition of symbolic path name NAME. If no name is given, show
2015 2021 definition of available names.
2016 2022
2017 2023 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2018 2024 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2019 2025 """
2020 2026 if search:
2021 2027 for name, path in ui.configitems("paths"):
2022 2028 if name == search:
2023 2029 ui.write("%s\n" % path)
2024 2030 return
2025 2031 ui.warn(_("not found!\n"))
2026 2032 return 1
2027 2033 else:
2028 2034 for name, path in ui.configitems("paths"):
2029 2035 ui.write("%s = %s\n" % (name, path))
2030 2036
2031 2037 def postincoming(ui, repo, modheads, optupdate):
2032 2038 if modheads == 0:
2033 2039 return
2034 2040 if optupdate:
2035 2041 if modheads == 1:
2036 2042 return update(ui, repo)
2037 2043 else:
2038 2044 ui.status(_("not updating, since new heads added\n"))
2039 2045 if modheads > 1:
2040 2046 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2041 2047 else:
2042 2048 ui.status(_("(run 'hg update' to get a working copy)\n"))
2043 2049
2044 2050 def pull(ui, repo, source="default", **opts):
2045 2051 """pull changes from the specified source
2046 2052
2047 2053 Pull changes from a remote repository to a local one.
2048 2054
2049 2055 This finds all changes from the repository at the specified path
2050 2056 or URL and adds them to the local repository. By default, this
2051 2057 does not update the copy of the project in the working directory.
2052 2058
2053 2059 Valid URLs are of the form:
2054 2060
2055 2061 local/filesystem/path
2056 2062 http://[user@]host[:port][/path]
2057 2063 https://[user@]host[:port][/path]
2058 2064 ssh://[user@]host[:port][/path]
2059 2065
2060 2066 Some notes about using SSH with Mercurial:
2061 2067 - SSH requires an accessible shell account on the destination machine
2062 2068 and a copy of hg in the remote path, or one specified with --remotecmd.
2063 2069 - /path is relative to the remote user's home directory by default.
2064 2070 Use two slashes at the start of a path to specify an absolute path.
2065 2071 - Mercurial doesn't use its own compression via SSH; the right thing
2066 2072 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2067 2073 Host *.mylocalnetwork.example.com
2068 2074 Compression off
2069 2075 Host *
2070 2076 Compression on
2071 2077 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2072 2078 with the --ssh command line option.
2073 2079 """
2074 2080 source = ui.expandpath(source)
2075 2081 ui.status(_('pulling from %s\n') % (source))
2076 2082
2077 2083 if opts['ssh']:
2078 2084 ui.setconfig("ui", "ssh", opts['ssh'])
2079 2085 if opts['remotecmd']:
2080 2086 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2081 2087
2082 2088 other = hg.repository(ui, source)
2083 2089 revs = None
2084 2090 if opts['rev'] and not other.local():
2085 2091 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2086 2092 elif opts['rev']:
2087 2093 revs = [other.lookup(rev) for rev in opts['rev']]
2088 2094 modheads = repo.pull(other, heads=revs, force=opts['force'])
2089 2095 return postincoming(ui, repo, modheads, opts['update'])
2090 2096
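# Editorial illustration of the ssh advice in the docstring above: the
# "ssh -C" alternative can be configured once in an hgrc (values are
# examples only):
#
#   [ui]
#   ssh = ssh -C
#
# which is equivalent to passing --ssh "ssh -C" on each pull or push.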
2091 2097 def push(ui, repo, dest="default-push", **opts):
2092 2098 """push changes to the specified destination
2093 2099
2094 2100 Push changes from the local repository to the given destination.
2095 2101
2096 2102 This is the symmetrical operation for pull. It helps to move
2097 2103 changes from the current repository to a different one. If the
2098 2104 destination is local this is identical to a pull in that directory
2099 2105 from the current one.
2100 2106
2101 2107 By default, push will refuse to run if it detects the result would
2102 2108 increase the number of remote heads. This generally indicates that
2103 2109 the client has forgotten to sync and merge before pushing.
2104 2110
2105 2111 Valid URLs are of the form:
2106 2112
2107 2113 local/filesystem/path
2108 2114 ssh://[user@]host[:port][/path]
2109 2115
2110 2116 Look at the help text for the pull command for important details
2111 2117 about ssh:// URLs.
2112 2118 """
2113 2119 dest = ui.expandpath(dest)
2114 2120 ui.status('pushing to %s\n' % (dest))
2115 2121
2116 2122 if opts['ssh']:
2117 2123 ui.setconfig("ui", "ssh", opts['ssh'])
2118 2124 if opts['remotecmd']:
2119 2125 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2120 2126
2121 2127 other = hg.repository(ui, dest)
2122 2128 revs = None
2123 2129 if opts['rev']:
2124 2130 revs = [repo.lookup(rev) for rev in opts['rev']]
2125 2131 r = repo.push(other, opts['force'], revs=revs)
2126 2132 return r == 0
2127 2133
2128 2134 def rawcommit(ui, repo, *flist, **rc):
2129 2135 """raw commit interface (DEPRECATED)
2130 2136
2131 2137 (DEPRECATED)
2132 2138 Low-level commit, for use in helper scripts.
2133 2139
2134 2140 This command is not intended to be used by normal users, as it is
2135 2141 primarily useful for importing from other SCMs.
2136 2142
2137 2143 This command is now deprecated and will be removed in a future
2138 2144 release. Please use debugsetparents and commit instead.
2139 2145 """
2140 2146
2141 2147 ui.warn(_("(the rawcommit command is deprecated)\n"))
2142 2148
2143 2149 message = rc['message']
2144 2150 if not message and rc['logfile']:
2145 2151 try:
2146 2152 message = open(rc['logfile']).read()
2147 2153 except IOError:
2148 2154 pass
2149 2155 if not message and not rc['logfile']:
2150 2156 raise util.Abort(_("missing commit message"))
2151 2157
2152 2158 files = relpath(repo, list(flist))
2153 2159 if rc['files']:
2154 2160 files += open(rc['files']).read().splitlines()
2155 2161
2156 2162 rc['parent'] = map(repo.lookup, rc['parent'])
2157 2163
2158 2164 try:
2159 2165 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2160 2166 except ValueError, inst:
2161 2167 raise util.Abort(str(inst))
2162 2168
2163 2169 def recover(ui, repo):
2164 2170 """roll back an interrupted transaction
2165 2171
2166 2172 Recover from an interrupted commit or pull.
2167 2173
2168 2174 This command tries to fix the repository status after an interrupted
2169 2175 operation. It should only be necessary when Mercurial suggests it.
2170 2176 """
2171 2177 if repo.recover():
2172 2178 return repo.verify()
2173 2179 return 1
2174 2180
2175 2181 def remove(ui, repo, *pats, **opts):
2176 2182 """remove the specified files on the next commit
2177 2183
2178 2184 Schedule the indicated files for removal from the repository.
2179 2185
2180 2186 This command schedules the files to be removed at the next commit.
2181 2187 This only removes files from the current branch, not from the
2182 2188 entire project history. If the files still exist in the working
2183 2189 directory, they will be deleted from it. If invoked with --after,
2184 2190 files that have been manually deleted are marked as removed.
2185 2191 """
2186 2192 names = []
2187 2193 if not opts['after'] and not pats:
2188 2194 raise util.Abort(_('no files specified'))
2189 2195 def okaytoremove(abs, rel, exact):
2190 2196 modified, added, removed, deleted, unknown = repo.changes(files=[abs])
2191 2197 reason = None
2192 2198 if not deleted and opts['after']:
2193 2199 reason = _('is still present')
2194 2200 elif modified and not opts['force']:
2195 2201 reason = _('is modified')
2196 2202 elif added:
2197 2203 reason = _('has been marked for add')
2198 2204 elif unknown:
2199 2205 reason = _('is not managed')
2200 2206 elif removed:
2201 2207 return False
2202 2208 if reason:
2203 2209 if exact:
2204 2210 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2205 2211 else:
2206 2212 return True
2207 2213 for src, abs, rel, exact in walk(repo, pats, opts):
2208 2214 if okaytoremove(abs, rel, exact):
2209 2215 if ui.verbose or not exact:
2210 2216 ui.status(_('removing %s\n') % rel)
2211 2217 names.append(abs)
2212 2218 repo.remove(names, unlink=not opts['after'])
2213 2219
2214 2220 def rename(ui, repo, *pats, **opts):
2215 2221 """rename files; equivalent of copy + remove
2216 2222
2217 2223 Mark dest as copies of sources; mark sources for deletion. If
2218 2224 dest is a directory, copies are put in that directory. If dest is
2219 2225 a file, there can only be one source.
2220 2226
2221 2227 By default, this command copies the contents of files as they
2222 2228 stand in the working directory. If invoked with --after, the
2223 2229 operation is recorded, but no copying is performed.
2224 2230
2225 2231 This command takes effect in the next commit.
2226 2232
2227 2233 NOTE: This command should be treated as experimental. While it
2228 2234 should properly record renamed files, this information is not yet
2229 2235 fully used by merge, nor fully reported by log.
2230 2236 """
2231 2237 wlock = repo.wlock(0)
2232 2238 errs, copied = docopy(ui, repo, pats, opts, wlock)
2233 2239 names = []
2234 2240 for abs, rel, exact in copied:
2235 2241 if ui.verbose or not exact:
2236 2242 ui.status(_('removing %s\n') % rel)
2237 2243 names.append(abs)
2238 2244 repo.remove(names, True, wlock)
2239 2245 return errs
2240 2246
2241 2247 def revert(ui, repo, *pats, **opts):
2242 2248 """revert files or dirs to their states as of some revision
2243 2249
2244 2250 With no revision specified, revert the named files or directories
2245 2251 to the contents they had in the parent of the working directory.
2246 2252 This restores the contents of the affected files to an unmodified
2247 2253 state. If the working directory has two parents, you must
2248 2254 explicitly specify the revision to revert to.
2249 2255
2250 2256 Modified files are saved with a .orig suffix before reverting.
2251 2257 To disable these backups, use --no-backup.
2252 2258
2253 2259 Using the -r option, revert the given files or directories to
2254 2260 their contents as of a specific revision. This can be helpful to "roll
2255 2261 back" some or all of a change that should not have been committed.
2256 2262
2257 2263 Revert modifies the working directory. It does not commit any
2258 2264 changes, or change the parent of the working directory. If you
2259 2265 revert to a revision other than the parent of the working
2260 2266 directory, the reverted files will thus appear modified
2261 2267 afterwards.
2262 2268
2263 2269 If a file has been deleted, it is recreated. If the executable
2264 2270 mode of a file was changed, it is reset.
2265 2271
2266 2272 If names are given, all files matching the names are reverted.
2267 2273
2268 2274 If no arguments are given, all files in the repository are reverted.
2269 2275 """
2270 2276 parent, p2 = repo.dirstate.parents()
2271 2277 if opts['rev']:
2272 2278 node = repo.lookup(opts['rev'])
2273 2279 elif p2 != nullid:
2274 2280 raise util.Abort(_('working dir has two parents; '
2275 2281 'you must specify the revision to revert to'))
2276 2282 else:
2277 2283 node = parent
2278 2284 mf = repo.manifest.read(repo.changelog.read(node)[0])
2279 2285
2280 2286 wlock = repo.wlock()
2281 2287
2282 2288 # need all matching names in dirstate and manifest of target rev,
2283 2289 # so have to walk both. do not print errors if files exist in one
2284 2290 # but not other.
2285 2291
2286 2292 names = {}
2287 2293 target_only = {}
2288 2294
2289 2295 # walk dirstate.
2290 2296
2291 2297 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2292 2298 names[abs] = (rel, exact)
2293 2299 if src == 'b':
2294 2300 target_only[abs] = True
2295 2301
2296 2302 # walk target manifest.
2297 2303
2298 2304 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2299 2305 badmatch=names.has_key):
2300 2306 if abs in names: continue
2301 2307 names[abs] = (rel, exact)
2302 2308 target_only[abs] = True
2303 2309
2304 2310 changes = repo.changes(match=names.has_key, wlock=wlock)
2305 2311 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2306 2312
2307 2313 revert = ([], _('reverting %s\n'))
2308 2314 add = ([], _('adding %s\n'))
2309 2315 remove = ([], _('removing %s\n'))
2310 2316 forget = ([], _('forgetting %s\n'))
2311 2317 undelete = ([], _('undeleting %s\n'))
2312 2318 update = {}
2313 2319
2314 2320 disptable = (
2315 2321 # dispatch table:
2316 2322 # file state
2317 2323 # action if in target manifest
2318 2324 # action if not in target manifest
2319 2325 # make backup if in target manifest
2320 2326 # make backup if not in target manifest
2321 2327 (modified, revert, remove, True, True),
2322 2328 (added, revert, forget, True, False),
2323 2329 (removed, undelete, None, False, False),
2324 2330 (deleted, revert, remove, False, False),
2325 2331 (unknown, add, None, True, False),
2326 2332 (target_only, add, None, False, False),
2327 2333 )
2328 2334
2329 2335 entries = names.items()
2330 2336 entries.sort()
2331 2337
2332 2338 for abs, (rel, exact) in entries:
2333 2339 in_mf = abs in mf
2334 2340 def handle(xlist, dobackup):
2335 2341 xlist[0].append(abs)
2336 2342 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2337 2343 bakname = "%s.orig" % rel
2338 2344 ui.note(_('saving current version of %s as %s\n') %
2339 2345 (rel, bakname))
2340 2346 shutil.copyfile(rel, bakname)
2341 2347 shutil.copymode(rel, bakname)
2342 2348 if ui.verbose or not exact:
2343 2349 ui.status(xlist[1] % rel)
2344 2350 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2345 2351 if abs not in table: continue
2346 2352 # file has changed in dirstate
2347 2353 if in_mf:
2348 2354 handle(hitlist, backuphit)
2349 2355 elif misslist is not None:
2350 2356 handle(misslist, backupmiss)
2351 2357 else:
2352 2358 if exact: ui.warn(_('file not managed: %s\n') % rel)
2353 2359 break
2354 2360 else:
2355 2361 # file has not changed in dirstate
2356 2362 if node == parent:
2357 2363 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2358 2364 continue
2359 2365 if not in_mf:
2360 2366 handle(remove, False)
2361 2367 update[abs] = True
2362 2368
2363 2369 repo.dirstate.forget(forget[0])
2364 2370 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2365 2371 show_stats=False)
2366 2372 repo.dirstate.update(add[0], 'a')
2367 2373 repo.dirstate.update(undelete[0], 'n')
2368 2374 repo.dirstate.update(remove[0], 'r')
2369 2375 return r
2370 2376
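# Editorial walk-through of one dispatch-table row above (no new behaviour):
# a file reported as 'modified' is reverted with a .orig backup when the
# target manifest still contains it, and removed with a .orig backup when it
# does not; a 'removed' file is undeleted when the target has it and
# otherwise left untouched (its misslist is None).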
2371 2377 def rollback(ui, repo):
2372 2378 """roll back the last transaction in this repository
2373 2379
2374 2380 Roll back the last transaction in this repository, restoring the
2375 2381 project to its state prior to the transaction.
2376 2382
2377 2383 Transactions are used to encapsulate the effects of all commands
2378 2384 that create new changesets or propagate existing changesets into a
2379 2385 repository. For example, the following commands are transactional,
2380 2386 and their effects can be rolled back:
2381 2387
2382 2388 commit
2383 2389 import
2384 2390 pull
2385 2391 push (with this repository as destination)
2386 2392 unbundle
2387 2393
2388 2394 This command should be used with care. There is only one level of
2389 2395 rollback, and there is no way to undo a rollback.
2390 2396
2391 2397 This command is not intended for use on public repositories. Once
2392 2398 changes are visible for pull by other users, rolling a transaction
2393 2399 back locally is ineffective (someone else may already have pulled
2394 2400 the changes). Furthermore, a race is possible with readers of the
2395 2401 repository; for example an in-progress pull from the repository
2396 2402 may fail if a rollback is performed.
2397 2403 """
2398 2404 repo.undo()
2399 2405
2400 2406 def root(ui, repo):
2401 2407 """print the root (top) of the current working dir
2402 2408
2403 2409 Print the root directory of the current repository.
2404 2410 """
2405 2411 ui.write(repo.root + "\n")
2406 2412
2407 2413 def serve(ui, repo, **opts):
2408 2414 """export the repository via HTTP
2409 2415
2410 2416 Start a local HTTP repository browser and pull server.
2411 2417
2412 2418 By default, the server logs accesses to stdout and errors to
2413 2419 stderr. Use the "-A" and "-E" options to log to files.
2414 2420 """
2415 2421
2416 2422 if opts["stdio"]:
2417 2423 if repo is None:
2418 2424 raise hg.RepoError(_('no repo found'))
2419 2425 fin, fout = sys.stdin, sys.stdout
2420 2426 sys.stdout = sys.stderr
2421 2427
2422 2428 # Prevent insertion/deletion of CRs
2423 2429 util.set_binary(fin)
2424 2430 util.set_binary(fout)
2425 2431
2426 2432 def getarg():
2427 2433 argline = fin.readline()[:-1]
2428 2434 arg, l = argline.split()
2429 2435 val = fin.read(int(l))
2430 2436 return arg, val
2431 2437 def respond(v):
2432 2438 fout.write("%d\n" % len(v))
2433 2439 fout.write(v)
2434 2440 fout.flush()
2435 2441
2436 2442 lock = None
2437 2443
2438 2444 while 1:
2439 2445 cmd = fin.readline()[:-1]
2440 2446 if cmd == '':
2441 2447 return
2442 2448 if cmd == "heads":
2443 2449 h = repo.heads()
2444 2450 respond(" ".join(map(hex, h)) + "\n")
2445 2451 if cmd == "lock":
2446 2452 lock = repo.lock()
2447 2453 respond("")
2448 2454 if cmd == "unlock":
2449 2455 if lock:
2450 2456 lock.release()
2451 2457 lock = None
2452 2458 respond("")
2453 2459 elif cmd == "branches":
2454 2460 arg, nodes = getarg()
2455 2461 nodes = map(bin, nodes.split(" "))
2456 2462 r = []
2457 2463 for b in repo.branches(nodes):
2458 2464 r.append(" ".join(map(hex, b)) + "\n")
2459 2465 respond("".join(r))
2460 2466 elif cmd == "between":
2461 2467 arg, pairs = getarg()
2462 2468 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2463 2469 r = []
2464 2470 for b in repo.between(pairs):
2465 2471 r.append(" ".join(map(hex, b)) + "\n")
2466 2472 respond("".join(r))
2467 2473 elif cmd == "changegroup":
2468 2474 nodes = []
2469 2475 arg, roots = getarg()
2470 2476 nodes = map(bin, roots.split(" "))
2471 2477
2472 2478 cg = repo.changegroup(nodes, 'serve')
2473 2479 while 1:
2474 2480 d = cg.read(4096)
2475 2481 if not d:
2476 2482 break
2477 2483 fout.write(d)
2478 2484
2479 2485 fout.flush()
2480 2486
2481 2487 elif cmd == "addchangegroup":
2482 2488 if not lock:
2483 2489 respond("not locked")
2484 2490 continue
2485 2491 respond("")
2486 2492
2487 2493 r = repo.addchangegroup(fin, 'serve')
2488 2494 respond(str(r))
2489 2495
2490 2496 optlist = ("name templates style address port ipv6"
2491 2497 " accesslog errorlog webdir_conf")
2492 2498 for o in optlist.split():
2493 2499 if opts[o]:
2494 2500 ui.setconfig("web", o, opts[o])
2495 2501
2496 2502 if repo is None and not ui.config("web", "webdir_conf"):
2497 2503 raise hg.RepoError(_('no repo found'))
2498 2504
2499 2505 if opts['daemon'] and not opts['daemon_pipefds']:
2500 2506 rfd, wfd = os.pipe()
2501 2507 args = sys.argv[:]
2502 2508 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2503 2509 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2504 2510 args[0], args)
2505 2511 os.close(wfd)
2506 2512 os.read(rfd, 1)
2507 2513 os._exit(0)
2508 2514
2509 2515 try:
2510 2516 httpd = hgweb.create_server(ui, repo)
2511 2517 except socket.error, inst:
2512 2518 raise util.Abort(_('cannot start server: ') + inst.args[1])
2513 2519
2514 2520 if ui.verbose:
2515 2521 addr, port = httpd.socket.getsockname()
2516 2522 if addr == '0.0.0.0':
2517 2523 addr = socket.gethostname()
2518 2524 else:
2519 2525 try:
2520 2526 addr = socket.gethostbyaddr(addr)[0]
2521 2527 except socket.error:
2522 2528 pass
2523 2529 if port != 80:
2524 2530 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2525 2531 else:
2526 2532 ui.status(_('listening at http://%s/\n') % addr)
2527 2533
2528 2534 if opts['pid_file']:
2529 2535 fp = open(opts['pid_file'], 'w')
2530 2536 fp.write(str(os.getpid()))
2531 2537 fp.close()
2532 2538
2533 2539 if opts['daemon_pipefds']:
2534 2540 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2535 2541 os.close(rfd)
2536 2542 os.write(wfd, 'y')
2537 2543 os.close(wfd)
2538 2544 sys.stdout.flush()
2539 2545 sys.stderr.flush()
2540 2546 fd = os.open(util.nulldev, os.O_RDWR)
2541 2547 if fd != 0: os.dup2(fd, 0)
2542 2548 if fd != 1: os.dup2(fd, 1)
2543 2549 if fd != 2: os.dup2(fd, 2)
2544 2550 if fd not in (0, 1, 2): os.close(fd)
2545 2551
2546 2552 httpd.serve_forever()
2547 2553
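# Editorial sketch of the --stdio framing implemented by getarg()/respond()
# above (byte counts and node values are made up):
#
#   client -> server:   "between\n"             command name on its own line
#                       "pairs 81\n"            argument name and byte count
#                       <81 bytes of payload>   "<hexnode>-<hexnode> ..." pairs
#   server -> client:   "42\n"                  length of the response
#                       <42 bytes of payload>   hex nodes, space separated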
2548 2554 def status(ui, repo, *pats, **opts):
2549 2555 """show changed files in the working directory
2550 2556
2551 2557 Show changed files in the repository. If names are
2552 2558 given, only files that match are shown.
2553 2559
2554 2560 The codes used to show the status of files are:
2555 2561 M = modified
2556 2562 A = added
2557 2563 R = removed
2558 2564 ! = deleted, but still tracked
2559 2565 ? = not tracked
2560 2566 I = ignored (not shown by default)
2561 2567 """
2562 2568
2563 2569 show_ignored = opts['ignored'] and True or False
2564 2570 files, matchfn, anypats = matchpats(repo, pats, opts)
2565 2571 cwd = (pats and repo.getcwd()) or ''
2566 2572 modified, added, removed, deleted, unknown, ignored = [
2567 2573 [util.pathto(cwd, x) for x in n]
2568 2574 for n in repo.changes(files=files, match=matchfn,
2569 2575 show_ignored=show_ignored)]
2570 2576
2571 2577 changetypes = [('modified', 'M', modified),
2572 2578 ('added', 'A', added),
2573 2579 ('removed', 'R', removed),
2574 2580 ('deleted', '!', deleted),
2575 2581 ('unknown', '?', unknown),
2576 2582 ('ignored', 'I', ignored)]
2577 2583
2578 2584 end = opts['print0'] and '\0' or '\n'
2579 2585
2580 2586 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2581 2587 or changetypes):
2582 2588 if opts['no_status']:
2583 2589 format = "%%s%s" % end
2584 2590 else:
2585 2591 format = "%s %%s%s" % (char, end)
2586 2592
2587 2593 for f in changes:
2588 2594 ui.write(format % f)
2589 2595
2590 2596 def tag(ui, repo, name, rev_=None, **opts):
2591 2597 """add a tag for the current tip or a given revision
2592 2598
2593 2599 Name a particular revision using <name>.
2594 2600
2595 2601 Tags are used to name particular revisions of the repository and are
2596 2602 very useful to compare different revisions, to go back to significant
2597 2603 earlier versions or to mark branch points as releases, etc.
2598 2604
2599 2605 If no revision is given, the tip is used.
2600 2606
2601 2607 To facilitate version control, distribution, and merging of tags,
2602 2608 they are stored as a file named ".hgtags" which is managed
2603 2609 similarly to other project files and can be hand-edited if
2604 2610 necessary. The file '.hg/localtags' is used for local tags (not
2605 2611 shared among repositories).
2606 2612 """
2607 2613 if name == "tip":
2608 2614 raise util.Abort(_("the name 'tip' is reserved"))
2609 2615 if rev_ is not None:
2610 2616 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2611 2617 "please use 'hg tag [-r REV] NAME' instead\n"))
2612 2618 if opts['rev']:
2613 2619 raise util.Abort(_("use only one form to specify the revision"))
2614 2620 if opts['rev']:
2615 2621 rev_ = opts['rev']
2616 2622 if rev_:
2617 2623 r = hex(repo.lookup(rev_))
2618 2624 else:
2619 2625 r = hex(repo.changelog.tip())
2620 2626
2621 2627 disallowed = (revrangesep, '\r', '\n')
2622 2628 for c in disallowed:
2623 2629 if name.find(c) >= 0:
2624 2630 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2625 2631
2626 2632 repo.hook('pretag', throw=True, node=r, tag=name,
2627 2633 local=int(not not opts['local']))
2628 2634
2629 2635 if opts['local']:
2630 2636 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2631 2637 repo.hook('tag', node=r, tag=name, local=1)
2632 2638 return
2633 2639
2634 2640 for x in repo.changes():
2635 2641 if ".hgtags" in x:
2636 2642 raise util.Abort(_("working copy of .hgtags is changed "
2637 2643 "(please commit .hgtags manually)"))
2638 2644
2639 2645 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2640 2646 if repo.dirstate.state(".hgtags") == '?':
2641 2647 repo.add([".hgtags"])
2642 2648
2643 2649 message = (opts['message'] or
2644 2650 _("Added tag %s for changeset %s") % (name, r))
2645 2651 try:
2646 2652 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2647 2653 repo.hook('tag', node=r, tag=name, local=0)
2648 2654 except ValueError, inst:
2649 2655 raise util.Abort(str(inst))
2650 2656
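# Editorial note: each entry appended to .hgtags (or .hg/localtags) above is
# one line of the form "<40-char hex changeset id> <tag name>", for example
# (fictitious id):
#   0123456789abcdef0123456789abcdef01234567 v1.0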
2651 2657 def tags(ui, repo):
2652 2658 """list repository tags
2653 2659
2654 2660 List the repository tags.
2655 2661
2656 2662 This lists both regular and local tags.
2657 2663 """
2658 2664
2659 2665 l = repo.tagslist()
2660 2666 l.reverse()
2661 2667 for t, n in l:
2662 2668 try:
2663 2669 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2664 2670 except KeyError:
2665 2671 r = " ?:?"
2666 2672 if ui.quiet:
2667 2673 ui.write("%s\n" % t)
2668 2674 else:
2669 2675 ui.write("%-30s %s\n" % (t, r))
2670 2676
2671 2677 def tip(ui, repo, **opts):
2672 2678 """show the tip revision
2673 2679
2674 2680 Show the tip revision.
2675 2681 """
2676 2682 n = repo.changelog.tip()
2677 2683 br = None
2678 2684 if opts['branches']:
2679 2685 br = repo.branchlookup([n])
2680 2686 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2681 2687 if opts['patch']:
2682 2688 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2683 2689
2684 2690 def unbundle(ui, repo, fname, **opts):
2685 2691 """apply a changegroup file
2686 2692
2687 2693 Apply a compressed changegroup file generated by the bundle
2688 2694 command.
2689 2695 """
2690 2696 f = urllib.urlopen(fname)
2691 2697
2692 2698 header = f.read(6)
2693 2699 if not header.startswith("HG"):
2694 2700 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2695 2701 elif not header.startswith("HG10"):
2696 2702 raise util.Abort(_("%s: unknown bundle version") % fname)
2697 2703 elif header == "HG10BZ":
2698 2704 def generator(f):
2699 2705 zd = bz2.BZ2Decompressor()
2700 2706 zd.decompress("BZ")
2701 2707 for chunk in f:
2702 2708 yield zd.decompress(chunk)
2703 2709 elif header == "HG10UN":
2704 2710 def generator(f):
2705 2711 for chunk in f:
2706 2712 yield chunk
2707 2713 else:
2708 2714 raise util.Abort(_("%s: unknown bundle compression type")
2709 2715 % fname)
2710 2716 gen = generator(util.filechunkiter(f, 4096))
2711 2717 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2712 2718 return postincoming(ui, repo, modheads, opts['update'])
2713 2719
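# Editorial note on the 6-byte headers dispatched on above:
#   "HG10BZ"  bz2-compressed changegroup; the decompressor is primed with the
#             "BZ" that reading the header already consumed
#   "HG10UN"  uncompressed changegroup
# A header starting with "HG" but not "HG10" is a newer, unknown bundle
# version; anything else is not a Mercurial bundle at all.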
2714 2720 def undo(ui, repo):
2715 2721 """undo the last commit or pull (DEPRECATED)
2716 2722
2717 2723 (DEPRECATED)
2718 2724 This command is now deprecated and will be removed in a future
2719 2725 release. Please use the rollback command instead. For usage
2720 2726 instructions, see the rollback command.
2721 2727 """
2722 2728 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2723 2729 repo.undo()
2724 2730
2725 2731 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2726 2732 branch=None, **opts):
2727 2733 """update or merge working directory
2728 2734
2729 2735 Update the working directory to the specified revision.
2730 2736
2731 2737 If there are no outstanding changes in the working directory and
2732 2738 there is a linear relationship between the current version and the
2733 2739 requested version, the result is the requested version.
2734 2740
2735 2741 Otherwise the result is a merge between the contents of the
2736 2742 current working directory and the requested version. Files that
2737 2743 changed between either parent are marked as changed for the next
2738 2744 commit and a commit must be performed before any further updates
2739 2745 are allowed.
2740 2746
2741 2747 By default, update will refuse to run if doing so would require
2742 2748 merging or discarding local changes.
2743 2749 """
2744 2750 if branch:
2745 2751 br = repo.branchlookup(branch=branch)
2746 2752 found = []
2747 2753 for x in br:
2748 2754 if branch in br[x]:
2749 2755 found.append(x)
2750 2756 if len(found) > 1:
2751 2757 ui.warn(_("Found multiple heads for %s\n") % branch)
2752 2758 for x in found:
2753 2759 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2754 2760 return 1
2755 2761 if len(found) == 1:
2756 2762 node = found[0]
2757 2763 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2758 2764 else:
2759 2765 ui.warn(_("branch %s not found\n") % (branch))
2760 2766 return 1
2761 2767 else:
2762 2768 node = node and repo.lookup(node) or repo.changelog.tip()
2763 2769 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2764 2770
2765 2771 def verify(ui, repo):
2766 2772 """verify the integrity of the repository
2767 2773
2768 2774 Verify the integrity of the current repository.
2769 2775
2770 2776 This will perform an extensive check of the repository's
2771 2777 integrity, validating the hashes and checksums of each entry in
2772 2778 the changelog, manifest, and tracked files, as well as the
2773 2779 integrity of their crosslinks and indices.
2774 2780 """
2775 2781 return repo.verify()
2776 2782
2777 2783 # Command options and aliases are listed here, alphabetically
2778 2784
2779 2785 table = {
2780 2786 "^add":
2781 2787 (add,
2782 2788 [('I', 'include', [], _('include names matching the given patterns')),
2783 2789 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2784 2790 _('hg add [OPTION]... [FILE]...')),
2785 2791 "debugaddremove|addremove":
2786 2792 (addremove,
2787 2793 [('I', 'include', [], _('include names matching the given patterns')),
2788 2794 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2789 2795 _('hg addremove [OPTION]... [FILE]...')),
2790 2796 "^annotate":
2791 2797 (annotate,
2792 2798 [('r', 'rev', '', _('annotate the specified revision')),
2793 2799 ('a', 'text', None, _('treat all files as text')),
2794 2800 ('u', 'user', None, _('list the author')),
2795 2801 ('d', 'date', None, _('list the date')),
2796 2802 ('n', 'number', None, _('list the revision number (default)')),
2797 2803 ('c', 'changeset', None, _('list the changeset')),
2798 2804 ('I', 'include', [], _('include names matching the given patterns')),
2799 2805 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2800 2806 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2801 2807 "archive":
2802 2808 (archive,
2803 2809 [('', 'no-decode', None, _('do not pass files through decoders')),
2804 2810 ('p', 'prefix', '', _('directory prefix for files in archive')),
2805 2811 ('r', 'rev', '', _('revision to distribute')),
2806 2812 ('t', 'type', '', _('type of distribution to create')),
2807 2813 ('I', 'include', [], _('include names matching the given patterns')),
2808 2814 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2809 2815 _('hg archive [OPTION]... DEST')),
2810 2816 "backout":
2811 2817 (backout,
2812 2818 [('', 'merge', None,
2813 2819 _('merge with old dirstate parent after backout')),
2814 2820 ('m', 'message', '', _('use <text> as commit message')),
2815 2821 ('l', 'logfile', '', _('read commit message from <file>')),
2816 2822 ('d', 'date', '', _('record datecode as commit date')),
2817 2823 ('u', 'user', '', _('record user as committer')),
2818 2824 ('I', 'include', [], _('include names matching the given patterns')),
2819 2825 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2820 2826 _('hg backout [OPTION]... REV')),
2821 2827 "bundle":
2822 2828 (bundle,
2823 2829 [('f', 'force', None,
2824 2830 _('run even when remote repository is unrelated'))],
2825 2831 _('hg bundle FILE DEST')),
2826 2832 "cat":
2827 2833 (cat,
2828 2834 [('o', 'output', '', _('print output to file with formatted name')),
2829 2835 ('r', 'rev', '', _('print the given revision')),
2830 2836 ('I', 'include', [], _('include names matching the given patterns')),
2831 2837 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2832 2838 _('hg cat [OPTION]... FILE...')),
2833 2839 "^clone":
2834 2840 (clone,
2835 2841 [('U', 'noupdate', None, _('do not update the new working directory')),
2836 2842 ('r', 'rev', [],
2837 2843 _('a changeset you would like to have after cloning')),
2838 2844 ('', 'pull', None, _('use pull protocol to copy metadata')),
2839 2845 ('e', 'ssh', '', _('specify ssh command to use')),
2840 2846 ('', 'remotecmd', '',
2841 2847 _('specify hg command to run on the remote side'))],
2842 2848 _('hg clone [OPTION]... SOURCE [DEST]')),
2843 2849 "^commit|ci":
2844 2850 (commit,
2845 2851 [('A', 'addremove', None,
2846 2852 _('mark new/missing files as added/removed before committing')),
2847 2853 ('m', 'message', '', _('use <text> as commit message')),
2848 2854 ('l', 'logfile', '', _('read the commit message from <file>')),
2849 2855 ('d', 'date', '', _('record datecode as commit date')),
2850 2856 ('u', 'user', '', _('record user as committer')),
2851 2857 ('I', 'include', [], _('include names matching the given patterns')),
2852 2858 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2853 2859 _('hg commit [OPTION]... [FILE]...')),
2854 2860 "copy|cp":
2855 2861 (copy,
2856 2862 [('A', 'after', None, _('record a copy that has already occurred')),
2857 2863 ('f', 'force', None,
2858 2864 _('forcibly copy over an existing managed file')),
2859 2865 ('I', 'include', [], _('include names matching the given patterns')),
2860 2866 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2861 2867 _('hg copy [OPTION]... [SOURCE]... DEST')),
2862 2868 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2863 2869 "debugcomplete":
2864 2870 (debugcomplete,
2865 2871 [('o', 'options', None, _('show the command options'))],
2866 2872 _('debugcomplete [-o] CMD')),
2867 2873 "debugrebuildstate":
2868 2874 (debugrebuildstate,
2869 2875 [('r', 'rev', '', _('revision to rebuild to'))],
2870 2876 _('debugrebuildstate [-r REV] [REV]')),
2871 2877 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2872 2878 "debugconfig": (debugconfig, [], _('debugconfig')),
2873 2879 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2874 2880 "debugstate": (debugstate, [], _('debugstate')),
2875 2881 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2876 2882 "debugindex": (debugindex, [], _('debugindex FILE')),
2877 2883 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2878 2884 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2879 2885 "debugwalk":
2880 2886 (debugwalk,
2881 2887 [('I', 'include', [], _('include names matching the given patterns')),
2882 2888 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2883 2889 _('debugwalk [OPTION]... [FILE]...')),
2884 2890 "^diff":
2885 2891 (diff,
2886 2892 [('r', 'rev', [], _('revision')),
2887 2893 ('a', 'text', None, _('treat all files as text')),
2888 2894 ('p', 'show-function', None,
2889 2895 _('show which function each change is in')),
2890 2896 ('w', 'ignore-all-space', None,
2891 2897 _('ignore white space when comparing lines')),
2892 2898 ('I', 'include', [], _('include names matching the given patterns')),
2893 2899 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2894 2900 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2895 2901 "^export":
2896 2902 (export,
2897 2903 [('o', 'output', '', _('print output to file with formatted name')),
2898 2904 ('a', 'text', None, _('treat all files as text')),
2899 2905 ('', 'switch-parent', None, _('diff against the second parent'))],
2900 2906 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2901 2907 "debugforget|forget":
2902 2908 (forget,
2903 2909 [('I', 'include', [], _('include names matching the given patterns')),
2904 2910 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2905 2911 _('hg forget [OPTION]... FILE...')),
2906 2912 "grep":
2907 2913 (grep,
2908 2914 [('0', 'print0', None, _('end fields with NUL')),
2909 2915 ('', 'all', None, _('print all revisions that match')),
2910 2916 ('i', 'ignore-case', None, _('ignore case when matching')),
2911 2917 ('l', 'files-with-matches', None,
2912 2918 _('print only filenames and revs that match')),
2913 2919 ('n', 'line-number', None, _('print matching line numbers')),
2914 2920 ('r', 'rev', [], _('search in given revision range')),
2915 2921 ('u', 'user', None, _('print user who committed change')),
2916 2922 ('I', 'include', [], _('include names matching the given patterns')),
2917 2923 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2918 2924 _('hg grep [OPTION]... PATTERN [FILE]...')),
2919 2925 "heads":
2920 2926 (heads,
2921 2927 [('b', 'branches', None, _('show branches')),
2922 2928 ('', 'style', '', _('display using template map file')),
2923 2929 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2924 2930 ('', 'template', '', _('display with template'))],
2925 2931 _('hg heads [-b] [-r <rev>]')),
2926 2932 "help": (help_, [], _('hg help [COMMAND]')),
2927 2933 "identify|id": (identify, [], _('hg identify')),
2928 2934 "import|patch":
2929 2935 (import_,
2930 2936 [('p', 'strip', 1,
2931 2937 _('directory strip option for patch. This has the same\n'
2932 2938 'meaning as the corresponding patch option')),
2933 2939 ('b', 'base', '', _('base path')),
2934 2940 ('f', 'force', None,
2935 2941 _('skip check for outstanding uncommitted changes'))],
2936 2942 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2937 2943 "incoming|in": (incoming,
2938 2944 [('M', 'no-merges', None, _('do not show merges')),
2939 2945 ('f', 'force', None,
2940 2946 _('run even when remote repository is unrelated')),
2941 2947 ('', 'style', '', _('display using template map file')),
2942 2948 ('n', 'newest-first', None, _('show newest record first')),
2943 2949 ('', 'bundle', '', _('file to store the bundles into')),
2944 2950 ('p', 'patch', None, _('show patch')),
2945 2951 ('', 'template', '', _('display with template')),
2946 2952 ('e', 'ssh', '', _('specify ssh command to use')),
2947 2953 ('', 'remotecmd', '',
2948 2954 _('specify hg command to run on the remote side'))],
2949 2955 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2950 2956 "^init": (init, [], _('hg init [DEST]')),
2951 2957 "locate":
2952 2958 (locate,
2953 2959 [('r', 'rev', '', _('search the repository as it stood at rev')),
2954 2960 ('0', 'print0', None,
2955 2961 _('end filenames with NUL, for use with xargs')),
2956 2962 ('f', 'fullpath', None,
2957 2963 _('print complete paths from the filesystem root')),
2958 2964 ('I', 'include', [], _('include names matching the given patterns')),
2959 2965 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2960 2966 _('hg locate [OPTION]... [PATTERN]...')),
2961 2967 "^log|history":
2962 2968 (log,
2963 2969 [('b', 'branches', None, _('show branches')),
2964 2970 ('k', 'keyword', [], _('search for a keyword')),
2965 2971 ('l', 'limit', '', _('limit number of changes displayed')),
2966 2972 ('r', 'rev', [], _('show the specified revision or range')),
2967 2973 ('M', 'no-merges', None, _('do not show merges')),
2968 2974 ('', 'style', '', _('display using template map file')),
2969 2975 ('m', 'only-merges', None, _('show only merges')),
2970 2976 ('p', 'patch', None, _('show patch')),
2971 2977 ('', 'template', '', _('display with template')),
2972 2978 ('I', 'include', [], _('include names matching the given patterns')),
2973 2979 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2974 2980 _('hg log [OPTION]... [FILE]')),
2975 2981 "manifest": (manifest, [], _('hg manifest [REV]')),
2976 2982 "merge":
2977 2983 (merge,
2978 2984 [('b', 'branch', '', _('merge with head of a specific branch')),
2979 2985 ('f', 'force', None, _('force a merge with outstanding changes'))],
2980 2986 _('hg merge [-b TAG] [-f] [REV]')),
2981 2987 "outgoing|out": (outgoing,
2982 2988 [('M', 'no-merges', None, _('do not show merges')),
2983 2989 ('f', 'force', None,
2984 2990 _('run even when remote repository is unrelated')),
2985 2991 ('p', 'patch', None, _('show patch')),
2986 2992 ('', 'style', '', _('display using template map file')),
2987 2993 ('n', 'newest-first', None, _('show newest record first')),
2988 2994 ('', 'template', '', _('display with template')),
2989 2995 ('e', 'ssh', '', _('specify ssh command to use')),
2990 2996 ('', 'remotecmd', '',
2991 2997 _('specify hg command to run on the remote side'))],
2992 2998 _('hg outgoing [-M] [-p] [-n] [DEST]')),
2993 2999 "^parents":
2994 3000 (parents,
2995 3001 [('b', 'branches', None, _('show branches')),
2996 3002 ('', 'style', '', _('display using template map file')),
2997 3003 ('', 'template', '', _('display with template'))],
2998 3004 _('hg parents [-b] [REV]')),
2999 3005 "paths": (paths, [], _('hg paths [NAME]')),
3000 3006 "^pull":
3001 3007 (pull,
3002 3008 [('u', 'update', None,
3003 3009 _('update the working directory to tip after pull')),
3004 3010 ('e', 'ssh', '', _('specify ssh command to use')),
3005 3011 ('f', 'force', None,
3006 3012 _('run even when remote repository is unrelated')),
3007 3013 ('r', 'rev', [], _('a specific revision you would like to pull')),
3008 3014 ('', 'remotecmd', '',
3009 3015 _('specify hg command to run on the remote side'))],
3010 3016 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3011 3017 "^push":
3012 3018 (push,
3013 3019 [('f', 'force', None, _('force push')),
3014 3020 ('e', 'ssh', '', _('specify ssh command to use')),
3015 3021 ('r', 'rev', [], _('a specific revision you would like to push')),
3016 3022 ('', 'remotecmd', '',
3017 3023 _('specify hg command to run on the remote side'))],
3018 3024 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3019 3025 "debugrawcommit|rawcommit":
3020 3026 (rawcommit,
3021 3027 [('p', 'parent', [], _('parent')),
3022 3028 ('d', 'date', '', _('date code')),
3023 3029 ('u', 'user', '', _('user')),
3024 3030 ('F', 'files', '', _('file list')),
3025 3031 ('m', 'message', '', _('commit message')),
3026 3032 ('l', 'logfile', '', _('commit message file'))],
3027 3033 _('hg debugrawcommit [OPTION]... [FILE]...')),
3028 3034 "recover": (recover, [], _('hg recover')),
3029 3035 "^remove|rm":
3030 3036 (remove,
3031 3037 [('A', 'after', None, _('record remove that has already occurred')),
3032 3038 ('f', 'force', None, _('remove file even if modified')),
3033 3039 ('I', 'include', [], _('include names matching the given patterns')),
3034 3040 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3035 3041 _('hg remove [OPTION]... FILE...')),
3036 3042 "rename|mv":
3037 3043 (rename,
3038 3044 [('A', 'after', None, _('record a rename that has already occurred')),
3039 3045 ('f', 'force', None,
3040 3046 _('forcibly copy over an existing managed file')),
3041 3047 ('I', 'include', [], _('include names matching the given patterns')),
3042 3048 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3043 3049 _('hg rename [OPTION]... SOURCE... DEST')),
3044 3050 "^revert":
3045 3051 (revert,
3046 3052 [('r', 'rev', '', _('revision to revert to')),
3047 3053 ('', 'no-backup', None, _('do not save backup copies of files')),
3048 3054 ('I', 'include', [], _('include names matching given patterns')),
3049 3055 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3050 3056 _('hg revert [-r REV] [NAME]...')),
3051 3057 "rollback": (rollback, [], _('hg rollback')),
3052 3058 "root": (root, [], _('hg root')),
3053 3059 "^serve":
3054 3060 (serve,
3055 3061 [('A', 'accesslog', '', _('name of access log file to write to')),
3056 3062 ('d', 'daemon', None, _('run server in background')),
3057 3063 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3058 3064 ('E', 'errorlog', '', _('name of error log file to write to')),
3059 3065 ('p', 'port', 0, _('port to use (default: 8000)')),
3060 3066 ('a', 'address', '', _('address to use')),
3061 3067 ('n', 'name', '',
3062 3068 _('name to show in web pages (default: working dir)')),
3063 3069 ('', 'webdir-conf', '', _('name of the webdir config file'
3064 3070 ' (serve more than one repo)')),
3065 3071 ('', 'pid-file', '', _('name of file to write process ID to')),
3066 3072 ('', 'stdio', None, _('for remote clients')),
3067 3073 ('t', 'templates', '', _('web templates to use')),
3068 3074 ('', 'style', '', _('template style to use')),
3069 3075 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3070 3076 _('hg serve [OPTION]...')),
3071 3077 "^status|st":
3072 3078 (status,
3073 3079 [('m', 'modified', None, _('show only modified files')),
3074 3080 ('a', 'added', None, _('show only added files')),
3075 3081 ('r', 'removed', None, _('show only removed files')),
3076 3082 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3077 3083 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3078 3084 ('i', 'ignored', None, _('show ignored files')),
3079 3085 ('n', 'no-status', None, _('hide status prefix')),
3080 3086 ('0', 'print0', None,
3081 3087 _('end filenames with NUL, for use with xargs')),
3082 3088 ('I', 'include', [], _('include names matching the given patterns')),
3083 3089 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3084 3090 _('hg status [OPTION]... [FILE]...')),
3085 3091 "tag":
3086 3092 (tag,
3087 3093 [('l', 'local', None, _('make the tag local')),
3088 3094 ('m', 'message', '', _('message for tag commit log entry')),
3089 3095 ('d', 'date', '', _('record datecode as commit date')),
3090 3096 ('u', 'user', '', _('record user as committer')),
3091 3097 ('r', 'rev', '', _('revision to tag'))],
3092 3098 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3093 3099 "tags": (tags, [], _('hg tags')),
3094 3100 "tip":
3095 3101 (tip,
3096 3102 [('b', 'branches', None, _('show branches')),
3097 3103 ('', 'style', '', _('display using template map file')),
3098 3104 ('p', 'patch', None, _('show patch')),
3099 3105 ('', 'template', '', _('display with template'))],
3100 3106 _('hg tip [-b] [-p]')),
3101 3107 "unbundle":
3102 3108 (unbundle,
3103 3109 [('u', 'update', None,
3104 3110 _('update the working directory to tip after unbundle'))],
3105 3111 _('hg unbundle [-u] FILE')),
3106 3112 "debugundo|undo": (undo, [], _('hg undo')),
3107 3113 "^update|up|checkout|co":
3108 3114 (update,
3109 3115 [('b', 'branch', '', _('checkout the head of a specific branch')),
3110 3116 ('m', 'merge', None, _('allow merging of branches')),
3111 3117 ('C', 'clean', None, _('overwrite locally modified files')),
3112 3118 ('f', 'force', None, _('force a merge with outstanding changes'))],
3113 3119 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3114 3120 "verify": (verify, [], _('hg verify')),
3115 3121 "version": (show_version, [], _('hg version')),
3116 3122 }
3117 3123
3118 3124 globalopts = [
3119 3125 ('R', 'repository', '',
3120 3126 _('repository root directory or symbolic path name')),
3121 3127 ('', 'cwd', '', _('change working directory')),
3122 3128 ('y', 'noninteractive', None,
3123 3129 _('do not prompt, assume \'yes\' for any required answers')),
3124 3130 ('q', 'quiet', None, _('suppress output')),
3125 3131 ('v', 'verbose', None, _('enable additional output')),
3126 3132 ('', 'debug', None, _('enable debugging output')),
3127 3133 ('', 'debugger', None, _('start debugger')),
3128 3134 ('', 'traceback', None, _('print traceback on exception')),
3129 3135 ('', 'time', None, _('time how long the command takes')),
3130 3136 ('', 'profile', None, _('print command execution profile')),
3131 3137 ('', 'version', None, _('output version information and exit')),
3132 3138 ('h', 'help', None, _('display help and exit')),
3133 3139 ]
3134 3140
3135 3141 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3136 3142 " debugindex debugindexdot")
3137 3143 optionalrepo = ("paths serve debugconfig")
3138 3144
3139 3145 def findpossible(cmd):
3140 3146 """
3141 3147 Return cmd -> (aliases, command table entry)
3142 3148 for each matching command.
3143 3149 Return debug commands (or their aliases) only if no normal command matches.
3144 3150 """
3145 3151 choice = {}
3146 3152 debugchoice = {}
3147 3153 for e in table.keys():
3148 3154 aliases = e.lstrip("^").split("|")
3149 3155 found = None
3150 3156 if cmd in aliases:
3151 3157 found = cmd
3152 3158 else:
3153 3159 for a in aliases:
3154 3160 if a.startswith(cmd):
3155 3161 found = a
3156 3162 break
3157 3163 if found is not None:
3158 3164 if aliases[0].startswith("debug"):
3159 3165 debugchoice[found] = (aliases, table[e])
3160 3166 else:
3161 3167 choice[found] = (aliases, table[e])
3162 3168
3163 3169 if not choice and debugchoice:
3164 3170 choice = debugchoice
3165 3171
3166 3172 return choice
3167 3173
3168 3174 def find(cmd):
3169 3175 """Return (aliases, command table entry) for command string."""
3170 3176 choice = findpossible(cmd)
3171 3177
3172 3178 if choice.has_key(cmd):
3173 3179 return choice[cmd]
3174 3180
3175 3181 if len(choice) > 1:
3176 3182 clist = choice.keys()
3177 3183 clist.sort()
3178 3184 raise AmbiguousCommand(cmd, clist)
3179 3185
3180 3186 if choice:
3181 3187 return choice.values()[0]
3182 3188
3183 3189 raise UnknownCommand(cmd)
3184 3190
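# Editor's illustration (not part of the original source): findpossible() and find()
# above resolve a typed command name by exact alias match first, then by unambiguous
# prefix, preferring non-debug commands.  A minimal standalone sketch of that prefix
# resolution, using a made-up alias table:
def resolve_command(name, aliases_by_command):
    """aliases_by_command: dict mapping canonical name -> list of aliases."""
    matches = {}
    for canonical, aliases in aliases_by_command.items():
        if name in aliases:
            return canonical                      # exact alias match wins outright
        if any(a.startswith(name) for a in aliases):
            matches[canonical] = aliases
    if len(matches) == 1:
        return list(matches)[0]
    raise ValueError("ambiguous or unknown command: %s" % name)

# resolve_command("st", {"status": ["status", "st"], "serve": ["serve"]}) -> "status"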
3185 3191 def catchterm(*args):
3186 3192 raise util.SignalInterrupt
3187 3193
3188 3194 def run():
3189 3195 sys.exit(dispatch(sys.argv[1:]))
3190 3196
3191 3197 class ParseError(Exception):
3192 3198 """Exception raised on errors in parsing the command line."""
3193 3199
3194 3200 def parse(ui, args):
3195 3201 options = {}
3196 3202 cmdoptions = {}
3197 3203
3198 3204 try:
3199 3205 args = fancyopts.fancyopts(args, globalopts, options)
3200 3206 except fancyopts.getopt.GetoptError, inst:
3201 3207 raise ParseError(None, inst)
3202 3208
3203 3209 if args:
3204 3210 cmd, args = args[0], args[1:]
3205 3211 aliases, i = find(cmd)
3206 3212 cmd = aliases[0]
3207 3213 defaults = ui.config("defaults", cmd)
3208 3214 if defaults:
3209 3215 args = defaults.split() + args
3210 3216 c = list(i[1])
3211 3217 else:
3212 3218 cmd = None
3213 3219 c = []
3214 3220
3215 3221 # combine global options into local
3216 3222 for o in globalopts:
3217 3223 c.append((o[0], o[1], options[o[1]], o[3]))
3218 3224
3219 3225 try:
3220 3226 args = fancyopts.fancyopts(args, c, cmdoptions)
3221 3227 except fancyopts.getopt.GetoptError, inst:
3222 3228 raise ParseError(cmd, inst)
3223 3229
3224 3230 # separate global options back out
3225 3231 for o in globalopts:
3226 3232 n = o[1]
3227 3233 options[n] = cmdoptions[n]
3228 3234 del cmdoptions[n]
3229 3235
3230 3236 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3231 3237
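# Editor's illustration (not part of the original source): parse() above folds the
# global option table into the command's own table, parses everything in one pass,
# then pulls the global values back out.  A standalone sketch of that final
# "separate global options back out" step, with made-up option names:
def split_options(parsed, global_names):
    """Split one parsed option dict into (global_opts, command_opts)."""
    global_opts, command_opts = {}, {}
    for name, value in parsed.items():
        if name in global_names:
            global_opts[name] = value
        else:
            command_opts[name] = value
    return global_opts, command_opts

# split_options({"verbose": True, "rev": "tip"}, ["verbose", "quiet"])
#   -> ({"verbose": True}, {"rev": "tip"})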
3232 3238 def dispatch(args):
3233 3239 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3234 3240 num = getattr(signal, name, None)
3235 3241 if num: signal.signal(num, catchterm)
3236 3242
3237 3243 try:
3238 3244 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3239 3245 except util.Abort, inst:
3240 3246 sys.stderr.write(_("abort: %s\n") % inst)
3241 3247 return -1
3242 3248
3243 3249 external = []
3244 3250 for x in u.extensions():
3245 3251 try:
3246 3252 if x[1]:
3247 3253 mod = imp.load_source(x[0], x[1])
3248 3254 else:
3249 3255 def importh(name):
3250 3256 mod = __import__(name)
3251 3257 components = name.split('.')
3252 3258 for comp in components[1:]:
3253 3259 mod = getattr(mod, comp)
3254 3260 return mod
3255 3261 try:
3256 3262 mod = importh("hgext." + x[0])
3257 3263 except ImportError:
3258 3264 mod = importh(x[0])
3259 3265 external.append(mod)
3260 3266 except Exception, inst:
3261 3267 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3262 3268 if u.traceback:
3263 3269 traceback.print_exc()
3264 3270 return 1
3265 3271 continue
3266 3272
3267 3273 for x in external:
3268 3274 cmdtable = getattr(x, 'cmdtable', {})
3269 3275 for t in cmdtable:
3270 3276 if t in table:
3271 3277 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3272 3278 table.update(cmdtable)
3273 3279
3274 3280 try:
3275 3281 cmd, func, args, options, cmdoptions = parse(u, args)
3276 3282 if options["time"]:
3277 3283 def get_times():
3278 3284 t = os.times()
3279 3285 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3280 3286 t = (t[0], t[1], t[2], t[3], time.clock())
3281 3287 return t
3282 3288 s = get_times()
3283 3289 def print_time():
3284 3290 t = get_times()
3285 3291 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3286 3292 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3287 3293 atexit.register(print_time)
3288 3294
3289 3295 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3290 3296 not options["noninteractive"], options["traceback"])
3291 3297
3292 3298 # enter the debugger before command execution
3293 3299 if options['debugger']:
3294 3300 pdb.set_trace()
3295 3301
3296 3302 try:
3297 3303 if options['cwd']:
3298 3304 try:
3299 3305 os.chdir(options['cwd'])
3300 3306 except OSError, inst:
3301 3307 raise util.Abort('%s: %s' %
3302 3308 (options['cwd'], inst.strerror))
3303 3309
3304 3310 path = u.expandpath(options["repository"]) or ""
3305 3311 repo = path and hg.repository(u, path=path) or None
3306 3312
3307 3313 if options['help']:
3308 3314 return help_(u, cmd, options['version'])
3309 3315 elif options['version']:
3310 3316 return show_version(u)
3311 3317 elif not cmd:
3312 3318 return help_(u, 'shortlist')
3313 3319
3314 3320 if cmd not in norepo.split():
3315 3321 try:
3316 3322 if not repo:
3317 3323 repo = hg.repository(u, path=path)
3318 3324 u = repo.ui
3319 3325 for x in external:
3320 3326 if hasattr(x, 'reposetup'):
3321 3327 x.reposetup(u, repo)
3322 3328 except hg.RepoError:
3323 3329 if cmd not in optionalrepo.split():
3324 3330 raise
3325 3331 d = lambda: func(u, repo, *args, **cmdoptions)
3326 3332 else:
3327 3333 d = lambda: func(u, *args, **cmdoptions)
3328 3334
3329 3335 try:
3330 3336 if options['profile']:
3331 3337 import hotshot, hotshot.stats
3332 3338 prof = hotshot.Profile("hg.prof")
3333 3339 try:
3334 3340 try:
3335 3341 return prof.runcall(d)
3336 3342 except:
3337 3343 try:
3338 3344 u.warn(_('exception raised - generating '
3339 3345 'profile anyway\n'))
3340 3346 except:
3341 3347 pass
3342 3348 raise
3343 3349 finally:
3344 3350 prof.close()
3345 3351 stats = hotshot.stats.load("hg.prof")
3346 3352 stats.strip_dirs()
3347 3353 stats.sort_stats('time', 'calls')
3348 3354 stats.print_stats(40)
3349 3355 else:
3350 3356 return d()
3351 3357 finally:
3352 3358 u.flush()
3353 3359 except:
3354 3360 # enter the debugger when we hit an exception
3355 3361 if options['debugger']:
3356 3362 pdb.post_mortem(sys.exc_info()[2])
3357 3363 if u.traceback:
3358 3364 traceback.print_exc()
3359 3365 raise
3360 3366 except ParseError, inst:
3361 3367 if inst.args[0]:
3362 3368 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3363 3369 help_(u, inst.args[0])
3364 3370 else:
3365 3371 u.warn(_("hg: %s\n") % inst.args[1])
3366 3372 help_(u, 'shortlist')
3367 3373 except AmbiguousCommand, inst:
3368 3374 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3369 3375 (inst.args[0], " ".join(inst.args[1])))
3370 3376 except UnknownCommand, inst:
3371 3377 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3372 3378 help_(u, 'shortlist')
3373 3379 except hg.RepoError, inst:
3374 3380 u.warn(_("abort: %s!\n") % inst)
3375 3381 except lock.LockHeld, inst:
3376 3382 if inst.errno == errno.ETIMEDOUT:
3377 3383 reason = _('timed out waiting for lock held by %s') % inst.locker
3378 3384 else:
3379 3385 reason = _('lock held by %s') % inst.locker
3380 3386 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3381 3387 except lock.LockUnavailable, inst:
3382 3388 u.warn(_("abort: could not lock %s: %s\n") %
3383 3389 (inst.desc or inst.filename, inst.strerror))
3384 3390 except revlog.RevlogError, inst:
3385 3391 u.warn(_("abort: "), inst, "!\n")
3386 3392 except util.SignalInterrupt:
3387 3393 u.warn(_("killed!\n"))
3388 3394 except KeyboardInterrupt:
3389 3395 try:
3390 3396 u.warn(_("interrupted!\n"))
3391 3397 except IOError, inst:
3392 3398 if inst.errno == errno.EPIPE:
3393 3399 if u.debugflag:
3394 3400 u.warn(_("\nbroken pipe\n"))
3395 3401 else:
3396 3402 raise
3397 3403 except IOError, inst:
3398 3404 if hasattr(inst, "code"):
3399 3405 u.warn(_("abort: %s\n") % inst)
3400 3406 elif hasattr(inst, "reason"):
3401 3407 u.warn(_("abort: error: %s\n") % inst.reason[1])
3402 3408 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3403 3409 if u.debugflag:
3404 3410 u.warn(_("broken pipe\n"))
3405 3411 elif getattr(inst, "strerror", None):
3406 3412 if getattr(inst, "filename", None):
3407 3413 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3408 3414 else:
3409 3415 u.warn(_("abort: %s\n") % inst.strerror)
3410 3416 else:
3411 3417 raise
3412 3418 except OSError, inst:
3413 3419 if hasattr(inst, "filename"):
3414 3420 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3415 3421 else:
3416 3422 u.warn(_("abort: %s\n") % inst.strerror)
3417 3423 except util.Abort, inst:
3418 3424 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3419 3425 except TypeError, inst:
3420 3426 # was this an argument error?
3421 3427 tb = traceback.extract_tb(sys.exc_info()[2])
3422 3428 if len(tb) > 2: # no
3423 3429 raise
3424 3430 u.debug(inst, "\n")
3425 3431 u.warn(_("%s: invalid arguments\n") % cmd)
3426 3432 help_(u, cmd)
3427 3433 except SystemExit, inst:
3428 3434 # Commands shouldn't sys.exit directly, but give a return code.
3429 3435 # Just in case, catch this and pass the exit code to the caller.
3430 3436 return inst.code
3431 3437 except:
3432 3438 u.warn(_("** unknown exception encountered, details follow\n"))
3433 3439 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3434 3440 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3435 3441 % version.get_version())
3436 3442 raise
3437 3443
3438 3444 return -1
@@ -1,2090 +1,2094 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "appendfile changegroup")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "revlog traceback")
16 16
17 17 class localrepository(object):
18 18 def __del__(self):
19 19 self.transhandle = None
20 20 def __init__(self, parentui, path=None, create=0):
21 21 if not path:
22 22 p = os.getcwd()
23 23 while not os.path.isdir(os.path.join(p, ".hg")):
24 24 oldp = p
25 25 p = os.path.dirname(p)
26 26 if p == oldp:
27 27 raise repo.RepoError(_("no repo found"))
28 28 path = p
29 29 self.path = os.path.join(path, ".hg")
30 30
31 31 if not create and not os.path.isdir(self.path):
32 32 raise repo.RepoError(_("repository %s not found") % path)
33 33
34 34 self.root = os.path.abspath(path)
35 35 self.origroot = path
36 36 self.ui = ui.ui(parentui=parentui)
37 37 self.opener = util.opener(self.path)
38 38 self.wopener = util.opener(self.root)
39 39
40 40 try:
41 41 self.ui.readconfig(self.join("hgrc"), self.root)
42 42 except IOError:
43 43 pass
44 44
45 45 v = self.ui.revlogopts
46 46 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 48 fl = v.get('flags', None)
49 49 flags = 0
50 50 if fl != None:
51 51 for x in fl.split():
52 52 flags |= revlog.flagstr(x)
53 53 elif self.revlogv1:
54 54 flags = revlog.REVLOG_DEFAULT_FLAGS
55 55
56 56 v = self.revlogversion | flags
57 57 self.manifest = manifest.manifest(self.opener, v)
58 58 self.changelog = changelog.changelog(self.opener, v)
59 59
60 60 # the changelog might not have the inline index flag
61 61 # on. If the format of the changelog is the same as found in
62 62 # .hgrc, apply any flags found in the .hgrc as well.
63 63 # Otherwise, just version from the changelog
64 64 v = self.changelog.version
65 65 if v == self.revlogversion:
66 66 v |= flags
67 67 self.revlogversion = v
68 68
69 69 self.tagscache = None
70 70 self.nodetagscache = None
71 71 self.encodepats = None
72 72 self.decodepats = None
73 73 self.transhandle = None
74 74
75 75 if create:
76 76 os.mkdir(self.path)
77 77 os.mkdir(self.join("data"))
78 78
79 79 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
80 80
81 81 def hook(self, name, throw=False, **args):
82 82 def callhook(hname, funcname):
83 83 '''call python hook. hook is callable object, looked up as
84 84 name in python module. if callable returns "true", hook
85 85 fails, else passes. if hook raises exception, treated as
86 86 hook failure. exception propagates if throw is "true".
87 87
88 88 reason for "true" meaning "hook failed" is so that
89 89 unmodified commands (e.g. mercurial.commands.update) can
90 90 be run as hooks without wrappers to convert return values.'''
91 91
92 92 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
93 93 d = funcname.rfind('.')
94 94 if d == -1:
95 95 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
96 96 % (hname, funcname))
97 97 modname = funcname[:d]
98 98 try:
99 99 obj = __import__(modname)
100 100 except ImportError:
101 101 raise util.Abort(_('%s hook is invalid '
102 102 '(import of "%s" failed)') %
103 103 (hname, modname))
104 104 try:
105 105 for p in funcname.split('.')[1:]:
106 106 obj = getattr(obj, p)
107 107 except AttributeError, err:
108 108 raise util.Abort(_('%s hook is invalid '
109 109 '("%s" is not defined)') %
110 110 (hname, funcname))
111 111 if not callable(obj):
112 112 raise util.Abort(_('%s hook is invalid '
113 113 '("%s" is not callable)') %
114 114 (hname, funcname))
115 115 try:
116 116 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
117 117 except (KeyboardInterrupt, util.SignalInterrupt):
118 118 raise
119 119 except Exception, exc:
120 120 if isinstance(exc, util.Abort):
121 121 self.ui.warn(_('error: %s hook failed: %s\n') %
122 122 (hname, exc.args[0] % exc.args[1:]))
123 123 else:
124 124 self.ui.warn(_('error: %s hook raised an exception: '
125 125 '%s\n') % (hname, exc))
126 126 if throw:
127 127 raise
128 128 if self.ui.traceback:
129 129 traceback.print_exc()
130 130 return True
131 131 if r:
132 132 if throw:
133 133 raise util.Abort(_('%s hook failed') % hname)
134 134 self.ui.warn(_('warning: %s hook failed\n') % hname)
135 135 return r
136 136
137 137 def runhook(name, cmd):
138 138 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
139 139 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
140 140 [(k.upper(), v) for k, v in args.iteritems()])
141 141 r = util.system(cmd, environ=env, cwd=self.root)
142 142 if r:
143 143 desc, r = util.explain_exit(r)
144 144 if throw:
145 145 raise util.Abort(_('%s hook %s') % (name, desc))
146 146 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
147 147 return r
148 148
149 149 r = False
150 150 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
151 151 if hname.split(".", 1)[0] == name and cmd]
152 152 hooks.sort()
153 153 for hname, cmd in hooks:
154 154 if cmd.startswith('python:'):
155 155 r = callhook(hname, cmd[7:].strip()) or r
156 156 else:
157 157 r = runhook(hname, cmd) or r
158 158 return r
159 159
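# Editor's illustration (not part of the original source): hook() above runs entries
# from the [hooks] section of hgrc.  A value starting with "python:" is imported and
# called with ui, repo, hooktype and the hook's keyword arguments; any other value is
# run as a shell command with those arguments exported as HG_* environment variables.
# Hypothetical configuration and hook (section contents and names are made up):
#
#   [hooks]
#   commit = echo committed $HG_NODE
#   pretxncommit.check = python:myhooks.check
#
# where myhooks.check might look like this; per callhook() above, a true return
# value means "hook failed":
def check(ui, repo, hooktype, node=None, **kwargs):
    ui.note("running %s hook for changeset %s\n" % (hooktype, node))
    return False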
160 160 def tags(self):
161 161 '''return a mapping of tag to node'''
162 162 if not self.tagscache:
163 163 self.tagscache = {}
164 164
165 165 def parsetag(line, context):
166 166 if not line:
167 167 return
168 168 s = line.split(" ", 1)
169 169 if len(s) != 2:
170 170 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
171 171 return
172 172 node, key = s
173 173 try:
174 174 bin_n = bin(node)
175 175 except TypeError:
176 176 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
177 177 return
178 178 if bin_n not in self.changelog.nodemap:
179 179 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
180 180 return
181 181 self.tagscache[key.strip()] = bin_n
182 182
183 183 # read each head of the tags file, ending with the tip
184 184 # and add each tag found to the map, with "newer" ones
185 185 # taking precedence
186 186 fl = self.file(".hgtags")
187 187 h = fl.heads()
188 188 h.reverse()
189 189 for r in h:
190 190 count = 0
191 191 for l in fl.read(r).splitlines():
192 192 count += 1
193 193 parsetag(l, ".hgtags:%d" % count)
194 194
195 195 try:
196 196 f = self.opener("localtags")
197 197 count = 0
198 198 for l in f:
199 199 count += 1
200 200 parsetag(l, "localtags:%d" % count)
201 201 except IOError:
202 202 pass
203 203
204 204 self.tagscache['tip'] = self.changelog.tip()
205 205
206 206 return self.tagscache
207 207
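# Editor's illustration (not part of the original source): parsetag() above expects
# one "<hex changeset id> <tag name>" pair per line of .hgtags or localtags, warning
# about and skipping anything it cannot parse.  A standalone sketch of that line
# format, without the repository validity checks:
def parse_tag_lines(lines):
    tags = {}
    for line in lines:
        parts = line.strip().split(" ", 1)
        if len(parts) != 2:
            continue            # the real code warns and ignores the invalid tag
        node, name = parts
        tags[name.strip()] = node
    return tags

# parse_tag_lines(["<40 hex chars> 1.0"]) -> {"1.0": "<40 hex chars>"}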
208 208 def tagslist(self):
209 209 '''return a list of tags ordered by revision'''
210 210 l = []
211 211 for t, n in self.tags().items():
212 212 try:
213 213 r = self.changelog.rev(n)
214 214 except:
215 215 r = -2 # sort to the beginning of the list if unknown
216 216 l.append((r, t, n))
217 217 l.sort()
218 218 return [(t, n) for r, t, n in l]
219 219
220 220 def nodetags(self, node):
221 221 '''return the tags associated with a node'''
222 222 if not self.nodetagscache:
223 223 self.nodetagscache = {}
224 224 for t, n in self.tags().items():
225 225 self.nodetagscache.setdefault(n, []).append(t)
226 226 return self.nodetagscache.get(node, [])
227 227
228 228 def lookup(self, key):
229 229 try:
230 230 return self.tags()[key]
231 231 except KeyError:
232 232 try:
233 233 return self.changelog.lookup(key)
234 234 except:
235 235 raise repo.RepoError(_("unknown revision '%s'") % key)
236 236
237 237 def dev(self):
238 238 return os.stat(self.path).st_dev
239 239
240 240 def local(self):
241 241 return True
242 242
243 243 def join(self, f):
244 244 return os.path.join(self.path, f)
245 245
246 246 def wjoin(self, f):
247 247 return os.path.join(self.root, f)
248 248
249 249 def file(self, f):
250 250 if f[0] == '/':
251 251 f = f[1:]
252 252 return filelog.filelog(self.opener, f, self.revlogversion)
253 253
254 254 def getcwd(self):
255 255 return self.dirstate.getcwd()
256 256
257 257 def wfile(self, f, mode='r'):
258 258 return self.wopener(f, mode)
259 259
260 260 def wread(self, filename):
261 261 if self.encodepats == None:
262 262 l = []
263 263 for pat, cmd in self.ui.configitems("encode"):
264 264 mf = util.matcher(self.root, "", [pat], [], [])[1]
265 265 l.append((mf, cmd))
266 266 self.encodepats = l
267 267
268 268 data = self.wopener(filename, 'r').read()
269 269
270 270 for mf, cmd in self.encodepats:
271 271 if mf(filename):
272 272 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
273 273 data = util.filter(data, cmd)
274 274 break
275 275
276 276 return data
277 277
278 278 def wwrite(self, filename, data, fd=None):
279 279 if self.decodepats == None:
280 280 l = []
281 281 for pat, cmd in self.ui.configitems("decode"):
282 282 mf = util.matcher(self.root, "", [pat], [], [])[1]
283 283 l.append((mf, cmd))
284 284 self.decodepats = l
285 285
286 286 for mf, cmd in self.decodepats:
287 287 if mf(filename):
288 288 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
289 289 data = util.filter(data, cmd)
290 290 break
291 291
292 292 if fd:
293 293 return fd.write(data)
294 294 return self.wopener(filename, 'w').write(data)
295 295
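# Editor's illustration (not part of the original source): wread() and wwrite() above
# pipe file contents through the first [encode]/[decode] filter from hgrc whose
# pattern matches the filename (the real code builds the matcher with util.matcher
# and runs a shell command via util.filter).  A standalone sketch of "first matching
# filter wins", using fnmatch and plain callables:
import fnmatch

def apply_first_filter(filename, data, filters):
    """filters: list of (glob pattern, callable) pairs."""
    for pattern, fn in filters:
        if fnmatch.fnmatch(filename, pattern):
            return fn(data)
    return data

# apply_first_filter("notes.txt", "a\r\nb\r\n",
#                    [("*.txt", lambda d: d.replace("\r\n", "\n"))]) -> "a\nb\n"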
296 296 def transaction(self):
297 297 tr = self.transhandle
298 298 if tr != None and tr.running():
299 299 return tr.nest()
300 300
301 301 # save dirstate for undo
302 302 try:
303 303 ds = self.opener("dirstate").read()
304 304 except IOError:
305 305 ds = ""
306 306 self.opener("journal.dirstate", "w").write(ds)
307 307
308 308 tr = transaction.transaction(self.ui.warn, self.opener,
309 309 self.join("journal"),
310 310 aftertrans(self.path))
311 311 self.transhandle = tr
312 312 return tr
313 313
314 314 def recover(self):
315 315 l = self.lock()
316 316 if os.path.exists(self.join("journal")):
317 317 self.ui.status(_("rolling back interrupted transaction\n"))
318 318 transaction.rollback(self.opener, self.join("journal"))
319 319 self.reload()
320 320 return True
321 321 else:
322 322 self.ui.warn(_("no interrupted transaction available\n"))
323 323 return False
324 324
325 325 def undo(self, wlock=None):
326 326 if not wlock:
327 327 wlock = self.wlock()
328 328 l = self.lock()
329 329 if os.path.exists(self.join("undo")):
330 330 self.ui.status(_("rolling back last transaction\n"))
331 331 transaction.rollback(self.opener, self.join("undo"))
332 332 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
333 333 self.reload()
334 334 self.wreload()
335 335 else:
336 336 self.ui.warn(_("no undo information available\n"))
337 337
338 338 def wreload(self):
339 339 self.dirstate.read()
340 340
341 341 def reload(self):
342 342 self.changelog.load()
343 343 self.manifest.load()
344 344 self.tagscache = None
345 345 self.nodetagscache = None
346 346
347 347 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
348 348 desc=None):
349 349 try:
350 350 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
351 351 except lock.LockHeld, inst:
352 352 if not wait:
353 353 raise
354 354 self.ui.warn(_("waiting for lock on %s held by %s\n") %
355 355 (desc, inst.args[0]))
356 356 # default to 600 seconds timeout
357 357 l = lock.lock(self.join(lockname),
358 358 int(self.ui.config("ui", "timeout") or 600),
359 359 releasefn, desc=desc)
360 360 if acquirefn:
361 361 acquirefn()
362 362 return l
363 363
364 364 def lock(self, wait=1):
365 365 return self.do_lock("lock", wait, acquirefn=self.reload,
366 366 desc=_('repository %s') % self.origroot)
367 367
368 368 def wlock(self, wait=1):
369 369 return self.do_lock("wlock", wait, self.dirstate.write,
370 370 self.wreload,
371 371 desc=_('working directory of %s') % self.origroot)
372 372
373 373 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
374 374 "determine whether a new filenode is needed"
375 375 fp1 = manifest1.get(filename, nullid)
376 376 fp2 = manifest2.get(filename, nullid)
377 377
378 378 if fp2 != nullid:
379 379 # is one parent an ancestor of the other?
380 380 fpa = filelog.ancestor(fp1, fp2)
381 381 if fpa == fp1:
382 382 fp1, fp2 = fp2, nullid
383 383 elif fpa == fp2:
384 384 fp2 = nullid
385 385
386 386 # is the file unmodified from the parent? report existing entry
387 387 if fp2 == nullid and text == filelog.read(fp1):
388 388 return (fp1, None, None)
389 389
390 390 return (None, fp1, fp2)
391 391
392 392 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
393 393 orig_parent = self.dirstate.parents()[0] or nullid
394 394 p1 = p1 or self.dirstate.parents()[0] or nullid
395 395 p2 = p2 or self.dirstate.parents()[1] or nullid
396 396 c1 = self.changelog.read(p1)
397 397 c2 = self.changelog.read(p2)
398 398 m1 = self.manifest.read(c1[0])
399 399 mf1 = self.manifest.readflags(c1[0])
400 400 m2 = self.manifest.read(c2[0])
401 401 changed = []
402 402
403 403 if orig_parent == p1:
404 404 update_dirstate = 1
405 405 else:
406 406 update_dirstate = 0
407 407
408 408 if not wlock:
409 409 wlock = self.wlock()
410 410 l = self.lock()
411 411 tr = self.transaction()
412 412 mm = m1.copy()
413 413 mfm = mf1.copy()
414 414 linkrev = self.changelog.count()
415 415 for f in files:
416 416 try:
417 417 t = self.wread(f)
418 418 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
419 419 r = self.file(f)
420 420 mfm[f] = tm
421 421
422 422 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
423 423 if entry:
424 424 mm[f] = entry
425 425 continue
426 426
427 427 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
428 428 changed.append(f)
429 429 if update_dirstate:
430 430 self.dirstate.update([f], "n")
431 431 except IOError:
432 432 try:
433 433 del mm[f]
434 434 del mfm[f]
435 435 if update_dirstate:
436 436 self.dirstate.forget([f])
437 437 except:
438 438 # deleted from p2?
439 439 pass
440 440
441 441 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
442 442 user = user or self.ui.username()
443 443 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
444 444 tr.close()
445 445 if update_dirstate:
446 446 self.dirstate.setparents(n, nullid)
447 447
448 448 def commit(self, files=None, text="", user=None, date=None,
449 match=util.always, force=False, lock=None, wlock=None):
449 match=util.always, force=False, lock=None, wlock=None,
450 force_editor=False):
450 451 commit = []
451 452 remove = []
452 453 changed = []
453 454
454 455 if files:
455 456 for f in files:
456 457 s = self.dirstate.state(f)
457 458 if s in 'nmai':
458 459 commit.append(f)
459 460 elif s == 'r':
460 461 remove.append(f)
461 462 else:
462 463 self.ui.warn(_("%s not tracked!\n") % f)
463 464 else:
464 465 modified, added, removed, deleted, unknown = self.changes(match=match)
465 466 commit = modified + added
466 467 remove = removed
467 468
468 469 p1, p2 = self.dirstate.parents()
469 470 c1 = self.changelog.read(p1)
470 471 c2 = self.changelog.read(p2)
471 472 m1 = self.manifest.read(c1[0])
472 473 mf1 = self.manifest.readflags(c1[0])
473 474 m2 = self.manifest.read(c2[0])
474 475
475 476 if not commit and not remove and not force and p2 == nullid:
476 477 self.ui.status(_("nothing changed\n"))
477 478 return None
478 479
479 480 xp1 = hex(p1)
480 481 if p2 == nullid: xp2 = ''
481 482 else: xp2 = hex(p2)
482 483
483 484 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
484 485
485 486 if not wlock:
486 487 wlock = self.wlock()
487 488 if not lock:
488 489 lock = self.lock()
489 490 tr = self.transaction()
490 491
491 492 # check in files
492 493 new = {}
493 494 linkrev = self.changelog.count()
494 495 commit.sort()
495 496 for f in commit:
496 497 self.ui.note(f + "\n")
497 498 try:
498 499 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
499 500 t = self.wread(f)
500 501 except IOError:
501 502 self.ui.warn(_("trouble committing %s!\n") % f)
502 503 raise
503 504
504 505 r = self.file(f)
505 506
506 507 meta = {}
507 508 cp = self.dirstate.copied(f)
508 509 if cp:
509 510 meta["copy"] = cp
510 511 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
511 512 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
512 513 fp1, fp2 = nullid, nullid
513 514 else:
514 515 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
515 516 if entry:
516 517 new[f] = entry
517 518 continue
518 519
519 520 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
520 521 # remember what we've added so that we can later calculate
521 522 # the files to pull from a set of changesets
522 523 changed.append(f)
523 524
524 525 # update manifest
525 526 m1 = m1.copy()
526 527 m1.update(new)
527 528 for f in remove:
528 529 if f in m1:
529 530 del m1[f]
530 531 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
531 532 (new, remove))
532 533
533 534 # add changeset
534 535 new = new.keys()
535 536 new.sort()
536 537
537 538 user = user or self.ui.username()
538 if not text:
539 edittext = [""]
539 if not text or force_editor:
540 edittext = []
541 if text:
542 edittext.append(text)
543 edittext.append("")
540 544 if p2 != nullid:
541 545 edittext.append("HG: branch merge")
542 546 edittext.extend(["HG: changed %s" % f for f in changed])
543 547 edittext.extend(["HG: removed %s" % f for f in remove])
544 548 if not changed and not remove:
545 549 edittext.append("HG: no files changed")
546 550 edittext.append("")
547 551 # run editor in the repository root
548 552 olddir = os.getcwd()
549 553 os.chdir(self.root)
550 554 edittext = self.ui.edit("\n".join(edittext), user)
551 555 os.chdir(olddir)
552 556 if not edittext.rstrip():
553 557 return None
554 558 text = edittext
555 559
556 560 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
557 561 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
558 562 parent2=xp2)
559 563 tr.close()
560 564
561 565 self.dirstate.setparents(n)
562 566 self.dirstate.update(new, "n")
563 567 self.dirstate.forget(remove)
564 568
565 569 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
566 570 return n
567 571
568 572 def walk(self, node=None, files=[], match=util.always, badmatch=None):
569 573 if node:
570 574 fdict = dict.fromkeys(files)
571 575 for fn in self.manifest.read(self.changelog.read(node)[0]):
572 576 fdict.pop(fn, None)
573 577 if match(fn):
574 578 yield 'm', fn
575 579 for fn in fdict:
576 580 if badmatch and badmatch(fn):
577 581 if match(fn):
578 582 yield 'b', fn
579 583 else:
580 584 self.ui.warn(_('%s: No such file in rev %s\n') % (
581 585 util.pathto(self.getcwd(), fn), short(node)))
582 586 else:
583 587 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
584 588 yield src, fn
585 589
586 590 def changes(self, node1=None, node2=None, files=[], match=util.always,
587 591 wlock=None, show_ignored=None):
588 592 """return changes between two nodes or node and working directory
589 593
590 594 If node1 is None, use the first dirstate parent instead.
591 595 If node2 is None, compare node1 with working directory.
592 596 """
593 597
594 598 def fcmp(fn, mf):
595 599 t1 = self.wread(fn)
596 600 t2 = self.file(fn).read(mf.get(fn, nullid))
597 601 return cmp(t1, t2)
598 602
599 603 def mfmatches(node):
600 604 change = self.changelog.read(node)
601 605 mf = dict(self.manifest.read(change[0]))
602 606 for fn in mf.keys():
603 607 if not match(fn):
604 608 del mf[fn]
605 609 return mf
606 610
607 611 if node1:
608 612 # read the manifest from node1 before the manifest from node2,
609 613 # so that we'll hit the manifest cache if we're going through
610 614 # all the revisions in parent->child order.
611 615 mf1 = mfmatches(node1)
612 616
613 617 # are we comparing the working directory?
614 618 if not node2:
615 619 if not wlock:
616 620 try:
617 621 wlock = self.wlock(wait=0)
618 622 except lock.LockException:
619 623 wlock = None
620 624 lookup, modified, added, removed, deleted, unknown, ignored = (
621 625 self.dirstate.changes(files, match, show_ignored))
622 626
623 627 # are we comparing working dir against its parent?
624 628 if not node1:
625 629 if lookup:
626 630 # do a full compare of any files that might have changed
627 631 mf2 = mfmatches(self.dirstate.parents()[0])
628 632 for f in lookup:
629 633 if fcmp(f, mf2):
630 634 modified.append(f)
631 635 elif wlock is not None:
632 636 self.dirstate.update([f], "n")
633 637 else:
634 638 # we are comparing working dir against non-parent
635 639 # generate a pseudo-manifest for the working dir
636 640 mf2 = mfmatches(self.dirstate.parents()[0])
637 641 for f in lookup + modified + added:
638 642 mf2[f] = ""
639 643 for f in removed:
640 644 if f in mf2:
641 645 del mf2[f]
642 646 else:
643 647 # we are comparing two revisions
644 648 deleted, unknown, ignored = [], [], []
645 649 mf2 = mfmatches(node2)
646 650
647 651 if node1:
648 652 # flush lists from dirstate before comparing manifests
649 653 modified, added = [], []
650 654
651 655 for fn in mf2:
652 656 if mf1.has_key(fn):
653 657 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
654 658 modified.append(fn)
655 659 del mf1[fn]
656 660 else:
657 661 added.append(fn)
658 662
659 663 removed = mf1.keys()
660 664
661 665 # sort and return results:
662 666 for l in modified, added, removed, deleted, unknown, ignored:
663 667 l.sort()
664 668 if show_ignored is None:
665 669 return (modified, added, removed, deleted, unknown)
666 670 else:
667 671 return (modified, added, removed, deleted, unknown, ignored)
668 672
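# Editor's illustration (not part of the original source): changes() above returns
# sorted lists; without show_ignored the tuple has five members, with it six.  A
# hypothetical caller unpacks it the same way commit() above does:
def summarize_status(repo):
    modified, added, removed, deleted, unknown = repo.changes()
    return {'modified': modified, 'added': added, 'removed': removed,
            'deleted': deleted, 'unknown': unknown}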
669 673 def add(self, list, wlock=None):
670 674 if not wlock:
671 675 wlock = self.wlock()
672 676 for f in list:
673 677 p = self.wjoin(f)
674 678 if not os.path.exists(p):
675 679 self.ui.warn(_("%s does not exist!\n") % f)
676 680 elif not os.path.isfile(p):
677 681 self.ui.warn(_("%s not added: only files supported currently\n")
678 682 % f)
679 683 elif self.dirstate.state(f) in 'an':
680 684 self.ui.warn(_("%s already tracked!\n") % f)
681 685 else:
682 686 self.dirstate.update([f], "a")
683 687
684 688 def forget(self, list, wlock=None):
685 689 if not wlock:
686 690 wlock = self.wlock()
687 691 for f in list:
688 692 if self.dirstate.state(f) not in 'ai':
689 693 self.ui.warn(_("%s not added!\n") % f)
690 694 else:
691 695 self.dirstate.forget([f])
692 696
693 697 def remove(self, list, unlink=False, wlock=None):
694 698 if unlink:
695 699 for f in list:
696 700 try:
697 701 util.unlink(self.wjoin(f))
698 702 except OSError, inst:
699 703 if inst.errno != errno.ENOENT:
700 704 raise
701 705 if not wlock:
702 706 wlock = self.wlock()
703 707 for f in list:
704 708 p = self.wjoin(f)
705 709 if os.path.exists(p):
706 710 self.ui.warn(_("%s still exists!\n") % f)
707 711 elif self.dirstate.state(f) == 'a':
708 712 self.dirstate.forget([f])
709 713 elif f not in self.dirstate:
710 714 self.ui.warn(_("%s not tracked!\n") % f)
711 715 else:
712 716 self.dirstate.update([f], "r")
713 717
714 718 def undelete(self, list, wlock=None):
715 719 p = self.dirstate.parents()[0]
716 720 mn = self.changelog.read(p)[0]
717 721 mf = self.manifest.readflags(mn)
718 722 m = self.manifest.read(mn)
719 723 if not wlock:
720 724 wlock = self.wlock()
721 725 for f in list:
722 726 if self.dirstate.state(f) not in "r":
723 727 self.ui.warn(_("%s not removed!\n") % f)
724 728 else:
725 729 t = self.file(f).read(m[f])
726 730 self.wwrite(f, t)
727 731 util.set_exec(self.wjoin(f), mf[f])
728 732 self.dirstate.update([f], "n")
729 733
730 734 def copy(self, source, dest, wlock=None):
731 735 p = self.wjoin(dest)
732 736 if not os.path.exists(p):
733 737 self.ui.warn(_("%s does not exist!\n") % dest)
734 738 elif not os.path.isfile(p):
735 739 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
736 740 else:
737 741 if not wlock:
738 742 wlock = self.wlock()
739 743 if self.dirstate.state(dest) == '?':
740 744 self.dirstate.update([dest], "a")
741 745 self.dirstate.copy(source, dest)
742 746
743 747 def heads(self, start=None):
744 748 heads = self.changelog.heads(start)
745 749 # sort the output in rev descending order
746 750 heads = [(-self.changelog.rev(h), h) for h in heads]
747 751 heads.sort()
748 752 return [n for (r, n) in heads]
749 753
750 754 # branchlookup returns a dict giving a list of branches for
751 755 # each head. A branch is defined as the tag of a node or
752 756 # the branch of the node's parents. If a node has multiple
753 757 # branch tags, tags are eliminated if they are visible from other
754 758 # branch tags.
755 759 #
756 760 # So, for this graph: a->b->c->d->e
757 761 # \ /
758 762 # aa -----/
759 763 # a has tag 2.6.12
760 764 # d has tag 2.6.13
761 765 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
762 766 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
763 767 # from the list.
764 768 #
765 769 # It is possible that more than one head will have the same branch tag.
766 770 # callers need to check the result for multiple heads under the same
767 771 # branch tag if that is a problem for them (i.e. checkout of a specific
768 772 # branch).
769 773 #
770 774 # passing in a specific branch will limit the depth of the search
771 775 # through the parents. It won't limit the branches returned in the
772 776 # result though.
773 777 def branchlookup(self, heads=None, branch=None):
774 778 if not heads:
775 779 heads = self.heads()
776 780 headt = [ h for h in heads ]
777 781 chlog = self.changelog
778 782 branches = {}
779 783 merges = []
780 784 seenmerge = {}
781 785
782 786 # traverse the tree once for each head, recording in the branches
783 787 # dict which tags are visible from this head. The branches
784 788 # dict also records which tags are visible from each tag
785 789 # while we traverse.
786 790 while headt or merges:
787 791 if merges:
788 792 n, found = merges.pop()
789 793 visit = [n]
790 794 else:
791 795 h = headt.pop()
792 796 visit = [h]
793 797 found = [h]
794 798 seen = {}
795 799 while visit:
796 800 n = visit.pop()
797 801 if n in seen:
798 802 continue
799 803 pp = chlog.parents(n)
800 804 tags = self.nodetags(n)
801 805 if tags:
802 806 for x in tags:
803 807 if x == 'tip':
804 808 continue
805 809 for f in found:
806 810 branches.setdefault(f, {})[n] = 1
807 811 branches.setdefault(n, {})[n] = 1
808 812 break
809 813 if n not in found:
810 814 found.append(n)
811 815 if branch in tags:
812 816 continue
813 817 seen[n] = 1
814 818 if pp[1] != nullid and n not in seenmerge:
815 819 merges.append((pp[1], [x for x in found]))
816 820 seenmerge[n] = 1
817 821 if pp[0] != nullid:
818 822 visit.append(pp[0])
819 823 # traverse the branches dict, eliminating branch tags from each
820 824 # head that are visible from another branch tag for that head.
821 825 out = {}
822 826 viscache = {}
823 827 for h in heads:
824 828 def visible(node):
825 829 if node in viscache:
826 830 return viscache[node]
827 831 ret = {}
828 832 visit = [node]
829 833 while visit:
830 834 x = visit.pop()
831 835 if x in viscache:
832 836 ret.update(viscache[x])
833 837 elif x not in ret:
834 838 ret[x] = 1
835 839 if x in branches:
836 840 visit[len(visit):] = branches[x].keys()
837 841 viscache[node] = ret
838 842 return ret
839 843 if h not in branches:
840 844 continue
841 845 # O(n^2), but somewhat limited. This only searches the
842 846 # tags visible from a specific head, not all the tags in the
843 847 # whole repo.
844 848 for b in branches[h]:
845 849 vis = False
846 850 for bb in branches[h].keys():
847 851 if b != bb:
848 852 if b in visible(bb):
849 853 vis = True
850 854 break
851 855 if not vis:
852 856 l = out.setdefault(h, [])
853 857 l[len(l):] = self.nodetags(b)
854 858 return out
855 859
856 860 def branches(self, nodes):
857 861 if not nodes:
858 862 nodes = [self.changelog.tip()]
859 863 b = []
860 864 for n in nodes:
861 865 t = n
862 866 while n:
863 867 p = self.changelog.parents(n)
864 868 if p[1] != nullid or p[0] == nullid:
865 869 b.append((t, n, p[0], p[1]))
866 870 break
867 871 n = p[0]
868 872 return b
869 873
870 874 def between(self, pairs):
871 875 r = []
872 876
873 877 for top, bottom in pairs:
874 878 n, l, i = top, [], 0
875 879 f = 1
876 880
877 881 while n != bottom:
878 882 p = self.changelog.parents(n)[0]
879 883 if i == f:
880 884 l.append(n)
881 885 f = f * 2
882 886 n = p
883 887 i += 1
884 888
885 889 r.append(l)
886 890
887 891 return r
888 892
889 893 def findincoming(self, remote, base=None, heads=None, force=False):
890 894 m = self.changelog.nodemap
891 895 search = []
892 896 fetch = {}
893 897 seen = {}
894 898 seenbranch = {}
895 899 if base == None:
896 900 base = {}
897 901
898 902 if not heads:
899 903 heads = remote.heads()
900 904
901 905 if self.changelog.tip() == nullid:
902 906 if heads != [nullid]:
903 907 return [nullid]
904 908 return []
905 909
906 910 # assume we're closer to the tip than the root
907 911 # and start by examining the heads
908 912 self.ui.status(_("searching for changes\n"))
909 913
910 914 unknown = []
911 915 for h in heads:
912 916 if h not in m:
913 917 unknown.append(h)
914 918 else:
915 919 base[h] = 1
916 920
917 921 if not unknown:
918 922 return []
919 923
920 924 rep = {}
921 925 reqcnt = 0
922 926
923 927 # search through remote branches
924 928 # a 'branch' here is a linear segment of history, with four parts:
925 929 # head, root, first parent, second parent
926 930 # (a branch always has two parents (or none) by definition)
927 931 unknown = remote.branches(unknown)
928 932 while unknown:
929 933 r = []
930 934 while unknown:
931 935 n = unknown.pop(0)
932 936 if n[0] in seen:
933 937 continue
934 938
935 939 self.ui.debug(_("examining %s:%s\n")
936 940 % (short(n[0]), short(n[1])))
937 941 if n[0] == nullid:
938 942 break
939 943 if n in seenbranch:
940 944 self.ui.debug(_("branch already found\n"))
941 945 continue
942 946 if n[1] and n[1] in m: # do we know the base?
943 947 self.ui.debug(_("found incomplete branch %s:%s\n")
944 948 % (short(n[0]), short(n[1])))
945 949 search.append(n) # schedule branch range for scanning
946 950 seenbranch[n] = 1
947 951 else:
948 952 if n[1] not in seen and n[1] not in fetch:
949 953 if n[2] in m and n[3] in m:
950 954 self.ui.debug(_("found new changeset %s\n") %
951 955 short(n[1]))
952 956 fetch[n[1]] = 1 # earliest unknown
953 957 base[n[2]] = 1 # latest known
954 958 continue
955 959
956 960 for a in n[2:4]:
957 961 if a not in rep:
958 962 r.append(a)
959 963 rep[a] = 1
960 964
961 965 seen[n[0]] = 1
962 966
963 967 if r:
964 968 reqcnt += 1
965 969 self.ui.debug(_("request %d: %s\n") %
966 970 (reqcnt, " ".join(map(short, r))))
967 971 for p in range(0, len(r), 10):
968 972 for b in remote.branches(r[p:p+10]):
969 973 self.ui.debug(_("received %s:%s\n") %
970 974 (short(b[0]), short(b[1])))
971 975 if b[0] in m:
972 976 self.ui.debug(_("found base node %s\n")
973 977 % short(b[0]))
974 978 base[b[0]] = 1
975 979 elif b[0] not in seen:
976 980 unknown.append(b)
977 981
978 982 # do binary search on the branches we found
979 983 while search:
980 984 n = search.pop(0)
981 985 reqcnt += 1
982 986 l = remote.between([(n[0], n[1])])[0]
983 987 l.append(n[1])
984 988 p = n[0]
985 989 f = 1
986 990 for i in l:
987 991 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
988 992 if i in m:
989 993 if f <= 2:
990 994 self.ui.debug(_("found new branch changeset %s\n") %
991 995 short(p))
992 996 fetch[p] = 1
993 997 base[i] = 1
994 998 else:
995 999 self.ui.debug(_("narrowed branch search to %s:%s\n")
996 1000 % (short(p), short(i)))
997 1001 search.append((p, i))
998 1002 break
999 1003 p, f = i, f * 2
1000 1004
1001 1005 # sanity check our fetch list
1002 1006 for f in fetch.keys():
1003 1007 if f in m:
1004 1008 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1005 1009
1006 1010 if base.keys() == [nullid]:
1007 1011 if force:
1008 1012 self.ui.warn(_("warning: repository is unrelated\n"))
1009 1013 else:
1010 1014 raise util.Abort(_("repository is unrelated"))
1011 1015
1012 1016 self.ui.note(_("found new changesets starting at ") +
1013 1017 " ".join([short(f) for f in fetch]) + "\n")
1014 1018
1015 1019 self.ui.debug(_("%d total queries\n") % reqcnt)
1016 1020
1017 1021 return fetch.keys()
1018 1022
1019 1023 def findoutgoing(self, remote, base=None, heads=None, force=False):
1020 1024 """Return list of nodes that are roots of subsets not in remote
1021 1025
1022 1026 If base dict is specified, assume that these nodes and their parents
1023 1027 exist on the remote side.
1024 1028 If a list of heads is specified, return only nodes which are heads
1025 1029 or ancestors of these heads, and return a second element which
1026 1030 contains all remote heads which get new children.
1027 1031 """
1028 1032 if base == None:
1029 1033 base = {}
1030 1034 self.findincoming(remote, base, heads, force=force)
1031 1035
1032 1036 self.ui.debug(_("common changesets up to ")
1033 1037 + " ".join(map(short, base.keys())) + "\n")
1034 1038
1035 1039 remain = dict.fromkeys(self.changelog.nodemap)
1036 1040
1037 1041 # prune everything remote has from the tree
1038 1042 del remain[nullid]
1039 1043 remove = base.keys()
1040 1044 while remove:
1041 1045 n = remove.pop(0)
1042 1046 if n in remain:
1043 1047 del remain[n]
1044 1048 for p in self.changelog.parents(n):
1045 1049 remove.append(p)
1046 1050
1047 1051 # find every node whose parents have been pruned
1048 1052 subset = []
1049 1053 # find every remote head that will get new children
1050 1054 updated_heads = {}
1051 1055 for n in remain:
1052 1056 p1, p2 = self.changelog.parents(n)
1053 1057 if p1 not in remain and p2 not in remain:
1054 1058 subset.append(n)
1055 1059 if heads:
1056 1060 if p1 in heads:
1057 1061 updated_heads[p1] = True
1058 1062 if p2 in heads:
1059 1063 updated_heads[p2] = True
1060 1064
1061 1065 # this is the set of all roots we have to push
1062 1066 if heads:
1063 1067 return subset, updated_heads.keys()
1064 1068 else:
1065 1069 return subset
1066 1070
1067 1071 def pull(self, remote, heads=None, force=False):
1068 1072 l = self.lock()
1069 1073
1070 1074 fetch = self.findincoming(remote, force=force)
1071 1075 if fetch == [nullid]:
1072 1076 self.ui.status(_("requesting all changes\n"))
1073 1077
1074 1078 if not fetch:
1075 1079 self.ui.status(_("no changes found\n"))
1076 1080 return 0
1077 1081
1078 1082 if heads is None:
1079 1083 cg = remote.changegroup(fetch, 'pull')
1080 1084 else:
1081 1085 cg = remote.changegroupsubset(fetch, heads, 'pull')
1082 1086 return self.addchangegroup(cg, 'pull')
1083 1087
1084 1088 def push(self, remote, force=False, revs=None):
1085 1089 lock = remote.lock()
1086 1090
1087 1091 base = {}
1088 1092 remote_heads = remote.heads()
1089 1093 inc = self.findincoming(remote, base, remote_heads, force=force)
1090 1094 if not force and inc:
1091 1095 self.ui.warn(_("abort: unsynced remote changes!\n"))
1092 1096 self.ui.status(_("(did you forget to sync?"
1093 1097 " use push -f to force)\n"))
1094 1098 return 1
1095 1099
1096 1100 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1097 1101 if revs is not None:
1098 1102 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1099 1103 else:
1100 1104 bases, heads = update, self.changelog.heads()
1101 1105
1102 1106 if not bases:
1103 1107 self.ui.status(_("no changes found\n"))
1104 1108 return 1
1105 1109 elif not force:
1106 1110 # FIXME we don't properly detect creation of new heads
1107 1111 # in the push -r case, assume the user knows what he's doing
1108 1112 if not revs and len(remote_heads) < len(heads) \
1109 1113 and remote_heads != [nullid]:
1110 1114 self.ui.warn(_("abort: push creates new remote branches!\n"))
1111 1115 self.ui.status(_("(did you forget to merge?"
1112 1116 " use push -f to force)\n"))
1113 1117 return 1
1114 1118
1115 1119 if revs is None:
1116 1120 cg = self.changegroup(update, 'push')
1117 1121 else:
1118 1122 cg = self.changegroupsubset(update, revs, 'push')
1119 1123 return remote.addchangegroup(cg, 'push')
1120 1124
1121 1125 def changegroupsubset(self, bases, heads, source):
1122 1126 """This function generates a changegroup consisting of all the nodes
1123 1127 that are descendants of any of the bases, and ancestors of any of
1124 1128 the heads.
1125 1129
1126 1130 It is fairly complex as determining which filenodes and which
1127 1131 manifest nodes need to be included for the changeset to be complete
1128 1132 is non-trivial.
1129 1133
1130 1134 Another wrinkle is doing the reverse, figuring out which changeset in
1131 1135 the changegroup a particular filenode or manifestnode belongs to."""
1132 1136
1133 1137 self.hook('preoutgoing', throw=True, source=source)
1134 1138
1135 1139 # Set up some initial variables
1136 1140 # Make it easy to refer to self.changelog
1137 1141 cl = self.changelog
1138 1142 # msng is short for missing - compute the list of changesets in this
1139 1143 # changegroup.
1140 1144 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1141 1145 # Some bases may turn out to be superfluous, and some heads may be
1142 1146 # too. nodesbetween will return the minimal set of bases and heads
1143 1147 # necessary to re-create the changegroup.
1144 1148
1145 1149 # Known heads are the list of heads that it is assumed the recipient
1146 1150 # of this changegroup will know about.
1147 1151 knownheads = {}
1148 1152 # We assume that all parents of bases are known heads.
1149 1153 for n in bases:
1150 1154 for p in cl.parents(n):
1151 1155 if p != nullid:
1152 1156 knownheads[p] = 1
1153 1157 knownheads = knownheads.keys()
1154 1158 if knownheads:
1155 1159 # Now that we know what heads are known, we can compute which
1156 1160 # changesets are known. The recipient must know about all
1157 1161 # changesets required to reach the known heads from the null
1158 1162 # changeset.
1159 1163 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1160 1164 junk = None
1161 1165 # Transform the list into an ersatz set.
1162 1166 has_cl_set = dict.fromkeys(has_cl_set)
1163 1167 else:
1164 1168 # If there were no known heads, the recipient cannot be assumed to
1165 1169 # know about any changesets.
1166 1170 has_cl_set = {}
1167 1171
1168 1172 # Make it easy to refer to self.manifest
1169 1173 mnfst = self.manifest
1170 1174 # We don't know which manifests are missing yet
1171 1175 msng_mnfst_set = {}
1172 1176 # Nor do we know which filenodes are missing.
1173 1177 msng_filenode_set = {}
1174 1178
1175 1179 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1176 1180 junk = None
1177 1181
1178 1182 # A changeset always belongs to itself, so the changenode lookup
1179 1183 # function for a changenode is identity.
1180 1184 def identity(x):
1181 1185 return x
1182 1186
1183 1187 # A function generating function. Sets up an environment for the
1184 1188 # inner function.
1185 1189 def cmp_by_rev_func(revlog):
1186 1190 # Compare two nodes by their revision number in the environment's
1187 1191 # revision history. Since the revision number both represents the
1188 1192 # most efficient order to read the nodes in, and represents a
1189 1193 # topological sorting of the nodes, this function is often useful.
1190 1194 def cmp_by_rev(a, b):
1191 1195 return cmp(revlog.rev(a), revlog.rev(b))
1192 1196 return cmp_by_rev
1193 1197
1194 1198 # If we determine that a particular file or manifest node must be a
1195 1199 # node that the recipient of the changegroup will already have, we can
1196 1200 # also assume the recipient will have all the parents. This function
1197 1201 # prunes them from the set of missing nodes.
1198 1202 def prune_parents(revlog, hasset, msngset):
1199 1203 haslst = hasset.keys()
1200 1204 haslst.sort(cmp_by_rev_func(revlog))
1201 1205 for node in haslst:
1202 1206 parentlst = [p for p in revlog.parents(node) if p != nullid]
1203 1207 while parentlst:
1204 1208 n = parentlst.pop()
1205 1209 if n not in hasset:
1206 1210 hasset[n] = 1
1207 1211 p = [p for p in revlog.parents(n) if p != nullid]
1208 1212 parentlst.extend(p)
1209 1213 for n in hasset:
1210 1214 msngset.pop(n, None)
1211 1215
1212 1216 # This is a function generating function used to set up an environment
1213 1217 # for the inner function to execute in.
1214 1218 def manifest_and_file_collector(changedfileset):
1215 1219 # This is an information gathering function that gathers
1216 1220 # information from each changeset node that goes out as part of
1217 1221 # the changegroup. The information gathered is a list of which
1218 1222 # manifest nodes are potentially required (the recipient may
1219 1223 # already have them) and total list of all files which were
1220 1224 # changed in any changeset in the changegroup.
1221 1225 #
1222 1226 # We also remember the first changenode we saw any manifest
1223 1227 # referenced by so we can later determine which changenode 'owns'
1224 1228 # the manifest.
1225 1229 def collect_manifests_and_files(clnode):
1226 1230 c = cl.read(clnode)
1227 1231 for f in c[3]:
1228 1232 # This is to make sure we only have one instance of each
1229 1233 # filename string for each filename.
1230 1234 changedfileset.setdefault(f, f)
1231 1235 msng_mnfst_set.setdefault(c[0], clnode)
1232 1236 return collect_manifests_and_files
1233 1237
1234 1238 # Figure out which manifest nodes (of the ones we think might be part
1235 1239 # of the changegroup) the recipient must know about and remove them
1236 1240 # from the changegroup.
1237 1241 def prune_manifests():
1238 1242 has_mnfst_set = {}
1239 1243 for n in msng_mnfst_set:
1240 1244 # If a 'missing' manifest thinks it belongs to a changenode
1241 1245 # the recipient is assumed to have, obviously the recipient
1242 1246 # must have that manifest.
1243 1247 linknode = cl.node(mnfst.linkrev(n))
1244 1248 if linknode in has_cl_set:
1245 1249 has_mnfst_set[n] = 1
1246 1250 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1247 1251
1248 1252 # Use the information collected in collect_manifests_and_files to say
1249 1253 # which changenode any manifestnode belongs to.
1250 1254 def lookup_manifest_link(mnfstnode):
1251 1255 return msng_mnfst_set[mnfstnode]
1252 1256
1253 1257 # A function generating function that sets up the initial environment
1254 1258         # for the inner function.
1255 1259 def filenode_collector(changedfiles):
1256 1260 next_rev = [0]
1257 1261 # This gathers information from each manifestnode included in the
1258 1262 # changegroup about which filenodes the manifest node references
1259 1263 # so we can include those in the changegroup too.
1260 1264 #
1261 1265 # It also remembers which changenode each filenode belongs to. It
1262 1266         # does this by assuming that a filenode belongs to the changenode
1263 1267         # that the first manifest referencing it belongs to.
1264 1268 def collect_msng_filenodes(mnfstnode):
1265 1269 r = mnfst.rev(mnfstnode)
1266 1270 if r == next_rev[0]:
1267 1271 # If the last rev we looked at was the one just previous,
1268 1272 # we only need to see a diff.
1269 1273 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1270 1274 # For each line in the delta
1271 1275 for dline in delta.splitlines():
1272 1276 # get the filename and filenode for that line
1273 1277 f, fnode = dline.split('\0')
1274 1278 fnode = bin(fnode[:40])
1275 1279 f = changedfiles.get(f, None)
1276 1280                     # Proceed only if the file is in the list of files
1277 1281                     # we care about.
1278 1282 if f is not None:
1279 1283 # Get the changenode this manifest belongs to
1280 1284 clnode = msng_mnfst_set[mnfstnode]
1281 1285 # Create the set of filenodes for the file if
1282 1286 # there isn't one already.
1283 1287 ndset = msng_filenode_set.setdefault(f, {})
1284 1288 # And set the filenode's changelog node to the
1285 1289 # manifest's if it hasn't been set already.
1286 1290 ndset.setdefault(fnode, clnode)
1287 1291 else:
1288 1292 # Otherwise we need a full manifest.
1289 1293 m = mnfst.read(mnfstnode)
1290 1294                     # For every file we care about.
1291 1295 for f in changedfiles:
1292 1296 fnode = m.get(f, None)
1293 1297 # If it's in the manifest
1294 1298 if fnode is not None:
1295 1299 # See comments above.
1296 1300 clnode = msng_mnfst_set[mnfstnode]
1297 1301 ndset = msng_filenode_set.setdefault(f, {})
1298 1302 ndset.setdefault(fnode, clnode)
1299 1303 # Remember the revision we hope to see next.
1300 1304 next_rev[0] = r + 1
1301 1305 return collect_msng_filenodes
1302 1306
1303 1307         # We have a list of filenodes we think we need for a file; let's remove
1304 1308         # all those we know the recipient must have.
1305 1309 def prune_filenodes(f, filerevlog):
1306 1310 msngset = msng_filenode_set[f]
1307 1311 hasset = {}
1308 1312 # If a 'missing' filenode thinks it belongs to a changenode we
1309 1313 # assume the recipient must have, then the recipient must have
1310 1314 # that filenode.
1311 1315 for n in msngset:
1312 1316 clnode = cl.node(filerevlog.linkrev(n))
1313 1317 if clnode in has_cl_set:
1314 1318 hasset[n] = 1
1315 1319 prune_parents(filerevlog, hasset, msngset)
1316 1320
1317 1321         # A function generating function that sets up a context for the
1318 1322 # inner function.
1319 1323 def lookup_filenode_link_func(fname):
1320 1324 msngset = msng_filenode_set[fname]
1321 1325 # Lookup the changenode the filenode belongs to.
1322 1326 def lookup_filenode_link(fnode):
1323 1327 return msngset[fnode]
1324 1328 return lookup_filenode_link
1325 1329
1326 1330         # Now that we have all these utility functions to help out and
1327 1331 # logically divide up the task, generate the group.
1328 1332 def gengroup():
1329 1333 # The set of changed files starts empty.
1330 1334 changedfiles = {}
1331 1335 # Create a changenode group generator that will call our functions
1332 1336 # back to lookup the owning changenode and collect information.
1333 1337 group = cl.group(msng_cl_lst, identity,
1334 1338 manifest_and_file_collector(changedfiles))
1335 1339 for chnk in group:
1336 1340 yield chnk
1337 1341
1338 1342 # The list of manifests has been collected by the generator
1339 1343 # calling our functions back.
1340 1344 prune_manifests()
1341 1345 msng_mnfst_lst = msng_mnfst_set.keys()
1342 1346 # Sort the manifestnodes by revision number.
1343 1347 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1344 1348 # Create a generator for the manifestnodes that calls our lookup
1345 1349 # and data collection functions back.
1346 1350 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1347 1351 filenode_collector(changedfiles))
1348 1352 for chnk in group:
1349 1353 yield chnk
1350 1354
1351 1355 # These are no longer needed, dereference and toss the memory for
1352 1356 # them.
1353 1357 msng_mnfst_lst = None
1354 1358 msng_mnfst_set.clear()
1355 1359
1356 1360 changedfiles = changedfiles.keys()
1357 1361 changedfiles.sort()
1358 1362 # Go through all our files in order sorted by name.
1359 1363 for fname in changedfiles:
1360 1364 filerevlog = self.file(fname)
1361 1365 # Toss out the filenodes that the recipient isn't really
1362 1366 # missing.
1363 1367 if msng_filenode_set.has_key(fname):
1364 1368 prune_filenodes(fname, filerevlog)
1365 1369 msng_filenode_lst = msng_filenode_set[fname].keys()
1366 1370 else:
1367 1371 msng_filenode_lst = []
1368 1372 # If any filenodes are left, generate the group for them,
1369 1373 # otherwise don't bother.
1370 1374 if len(msng_filenode_lst) > 0:
1371 1375 yield changegroup.genchunk(fname)
1372 1376 # Sort the filenodes by their revision #
1373 1377 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1374 1378 # Create a group generator and only pass in a changenode
1375 1379 # lookup function as we need to collect no information
1376 1380 # from filenodes.
1377 1381 group = filerevlog.group(msng_filenode_lst,
1378 1382 lookup_filenode_link_func(fname))
1379 1383 for chnk in group:
1380 1384 yield chnk
1381 1385 if msng_filenode_set.has_key(fname):
1382 1386 # Don't need this anymore, toss it to free memory.
1383 1387 del msng_filenode_set[fname]
1384 1388 # Signal that no more groups are left.
1385 1389 yield changegroup.closechunk()
1386 1390
1387 1391 if msng_cl_lst:
1388 1392 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1389 1393
1390 1394 return util.chunkbuffer(gengroup())
1391 1395
1392 1396 def changegroup(self, basenodes, source):
1393 1397 """Generate a changegroup of all nodes that we have that a recipient
1394 1398 doesn't.
1395 1399
1396 1400 This is much easier than the previous function as we can assume that
1397 1401 the recipient has any changenode we aren't sending them."""
1398 1402
1399 1403 self.hook('preoutgoing', throw=True, source=source)
1400 1404
1401 1405 cl = self.changelog
1402 1406 nodes = cl.nodesbetween(basenodes, None)[0]
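                  # the revision numbers of the outgoing changesets; manifest and file
                  # revisions whose linkrev falls in this set will be sent as well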
1403 1407 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1404 1408
1405 1409 def identity(x):
1406 1410 return x
1407 1411
1408 1412 def gennodelst(revlog):
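                      # yield this revlog's nodes whose linked changeset is in the
                      # outgoing revision set, in revision (storage) order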
1409 1413 for r in xrange(0, revlog.count()):
1410 1414 n = revlog.node(r)
1411 1415 if revlog.linkrev(n) in revset:
1412 1416 yield n
1413 1417
1414 1418 def changed_file_collector(changedfileset):
1415 1419 def collect_changed_files(clnode):
1416 1420 c = cl.read(clnode)
1417 1421 for fname in c[3]:
1418 1422 changedfileset[fname] = 1
1419 1423 return collect_changed_files
1420 1424
1421 1425 def lookuprevlink_func(revlog):
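                      # map a node in this revlog to the changelog node of the
                      # changeset that introduced it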
1422 1426 def lookuprevlink(n):
1423 1427 return cl.node(revlog.linkrev(n))
1424 1428 return lookuprevlink
1425 1429
1426 1430 def gengroup():
1427 1431 # construct a list of all changed files
1428 1432 changedfiles = {}
1429 1433
1430 1434 for chnk in cl.group(nodes, identity,
1431 1435 changed_file_collector(changedfiles)):
1432 1436 yield chnk
1433 1437 changedfiles = changedfiles.keys()
1434 1438 changedfiles.sort()
1435 1439
1436 1440 mnfst = self.manifest
1437 1441 nodeiter = gennodelst(mnfst)
1438 1442 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1439 1443 yield chnk
1440 1444
1441 1445 for fname in changedfiles:
1442 1446 filerevlog = self.file(fname)
1443 1447 nodeiter = gennodelst(filerevlog)
1444 1448 nodeiter = list(nodeiter)
1445 1449 if nodeiter:
1446 1450 yield changegroup.genchunk(fname)
1447 1451 lookup = lookuprevlink_func(filerevlog)
1448 1452 for chnk in filerevlog.group(nodeiter, lookup):
1449 1453 yield chnk
1450 1454
1451 1455 yield changegroup.closechunk()
1452 1456
1453 1457 if nodes:
1454 1458 self.hook('outgoing', node=hex(nodes[0]), source=source)
1455 1459
1456 1460 return util.chunkbuffer(gengroup())
1457 1461
1458 1462 def addchangegroup(self, source, srctype):
1459 1463 """add changegroup to repo.
1460 1464 returns number of heads modified or added + 1."""
1461 1465
1462 1466 def csmap(x):
1463 1467 self.ui.debug(_("add changeset %s\n") % short(x))
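                      # each incoming changeset's linkrev is its own new revision
                      # number: the changelog length at the time it is added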
1464 1468 return cl.count()
1465 1469
1466 1470 def revmap(x):
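                      # manifest and file revisions link back to the changeset that
                      # introduced them, so map the changelog node to its revision number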
1467 1471 return cl.rev(x)
1468 1472
1469 1473 if not source:
1470 1474 return 0
1471 1475
1472 1476 self.hook('prechangegroup', throw=True, source=srctype)
1473 1477
1474 1478 changesets = files = revisions = 0
1475 1479
1476 1480 tr = self.transaction()
1477 1481
1478 1482 # write changelog and manifest data to temp files so
1479 1483 # concurrent readers will not see inconsistent view
1480 1484 cl = None
1481 1485 try:
1482 1486 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1483 1487
1484 1488 oldheads = len(cl.heads())
1485 1489
1486 1490 # pull off the changeset group
1487 1491 self.ui.status(_("adding changesets\n"))
1488 1492 co = cl.tip()
1489 1493 chunkiter = changegroup.chunkiter(source)
1490 1494 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1491 1495 cnr, cor = map(cl.rev, (cn, co))
1492 1496 if cn == nullid:
1493 1497 cnr = cor
1494 1498 changesets = cnr - cor
1495 1499
1496 1500 mf = None
1497 1501 try:
1498 1502 mf = appendfile.appendmanifest(self.opener,
1499 1503 self.manifest.version)
1500 1504
1501 1505 # pull off the manifest group
1502 1506 self.ui.status(_("adding manifests\n"))
1503 1507 mm = mf.tip()
1504 1508 chunkiter = changegroup.chunkiter(source)
1505 1509 mo = mf.addgroup(chunkiter, revmap, tr)
1506 1510
1507 1511 # process the files
1508 1512 self.ui.status(_("adding file changes\n"))
1509 1513 while 1:
1510 1514 f = changegroup.getchunk(source)
1511 1515 if not f:
1512 1516 break
1513 1517 self.ui.debug(_("adding %s revisions\n") % f)
1514 1518 fl = self.file(f)
1515 1519 o = fl.count()
1516 1520 chunkiter = changegroup.chunkiter(source)
1517 1521 n = fl.addgroup(chunkiter, revmap, tr)
1518 1522 revisions += fl.count() - o
1519 1523 files += 1
1520 1524
1521 1525 # write order here is important so concurrent readers will see
1522 1526 # consistent view of repo
1523 1527 mf.writedata()
1524 1528 finally:
1525 1529 if mf:
1526 1530 mf.cleanup()
1527 1531 cl.writedata()
1528 1532 finally:
1529 1533 if cl:
1530 1534 cl.cleanup()
1531 1535
1532 1536 # make changelog and manifest see real files again
1533 1537 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1534 1538 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1535 1539 self.changelog.checkinlinesize(tr)
1536 1540 self.manifest.checkinlinesize(tr)
1537 1541
1538 1542 newheads = len(self.changelog.heads())
1539 1543 heads = ""
1540 1544 if oldheads and newheads > oldheads:
1541 1545 heads = _(" (+%d heads)") % (newheads - oldheads)
1542 1546
1543 1547 self.ui.status(_("added %d changesets"
1544 1548 " with %d changes to %d files%s\n")
1545 1549 % (changesets, revisions, files, heads))
1546 1550
1547 1551 if changesets > 0:
1548 1552 self.hook('pretxnchangegroup', throw=True,
1549 1553 node=hex(self.changelog.node(cor+1)), source=srctype)
1550 1554
1551 1555 tr.close()
1552 1556
1553 1557 if changesets > 0:
1554 1558 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1555 1559 source=srctype)
1556 1560
1557 1561 for i in range(cor + 1, cnr + 1):
1558 1562 self.hook("incoming", node=hex(self.changelog.node(i)),
1559 1563 source=srctype)
1560 1564
1561 1565 return newheads - oldheads + 1
1562 1566
1563 1567 def update(self, node, allow=False, force=False, choose=None,
1564 1568 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1565 1569 pl = self.dirstate.parents()
1566 1570 if not force and pl[1] != nullid:
1567 1571 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1568 1572 return 1
1569 1573
1570 1574 err = False
1571 1575
1572 1576 p1, p2 = pl[0], node
1573 1577 pa = self.changelog.ancestor(p1, p2)
1574 1578 m1n = self.changelog.read(p1)[0]
1575 1579 m2n = self.changelog.read(p2)[0]
1576 1580 man = self.manifest.ancestor(m1n, m2n)
1577 1581 m1 = self.manifest.read(m1n)
1578 1582 mf1 = self.manifest.readflags(m1n)
1579 1583 m2 = self.manifest.read(m2n).copy()
1580 1584 mf2 = self.manifest.readflags(m2n)
1581 1585 ma = self.manifest.read(man)
1582 1586 mfa = self.manifest.readflags(man)
1583 1587
1584 1588 modified, added, removed, deleted, unknown = self.changes()
1585 1589
1586 1590 # is this a jump, or a merge? i.e. is there a linear path
1587 1591 # from p1 to p2?
1588 1592 linear_path = (pa == p1 or pa == p2)
1589 1593
1590 1594 if allow and linear_path:
1591 1595 raise util.Abort(_("there is nothing to merge, "
1592 1596 "just use 'hg update'"))
1593 1597 if allow and not forcemerge:
1594 1598 if modified or added or removed:
1595 1599 raise util.Abort(_("outstanding uncommitted changes"))
1596 1600 if not forcemerge and not force:
1597 1601 for f in unknown:
1598 1602 if f in m2:
1599 1603 t1 = self.wread(f)
1600 1604 t2 = self.file(f).read(m2[f])
1601 1605 if cmp(t1, t2) != 0:
1602 1606 raise util.Abort(_("'%s' already exists in the working"
1603 1607 " dir and differs from remote") % f)
1604 1608
1605 1609 # resolve the manifest to determine which files
1606 1610 # we care about merging
1607 1611 self.ui.note(_("resolving manifests\n"))
1608 1612 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1609 1613 (force, allow, moddirstate, linear_path))
1610 1614 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1611 1615 (short(man), short(m1n), short(m2n)))
1612 1616
1613 1617 merge = {}
1614 1618 get = {}
1615 1619 remove = []
1616 1620
1617 1621 # construct a working dir manifest
1618 1622 mw = m1.copy()
1619 1623 mfw = mf1.copy()
1620 1624 umap = dict.fromkeys(unknown)
1621 1625
1622 1626 for f in added + modified + unknown:
1623 1627 mw[f] = ""
1624 1628 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1625 1629
1626 1630 if moddirstate and not wlock:
1627 1631 wlock = self.wlock()
1628 1632
1629 1633 for f in deleted + removed:
1630 1634 if f in mw:
1631 1635 del mw[f]
1632 1636
1633 1637 # If we're jumping between revisions (as opposed to merging),
1634 1638 # and if neither the working directory nor the target rev has
1635 1639 # the file, then we need to remove it from the dirstate, to
1636 1640 # prevent the dirstate from listing the file when it is no
1637 1641 # longer in the manifest.
1638 1642 if moddirstate and linear_path and f not in m2:
1639 1643 self.dirstate.forget((f,))
1640 1644
1641 1645 # Compare manifests
1642 1646 for f, n in mw.iteritems():
1643 1647 if choose and not choose(f):
1644 1648 continue
1645 1649 if f in m2:
1646 1650 s = 0
1647 1651
1648 1652                 # is the working file new since m1, and does it match m2?
1649 1653 if f not in m1:
1650 1654 t1 = self.wread(f)
1651 1655 t2 = self.file(f).read(m2[f])
1652 1656 if cmp(t1, t2) == 0:
1653 1657 n = m2[f]
1654 1658 del t1, t2
1655 1659
1656 1660 # are files different?
1657 1661 if n != m2[f]:
1658 1662 a = ma.get(f, nullid)
1659 1663 # are both different from the ancestor?
1660 1664 if n != a and m2[f] != a:
1661 1665 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1662 1666 # merge executable bits
1663 1667 # "if we changed or they changed, change in merge"
1664 1668 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1665 1669 mode = ((a^b) | (a^c)) ^ a
1666 1670 merge[f] = (m1.get(f, nullid), m2[f], mode)
1667 1671 s = 1
1668 1672 # are we clobbering?
1669 1673 # is remote's version newer?
1670 1674 # or are we going back in time?
1671 1675 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1672 1676 self.ui.debug(_(" remote %s is newer, get\n") % f)
1673 1677 get[f] = m2[f]
1674 1678 s = 1
1675 1679 elif f in umap or f in added:
1676 1680 # this unknown file is the same as the checkout
1677 1681 # we need to reset the dirstate if the file was added
1678 1682 get[f] = m2[f]
1679 1683
1680 1684 if not s and mfw[f] != mf2[f]:
1681 1685 if force:
1682 1686 self.ui.debug(_(" updating permissions for %s\n") % f)
1683 1687 util.set_exec(self.wjoin(f), mf2[f])
1684 1688 else:
1685 1689 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
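                                  # merge the exec bits with the same rule as above:
                                  # "if we changed or they changed, change in merge"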
1686 1690 mode = ((a^b) | (a^c)) ^ a
1687 1691 if mode != b:
1688 1692 self.ui.debug(_(" updating permissions for %s\n")
1689 1693 % f)
1690 1694 util.set_exec(self.wjoin(f), mode)
1691 1695 del m2[f]
1692 1696 elif f in ma:
1693 1697 if n != ma[f]:
1694 1698 r = _("d")
1695 1699 if not force and (linear_path or allow):
1696 1700 r = self.ui.prompt(
1697 1701 (_(" local changed %s which remote deleted\n") % f) +
1698 1702 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1699 1703 if r == _("d"):
1700 1704 remove.append(f)
1701 1705 else:
1702 1706 self.ui.debug(_("other deleted %s\n") % f)
1703 1707 remove.append(f) # other deleted it
1704 1708 else:
1705 1709 # file is created on branch or in working directory
1706 1710 if force and f not in umap:
1707 1711 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1708 1712 remove.append(f)
1709 1713 elif n == m1.get(f, nullid): # same as parent
1710 1714 if p2 == pa: # going backwards?
1711 1715 self.ui.debug(_("remote deleted %s\n") % f)
1712 1716 remove.append(f)
1713 1717 else:
1714 1718 self.ui.debug(_("local modified %s, keeping\n") % f)
1715 1719 else:
1716 1720 self.ui.debug(_("working dir created %s, keeping\n") % f)
1717 1721
1718 1722 for f, n in m2.iteritems():
1719 1723 if choose and not choose(f):
1720 1724 continue
1721 1725 if f[0] == "/":
1722 1726 continue
1723 1727 if f in ma and n != ma[f]:
1724 1728 r = _("k")
1725 1729 if not force and (linear_path or allow):
1726 1730 r = self.ui.prompt(
1727 1731 (_("remote changed %s which local deleted\n") % f) +
1728 1732 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1729 1733 if r == _("k"):
1730 1734 get[f] = n
1731 1735 elif f not in ma:
1732 1736 self.ui.debug(_("remote created %s\n") % f)
1733 1737 get[f] = n
1734 1738 else:
1735 1739 if force or p2 == pa: # going backwards?
1736 1740 self.ui.debug(_("local deleted %s, recreating\n") % f)
1737 1741 get[f] = n
1738 1742 else:
1739 1743 self.ui.debug(_("local deleted %s\n") % f)
1740 1744
1741 1745 del mw, m1, m2, ma
1742 1746
1743 1747 if force:
1744 1748 for f in merge:
1745 1749 get[f] = merge[f][1]
1746 1750 merge = {}
1747 1751
1748 1752 if linear_path or force:
1749 1753 # we don't need to do any magic, just jump to the new rev
1750 1754 branch_merge = False
1751 1755 p1, p2 = p2, nullid
1752 1756 else:
1753 1757 if not allow:
1754 1758 self.ui.status(_("this update spans a branch"
1755 1759 " affecting the following files:\n"))
1756 1760 fl = merge.keys() + get.keys()
1757 1761 fl.sort()
1758 1762 for f in fl:
1759 1763 cf = ""
1760 1764 if f in merge:
1761 1765 cf = _(" (resolve)")
1762 1766 self.ui.status(" %s%s\n" % (f, cf))
1763 1767 self.ui.warn(_("aborting update spanning branches!\n"))
1764 1768 self.ui.status(_("(use 'hg merge' to merge across branches"
1765 1769 " or 'hg update -C' to lose changes)\n"))
1766 1770 return 1
1767 1771 branch_merge = True
1768 1772
1769 1773         # get the files we don't need to merge
1770 1774 files = get.keys()
1771 1775 files.sort()
1772 1776 for f in files:
1773 1777 if f[0] == "/":
1774 1778 continue
1775 1779 self.ui.note(_("getting %s\n") % f)
1776 1780 t = self.file(f).read(get[f])
1777 1781 self.wwrite(f, t)
1778 1782 util.set_exec(self.wjoin(f), mf2[f])
1779 1783 if moddirstate:
1780 1784 if branch_merge:
1781 1785 self.dirstate.update([f], 'n', st_mtime=-1)
1782 1786 else:
1783 1787 self.dirstate.update([f], 'n')
1784 1788
1785 1789 # merge the tricky bits
1786 1790 failedmerge = []
1787 1791 files = merge.keys()
1788 1792 files.sort()
1789 1793 xp1 = hex(p1)
1790 1794 xp2 = hex(p2)
1791 1795 for f in files:
1792 1796 self.ui.status(_("merging %s\n") % f)
1793 1797 my, other, flag = merge[f]
1794 1798 ret = self.merge3(f, my, other, xp1, xp2)
1795 1799 if ret:
1796 1800 err = True
1797 1801 failedmerge.append(f)
1798 1802 util.set_exec(self.wjoin(f), flag)
1799 1803 if moddirstate:
1800 1804 if branch_merge:
1801 1805 # We've done a branch merge, mark this file as merged
1802 1806                     # so that we properly record the merge later
1803 1807 self.dirstate.update([f], 'm')
1804 1808 else:
1805 1809 # We've update-merged a locally modified file, so
1806 1810 # we set the dirstate to emulate a normal checkout
1807 1811 # of that file some time in the past. Thus our
1808 1812 # merge will appear as a normal local file
1809 1813 # modification.
1810 1814 f_len = len(self.file(f).read(other))
1811 1815 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1812 1816
1813 1817 remove.sort()
1814 1818 for f in remove:
1815 1819 self.ui.note(_("removing %s\n") % f)
1816 1820 util.audit_path(f)
1817 1821 try:
1818 1822 util.unlink(self.wjoin(f))
1819 1823 except OSError, inst:
1820 1824 if inst.errno != errno.ENOENT:
1821 1825 self.ui.warn(_("update failed to remove %s: %s!\n") %
1822 1826 (f, inst.strerror))
1823 1827 if moddirstate:
1824 1828 if branch_merge:
1825 1829 self.dirstate.update(remove, 'r')
1826 1830 else:
1827 1831 self.dirstate.forget(remove)
1828 1832
1829 1833 if moddirstate:
1830 1834 self.dirstate.setparents(p1, p2)
1831 1835
1832 1836 if show_stats:
1833 1837 stats = ((len(get), _("updated")),
1834 1838 (len(merge) - len(failedmerge), _("merged")),
1835 1839 (len(remove), _("removed")),
1836 1840 (len(failedmerge), _("unresolved")))
1837 1841 note = ", ".join([_("%d files %s") % s for s in stats])
1838 1842 self.ui.status("%s\n" % note)
1839 1843 if moddirstate:
1840 1844 if branch_merge:
1841 1845 if failedmerge:
1842 1846 self.ui.status(_("There are unresolved merges,"
1843 1847 " you can redo the full merge using:\n"
1844 1848 " hg update -C %s\n"
1845 1849 " hg merge %s\n"
1846 1850 % (self.changelog.rev(p1),
1847 1851 self.changelog.rev(p2))))
1848 1852 else:
1849 1853 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1850 1854 elif failedmerge:
1851 1855 self.ui.status(_("There are unresolved merges with"
1852 1856 " locally modified files.\n"))
1853 1857
1854 1858 return err
1855 1859
1856 1860 def merge3(self, fn, my, other, p1, p2):
1857 1861 """perform a 3-way merge in the working directory"""
1858 1862
1859 1863 def temp(prefix, node):
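                      # write revision `node` of this file to a temporary file named
                      # <basename>~<prefix>.<random> and return its path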
1860 1864 pre = "%s~%s." % (os.path.basename(fn), prefix)
1861 1865 (fd, name) = tempfile.mkstemp(prefix=pre)
1862 1866 f = os.fdopen(fd, "wb")
1863 1867 self.wwrite(fn, fl.read(node), f)
1864 1868 f.close()
1865 1869 return name
1866 1870
1867 1871 fl = self.file(fn)
1868 1872 base = fl.ancestor(my, other)
1869 1873 a = self.wjoin(fn)
1870 1874 b = temp("base", base)
1871 1875 c = temp("other", other)
1872 1876
1873 1877 self.ui.note(_("resolving %s\n") % fn)
1874 1878 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1875 1879 (fn, short(my), short(other), short(base)))
1876 1880
1877 1881 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1878 1882 or "hgmerge")
1879 1883 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1880 1884 environ={'HG_FILE': fn,
1881 1885 'HG_MY_NODE': p1,
1882 1886 'HG_OTHER_NODE': p2,
1883 1887 'HG_FILE_MY_NODE': hex(my),
1884 1888 'HG_FILE_OTHER_NODE': hex(other),
1885 1889 'HG_FILE_BASE_NODE': hex(base)})
1886 1890 if r:
1887 1891 self.ui.warn(_("merging %s failed!\n") % fn)
1888 1892
1889 1893 os.unlink(b)
1890 1894 os.unlink(c)
1891 1895 return r
1892 1896
1893 1897 def verify(self):
1894 1898 filelinkrevs = {}
1895 1899 filenodes = {}
1896 1900 changesets = revisions = files = 0
1897 1901 errors = [0]
1898 1902 warnings = [0]
1899 1903 neededmanifests = {}
1900 1904
1901 1905 def err(msg):
1902 1906 self.ui.warn(msg + "\n")
1903 1907 errors[0] += 1
1904 1908
1905 1909 def warn(msg):
1906 1910 self.ui.warn(msg + "\n")
1907 1911 warnings[0] += 1
1908 1912
1909 1913 def checksize(obj, name):
1910 1914 d = obj.checksize()
1911 1915 if d[0]:
1912 1916 err(_("%s data length off by %d bytes") % (name, d[0]))
1913 1917 if d[1]:
1914 1918 err(_("%s index contains %d extra bytes") % (name, d[1]))
1915 1919
1916 1920 def checkversion(obj, name):
1917 1921 if obj.version != revlog.REVLOGV0:
1918 1922 if not revlogv1:
1919 1923 warn(_("warning: `%s' uses revlog format 1") % name)
1920 1924 elif revlogv1:
1921 1925 warn(_("warning: `%s' uses revlog format 0") % name)
1922 1926
1923 1927 revlogv1 = self.revlogversion != revlog.REVLOGV0
1924 1928 if self.ui.verbose or revlogv1 != self.revlogv1:
1925 1929 self.ui.status(_("repository uses revlog format %d\n") %
1926 1930 (revlogv1 and 1 or 0))
1927 1931
1928 1932 seen = {}
1929 1933 self.ui.status(_("checking changesets\n"))
1930 1934 checksize(self.changelog, "changelog")
1931 1935
1932 1936 for i in range(self.changelog.count()):
1933 1937 changesets += 1
1934 1938 n = self.changelog.node(i)
1935 1939 l = self.changelog.linkrev(n)
1936 1940 if l != i:
1937 1941 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1938 1942 if n in seen:
1939 1943 err(_("duplicate changeset at revision %d") % i)
1940 1944 seen[n] = 1
1941 1945
1942 1946 for p in self.changelog.parents(n):
1943 1947 if p not in self.changelog.nodemap:
1944 1948 err(_("changeset %s has unknown parent %s") %
1945 1949 (short(n), short(p)))
1946 1950 try:
1947 1951 changes = self.changelog.read(n)
1948 1952 except KeyboardInterrupt:
1949 1953 self.ui.warn(_("interrupted"))
1950 1954 raise
1951 1955 except Exception, inst:
1952 1956 err(_("unpacking changeset %s: %s") % (short(n), inst))
1953 1957 continue
1954 1958
1955 1959 neededmanifests[changes[0]] = n
1956 1960
1957 1961 for f in changes[3]:
1958 1962 filelinkrevs.setdefault(f, []).append(i)
1959 1963
1960 1964 seen = {}
1961 1965 self.ui.status(_("checking manifests\n"))
1962 1966 checkversion(self.manifest, "manifest")
1963 1967 checksize(self.manifest, "manifest")
1964 1968
1965 1969 for i in range(self.manifest.count()):
1966 1970 n = self.manifest.node(i)
1967 1971 l = self.manifest.linkrev(n)
1968 1972
1969 1973 if l < 0 or l >= self.changelog.count():
1970 1974 err(_("bad manifest link (%d) at revision %d") % (l, i))
1971 1975
1972 1976 if n in neededmanifests:
1973 1977 del neededmanifests[n]
1974 1978
1975 1979 if n in seen:
1976 1980 err(_("duplicate manifest at revision %d") % i)
1977 1981
1978 1982 seen[n] = 1
1979 1983
1980 1984 for p in self.manifest.parents(n):
1981 1985 if p not in self.manifest.nodemap:
1982 1986 err(_("manifest %s has unknown parent %s") %
1983 1987 (short(n), short(p)))
1984 1988
1985 1989 try:
1986 1990 delta = mdiff.patchtext(self.manifest.delta(n))
1987 1991 except KeyboardInterrupt:
1988 1992 self.ui.warn(_("interrupted"))
1989 1993 raise
1990 1994 except Exception, inst:
1991 1995 err(_("unpacking manifest %s: %s") % (short(n), inst))
1992 1996 continue
1993 1997
1994 1998 try:
1995 1999 ff = [ l.split('\0') for l in delta.splitlines() ]
1996 2000 for f, fn in ff:
1997 2001 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1998 2002 except (ValueError, TypeError), inst:
1999 2003 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2000 2004
2001 2005 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2002 2006
2003 2007 for m, c in neededmanifests.items():
2004 2008 err(_("Changeset %s refers to unknown manifest %s") %
2005 2009 (short(m), short(c)))
2006 2010 del neededmanifests
2007 2011
2008 2012 for f in filenodes:
2009 2013 if f not in filelinkrevs:
2010 2014 err(_("file %s in manifest but not in changesets") % f)
2011 2015
2012 2016 for f in filelinkrevs:
2013 2017 if f not in filenodes:
2014 2018 err(_("file %s in changeset but not in manifest") % f)
2015 2019
2016 2020 self.ui.status(_("checking files\n"))
2017 2021 ff = filenodes.keys()
2018 2022 ff.sort()
2019 2023 for f in ff:
2020 2024 if f == "/dev/null":
2021 2025 continue
2022 2026 files += 1
2023 2027 if not f:
2024 2028 err(_("file without name in manifest %s") % short(n))
2025 2029 continue
2026 2030 fl = self.file(f)
2027 2031 checkversion(fl, f)
2028 2032 checksize(fl, f)
2029 2033
2030 2034 nodes = {nullid: 1}
2031 2035 seen = {}
2032 2036 for i in range(fl.count()):
2033 2037 revisions += 1
2034 2038 n = fl.node(i)
2035 2039
2036 2040 if n in seen:
2037 2041 err(_("%s: duplicate revision %d") % (f, i))
2038 2042 if n not in filenodes[f]:
2039 2043 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2040 2044 else:
2041 2045 del filenodes[f][n]
2042 2046
2043 2047 flr = fl.linkrev(n)
2044 2048 if flr not in filelinkrevs.get(f, []):
2045 2049 err(_("%s:%s points to unexpected changeset %d")
2046 2050 % (f, short(n), flr))
2047 2051 else:
2048 2052 filelinkrevs[f].remove(flr)
2049 2053
2050 2054 # verify contents
2051 2055 try:
2052 2056 t = fl.read(n)
2053 2057 except KeyboardInterrupt:
2054 2058 self.ui.warn(_("interrupted"))
2055 2059 raise
2056 2060 except Exception, inst:
2057 2061 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2058 2062
2059 2063 # verify parents
2060 2064 (p1, p2) = fl.parents(n)
2061 2065 if p1 not in nodes:
2062 2066 err(_("file %s:%s unknown parent 1 %s") %
2063 2067 (f, short(n), short(p1)))
2064 2068 if p2 not in nodes:
2065 2069 err(_("file %s:%s unknown parent 2 %s") %
2066 2070                         (f, short(n), short(p2)))
2067 2071 nodes[n] = 1
2068 2072
2069 2073 # cross-check
2070 2074 for node in filenodes[f]:
2071 2075 err(_("node %s in manifests not in %s") % (hex(node), f))
2072 2076
2073 2077 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2074 2078 (files, changesets, revisions))
2075 2079
2076 2080 if warnings[0]:
2077 2081 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2078 2082 if errors[0]:
2079 2083 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2080 2084 return 1
2081 2085
2082 2086 # used to avoid circular references so destructors work
2083 2087 def aftertrans(base):
2084 2088 p = base
2085 2089 def a():
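                  # rename the transaction journal to 'undo' so the transaction
                  # that just completed can later be rolled back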
2086 2090 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2087 2091 util.rename(os.path.join(p, "journal.dirstate"),
2088 2092 os.path.join(p, "undo.dirstate"))
2089 2093 return a
2090 2094