Add '.' as a shortcut revision name for the working directory parent.
Brendan Cully
r2789:e3564699 default
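A minimal sketch of the behavior this changeset describes, not its actual implementation (which lives in the repository lookup code); resolve_dot is a hypothetical helper built from calls that already appear in this file:

    def resolve_dot(repo, key):
        # hypothetical illustration: '.' names the first parent of the
        # working directory; anything else goes through the normal lookup
        if key == '.':
            return repo.dirstate.parents()[0]
        return repo.lookup(key)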
@@ -1,3680 +1,3680 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 16 demandload(globals(), "hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 24 modified, added, removed, deleted, unknown = repo.changes()
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def filterfiles(filters, files):
29 29 l = [x for x in files if x in filters]
30 30
31 31 for t in filters:
32 32 if t and t[-1] != "/":
33 33 t += "/"
34 34 l += [x for x in files if x.startswith(t)]
35 35 return l
36 36
37 37 def relpath(repo, args):
38 38 cwd = repo.getcwd()
39 39 if cwd:
40 40 return [util.normpath(os.path.join(cwd, x)) for x in args]
41 41 return args
42 42
43 43 def logmessage(**opts):
44 44 """ get the log message according to -m and -l option """
45 45 message = opts['message']
46 46 logfile = opts['logfile']
47 47
48 48 if message and logfile:
49 49 raise util.Abort(_('options --message and --logfile are mutually '
50 50 'exclusive'))
51 51 if not message and logfile:
52 52 try:
53 53 if logfile == '-':
54 54 message = sys.stdin.read()
55 55 else:
56 56 message = open(logfile).read()
57 57 except IOError, inst:
58 58 raise util.Abort(_("can't read commit message '%s': %s") %
59 59 (logfile, inst.strerror))
60 60 return message
61 61
62 62 def matchpats(repo, pats=[], opts={}, head=''):
63 63 cwd = repo.getcwd()
64 64 if not pats and cwd:
65 65 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
66 66 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
67 67 cwd = ''
68 68 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
69 69 opts.get('exclude'), head)
70 70
71 71 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
72 72 files, matchfn, anypats = matchpats(repo, pats, opts, head)
73 73 exact = dict(zip(files, files))
74 74 def walk():
75 75 for src, fn in repo.walk(node=node, files=files, match=matchfn,
76 76 badmatch=badmatch):
77 77 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
78 78 return files, matchfn, walk()
79 79
80 80 def walk(repo, pats, opts, node=None, head='', badmatch=None):
81 81 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
82 82 for r in results:
83 83 yield r
84 84
85 85 def walkchangerevs(ui, repo, pats, opts):
86 86 '''Iterate over files and the revs they changed in.
87 87
88 88 Callers most commonly need to iterate backwards over the history
89 89 they are interested in. Doing so has awful (quadratic-looking)
90 90 performance, so we use iterators in a "windowed" way.
91 91
92 92 We walk a window of revisions in the desired order. Within the
93 93 window, we first walk forwards to gather data, then in the desired
94 94 order (usually backwards) to display it.
95 95
96 96 This function returns an (iterator, getchange, matchfn) tuple. The
97 97 getchange function returns the changelog entry for a numeric
98 98 revision. The iterator yields 3-tuples. They will be of one of
99 99 the following forms:
100 100
101 101 "window", incrementing, lastrev: stepping through a window,
102 102 positive if walking forwards through revs, last rev in the
103 103 sequence iterated over - use to reset state for the current window
104 104
105 105 "add", rev, fns: out-of-order traversal of the given file names
106 106 fns, which changed during revision rev - use to gather data for
107 107 possible display
108 108
109 109 "iter", rev, None: in-order traversal of the revs earlier iterated
110 110 over with "add" - use to display data'''
111 111
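    # increasing_windows yields (start, length) window descriptors that
    # step through the revision range in either direction, doubling the
    # window size (up to sizelimit) so early windows stay cheap.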
112 112 def increasing_windows(start, end, windowsize=8, sizelimit=512):
113 113 if start < end:
114 114 while start < end:
115 115 yield start, min(windowsize, end-start)
116 116 start += windowsize
117 117 if windowsize < sizelimit:
118 118 windowsize *= 2
119 119 else:
120 120 while start > end:
121 121 yield start, min(windowsize, start-end-1)
122 122 start -= windowsize
123 123 if windowsize < sizelimit:
124 124 windowsize *= 2
125 125
126 126
127 127 files, matchfn, anypats = matchpats(repo, pats, opts)
128 128 follow = opts.get('follow') or opts.get('follow_first')
129 129
130 130 if repo.changelog.count() == 0:
131 131 return [], False, matchfn
132 132
133 133 if follow:
134 134 p = repo.dirstate.parents()[0]
135 135 if p == nullid:
136 136 ui.warn(_('No working directory revision; defaulting to tip\n'))
137 137 start = 'tip'
138 138 else:
139 139 start = repo.changelog.rev(p)
140 140 defrange = '%s:0' % start
141 141 else:
142 142 defrange = 'tip:0'
143 143 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
144 144 wanted = {}
145 145 slowpath = anypats
146 146 fncache = {}
147 147
148 148 chcache = {}
149 149 def getchange(rev):
150 150 ch = chcache.get(rev)
151 151 if ch is None:
152 152 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
153 153 return ch
154 154
155 155 if not slowpath and not files:
156 156 # No files, no patterns. Display all revs.
157 157 wanted = dict(zip(revs, revs))
158 158 copies = []
159 159 if not slowpath:
160 160 # Only files, no patterns. Check the history of each file.
161 161 def filerevgen(filelog, node):
162 162 cl_count = repo.changelog.count()
163 163 if node is None:
164 164 last = filelog.count() - 1
165 165 else:
166 166 last = filelog.rev(node)
167 167 for i, window in increasing_windows(last, -1):
168 168 revs = []
169 169 for j in xrange(i - window, i + 1):
170 170 n = filelog.node(j)
171 171 revs.append((filelog.linkrev(n),
172 172 follow and filelog.renamed(n)))
173 173 revs.reverse()
174 174 for rev in revs:
175 175 # only yield rev for which we have the changelog, it can
176 176 # happen while doing "hg log" during a pull or commit
177 177 if rev[0] < cl_count:
178 178 yield rev
179 179 def iterfiles():
180 180 for filename in files:
181 181 yield filename, None
182 182 for filename_node in copies:
183 183 yield filename_node
184 184 minrev, maxrev = min(revs), max(revs)
185 185 for file_, node in iterfiles():
186 186 filelog = repo.file(file_)
187 187 # A zero count may be a directory or deleted file, so
188 188 # try to find matching entries on the slow path.
189 189 if filelog.count() == 0:
190 190 slowpath = True
191 191 break
192 192 for rev, copied in filerevgen(filelog, node):
193 193 if rev <= maxrev:
194 194 if rev < minrev:
195 195 break
196 196 fncache.setdefault(rev, [])
197 197 fncache[rev].append(file_)
198 198 wanted[rev] = 1
199 199 if follow and copied:
200 200 copies.append(copied)
201 201 if slowpath:
202 202 if follow:
203 203 raise util.Abort(_('can only follow copies/renames for explicit '
204 204 'file names'))
205 205
206 206 # The slow path checks files modified in every changeset.
207 207 def changerevgen():
208 208 for i, window in increasing_windows(repo.changelog.count()-1, -1):
209 209 for j in xrange(i - window, i + 1):
210 210 yield j, getchange(j)[3]
211 211
212 212 for rev, changefiles in changerevgen():
213 213 matches = filter(matchfn, changefiles)
214 214 if matches:
215 215 fncache[rev] = matches
216 216 wanted[rev] = 1
217 217
218 218 def iterate():
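        # followfilter keeps only revisions reachable from the starting
        # rev while following: walking forward it admits descendants of
        # startrev, walking backward it admits ancestors (first parents
        # only when onlyfirst is set).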
219 219 class followfilter:
220 220 def __init__(self, onlyfirst=False):
221 221 self.startrev = -1
222 222 self.roots = []
223 223 self.onlyfirst = onlyfirst
224 224
225 225 def match(self, rev):
226 226 def realparents(rev):
227 227 if self.onlyfirst:
228 228 return repo.changelog.parentrevs(rev)[0:1]
229 229 else:
230 230 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
231 231
232 232 if self.startrev == -1:
233 233 self.startrev = rev
234 234 return True
235 235
236 236 if rev > self.startrev:
237 237 # forward: all descendants
238 238 if not self.roots:
239 239 self.roots.append(self.startrev)
240 240 for parent in realparents(rev):
241 241 if parent in self.roots:
242 242 self.roots.append(rev)
243 243 return True
244 244 else:
245 245 # backwards: all parents
246 246 if not self.roots:
247 247 self.roots.extend(realparents(self.startrev))
248 248 if rev in self.roots:
249 249 self.roots.remove(rev)
250 250 self.roots.extend(realparents(rev))
251 251 return True
252 252
253 253 return False
254 254
255 255 if follow and not files:
256 256 ff = followfilter(onlyfirst=opts.get('follow_first'))
257 257 def want(rev):
258 258 if rev not in wanted:
259 259 return False
260 260 return ff.match(rev)
261 261 else:
262 262 def want(rev):
263 263 return rev in wanted
264 264
265 265 for i, window in increasing_windows(0, len(revs)):
266 266 yield 'window', revs[0] < revs[-1], revs[-1]
267 267 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
268 268 srevs = list(nrevs)
269 269 srevs.sort()
270 270 for rev in srevs:
271 271 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
272 272 yield 'add', rev, fns
273 273 for rev in nrevs:
274 274 yield 'iter', rev, None
275 275 return iterate(), getchange, matchfn
276 276
277 277 revrangesep = ':'
278 278
279 279 def revfix(repo, val, defval):
280 280 '''turn user-level id of changeset into rev number.
281 281 user-level id can be tag, changeset, rev number, or negative rev
282 282 number relative to number of revs (-1 is tip, etc).'''
283 283 if not val:
284 284 return defval
285 285 try:
286 286 num = int(val)
287 287 if str(num) != val:
288 288 raise ValueError
289 289 if num < 0:
290 290 num += repo.changelog.count()
291 291 if num < 0:
292 292 num = 0
293 293 elif num >= repo.changelog.count():
294 294 raise ValueError
295 295 except ValueError:
296 296 try:
297 297 num = repo.changelog.rev(repo.lookup(val))
298 298 except KeyError:
299 299 raise util.Abort(_('invalid revision identifier %s'), val)
300 300 return num
301 301
302 302 def revpair(ui, repo, revs):
303 303 '''return pair of nodes, given list of revisions. second item can
304 304 be None, meaning use working dir.'''
305 305 if not revs:
306 306 return repo.dirstate.parents()[0], None
307 307 end = None
308 308 if len(revs) == 1:
309 309 start = revs[0]
310 310 if revrangesep in start:
311 311 start, end = start.split(revrangesep, 1)
312 312 start = revfix(repo, start, 0)
313 313 end = revfix(repo, end, repo.changelog.count() - 1)
314 314 else:
315 315 start = revfix(repo, start, None)
316 316 elif len(revs) == 2:
317 317 if revrangesep in revs[0] or revrangesep in revs[1]:
318 318 raise util.Abort(_('too many revisions specified'))
319 319 start = revfix(repo, revs[0], None)
320 320 end = revfix(repo, revs[1], None)
321 321 else:
322 322 raise util.Abort(_('too many revisions specified'))
323 323 if end is not None: end = repo.lookup(str(end))
324 324 return repo.lookup(str(start)), end
325 325
326 326 def revrange(ui, repo, revs):
327 327 """Yield revision as strings from a list of revision specifications."""
328 328 seen = {}
329 329 for spec in revs:
330 330 if revrangesep in spec:
331 331 start, end = spec.split(revrangesep, 1)
332 332 start = revfix(repo, start, 0)
333 333 end = revfix(repo, end, repo.changelog.count() - 1)
334 334 step = start > end and -1 or 1
335 335 for rev in xrange(start, end+step, step):
336 336 if rev in seen:
337 337 continue
338 338 seen[rev] = 1
339 339 yield str(rev)
340 340 else:
341 341 rev = revfix(repo, spec, None)
342 342 if rev in seen:
343 343 continue
344 344 seen[rev] = 1
345 345 yield str(rev)
346 346
347 347 def make_filename(repo, pat, node,
348 348 total=None, seqno=None, revwidth=None, pathname=None):
349 349 node_expander = {
350 350 'H': lambda: hex(node),
351 351 'R': lambda: str(repo.changelog.rev(node)),
352 352 'h': lambda: short(node),
353 353 }
354 354 expander = {
355 355 '%': lambda: '%',
356 356 'b': lambda: os.path.basename(repo.root),
357 357 }
358 358
359 359 try:
360 360 if node:
361 361 expander.update(node_expander)
362 362 if node and revwidth is not None:
363 363 expander['r'] = (lambda:
364 364 str(repo.changelog.rev(node)).zfill(revwidth))
365 365 if total is not None:
366 366 expander['N'] = lambda: str(total)
367 367 if seqno is not None:
368 368 expander['n'] = lambda: str(seqno)
369 369 if total is not None and seqno is not None:
370 370 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
371 371 if pathname is not None:
372 372 expander['s'] = lambda: os.path.basename(pathname)
373 373 expander['d'] = lambda: os.path.dirname(pathname) or '.'
374 374 expander['p'] = lambda: pathname
375 375
376 376 newname = []
377 377 patlen = len(pat)
378 378 i = 0
379 379 while i < patlen:
380 380 c = pat[i]
381 381 if c == '%':
382 382 i += 1
383 383 c = pat[i]
384 384 c = expander[c]()
385 385 newname.append(c)
386 386 i += 1
387 387 return ''.join(newname)
388 388 except KeyError, inst:
389 389 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
390 390 inst.args[0])
391 391
392 392 def make_file(repo, pat, node=None,
393 393 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
394 394 if not pat or pat == '-':
395 395 return 'w' in mode and sys.stdout or sys.stdin
396 396 if hasattr(pat, 'write') and 'w' in mode:
397 397 return pat
398 398 if hasattr(pat, 'read') and 'r' in mode:
399 399 return pat
400 400 return open(make_filename(repo, pat, node, total, seqno, revwidth,
401 401 pathname),
402 402 mode)
403 403
404 404 def write_bundle(cg, filename=None, compress=True):
405 405 """Write a bundle file and return its filename.
406 406
407 407 Existing files will not be overwritten.
408 408 If no filename is specified, a temporary file is created.
409 409 bz2 compression can be turned off.
410 410 The bundle file will be deleted in case of errors.
411 411 """
412 412 class nocompress(object):
413 413 def compress(self, x):
414 414 return x
415 415 def flush(self):
416 416 return ""
417 417
418 418 fh = None
419 419 cleanup = None
420 420 try:
421 421 if filename:
422 422 if os.path.exists(filename):
423 423 raise util.Abort(_("file '%s' already exists"), filename)
424 424 fh = open(filename, "wb")
425 425 else:
426 426 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
427 427 fh = os.fdopen(fd, "wb")
428 428 cleanup = filename
429 429
430 430 if compress:
431 431 fh.write("HG10")
432 432 z = bz2.BZ2Compressor(9)
433 433 else:
434 434 fh.write("HG10UN")
435 435 z = nocompress()
436 436 # parse the changegroup data now; otherwise we would block
437 437 # on an sshrepo because we don't know where the stream ends
438 438
439 439 # an empty chunkiter is the end of the changegroup
440 440 empty = False
441 441 while not empty:
442 442 empty = True
443 443 for chunk in changegroup.chunkiter(cg):
444 444 empty = False
445 445 fh.write(z.compress(changegroup.genchunk(chunk)))
446 446 fh.write(z.compress(changegroup.closechunk()))
447 447 fh.write(z.flush())
448 448 cleanup = None
449 449 return filename
450 450 finally:
451 451 if fh is not None:
452 452 fh.close()
453 453 if cleanup is not None:
454 454 os.unlink(cleanup)
455 455
456 456 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
457 457 changes=None, text=False, opts={}):
458 458 if not node1:
459 459 node1 = repo.dirstate.parents()[0]
460 460 # reading the data for node1 early allows it to play nicely
461 461 # with repo.changes and the revlog cache.
462 462 change = repo.changelog.read(node1)
463 463 mmap = repo.manifest.read(change[0])
464 464 date1 = util.datestr(change[2])
465 465
466 466 if not changes:
467 467 changes = repo.changes(node1, node2, files, match=match)
468 468 modified, added, removed, deleted, unknown = changes
469 469 if files:
470 470 modified, added, removed = map(lambda x: filterfiles(files, x),
471 471 (modified, added, removed))
472 472
473 473 if not modified and not added and not removed:
474 474 return
475 475
476 476 if node2:
477 477 change = repo.changelog.read(node2)
478 478 mmap2 = repo.manifest.read(change[0])
479 479 _date2 = util.datestr(change[2])
480 480 def date2(f):
481 481 return _date2
482 482 def read(f):
483 483 return repo.file(f).read(mmap2[f])
484 484 else:
485 485 tz = util.makedate()[1]
486 486 _date2 = util.datestr()
487 487 def date2(f):
488 488 try:
489 489 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
490 490 except OSError, err:
491 491 if err.errno != errno.ENOENT: raise
492 492 return _date2
493 493 def read(f):
494 494 return repo.wread(f)
495 495
496 496 if ui.quiet:
497 497 r = None
498 498 else:
499 499 hexfunc = ui.verbose and hex or short
500 500 r = [hexfunc(node) for node in [node1, node2] if node]
501 501
502 502 diffopts = ui.diffopts()
503 503 showfunc = opts.get('show_function') or diffopts['showfunc']
504 504 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
505 505 ignorewsamount = opts.get('ignore_space_change') or \
506 506 diffopts['ignorewsamount']
507 507 ignoreblanklines = opts.get('ignore_blank_lines') or \
508 508 diffopts['ignoreblanklines']
509 509
510 510 all = modified + added + removed
511 511 all.sort()
512 512 for f in all:
513 513 to = None
514 514 tn = None
515 515 if f in mmap:
516 516 to = repo.file(f).read(mmap[f])
517 517 if f not in removed:
518 518 tn = read(f)
519 519 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
520 520 showfunc=showfunc, ignorews=ignorews,
521 521 ignorewsamount=ignorewsamount,
522 522 ignoreblanklines=ignoreblanklines))
523 523
524 524 def trimuser(ui, name, rev, revcache):
525 525 """trim the name of the user who committed a change"""
526 526 user = revcache.get(rev)
527 527 if user is None:
528 528 user = revcache[rev] = ui.shortuser(name)
529 529 return user
530 530
531 531 class changeset_printer(object):
532 532 '''show changeset information when templating is not requested.'''
533 533
534 534 def __init__(self, ui, repo):
535 535 self.ui = ui
536 536 self.repo = repo
537 537
538 538 def show(self, rev=0, changenode=None, brinfo=None):
539 539 '''show a single changeset or file revision'''
540 540 log = self.repo.changelog
541 541 if changenode is None:
542 542 changenode = log.node(rev)
543 543 elif not rev:
544 544 rev = log.rev(changenode)
545 545
546 546 if self.ui.quiet:
547 547 self.ui.write("%d:%s\n" % (rev, short(changenode)))
548 548 return
549 549
550 550 changes = log.read(changenode)
551 551 date = util.datestr(changes[2])
552 552
553 553 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
554 554 for p in log.parents(changenode)
555 555 if self.ui.debugflag or p != nullid]
556 556 if (not self.ui.debugflag and len(parents) == 1 and
557 557 parents[0][0] == rev-1):
558 558 parents = []
559 559
560 560 if self.ui.verbose:
561 561 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
562 562 else:
563 563 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
564 564
565 565 for tag in self.repo.nodetags(changenode):
566 566 self.ui.status(_("tag: %s\n") % tag)
567 567 for parent in parents:
568 568 self.ui.write(_("parent: %d:%s\n") % parent)
569 569
570 570 if brinfo and changenode in brinfo:
571 571 br = brinfo[changenode]
572 572 self.ui.write(_("branch: %s\n") % " ".join(br))
573 573
574 574 self.ui.debug(_("manifest: %d:%s\n") %
575 575 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
576 576 self.ui.status(_("user: %s\n") % changes[1])
577 577 self.ui.status(_("date: %s\n") % date)
578 578
579 579 if self.ui.debugflag:
580 580 files = self.repo.changes(log.parents(changenode)[0], changenode)
581 581 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
582 582 files):
583 583 if value:
584 584 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
585 585 else:
586 586 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
587 587
588 588 description = changes[4].strip()
589 589 if description:
590 590 if self.ui.verbose:
591 591 self.ui.status(_("description:\n"))
592 592 self.ui.status(description)
593 593 self.ui.status("\n\n")
594 594 else:
595 595 self.ui.status(_("summary: %s\n") %
596 596 description.splitlines()[0])
597 597 self.ui.status("\n")
598 598
599 599 def show_changeset(ui, repo, opts):
600 600 '''show one changeset. uses template or regular display. caller
601 601 can pass in 'style' and 'template' options in opts.'''
602 602
603 603 tmpl = opts.get('template')
604 604 if tmpl:
605 605 tmpl = templater.parsestring(tmpl, quoted=False)
606 606 else:
607 607 tmpl = ui.config('ui', 'logtemplate')
608 608 if tmpl: tmpl = templater.parsestring(tmpl)
609 609 mapfile = opts.get('style') or ui.config('ui', 'style')
610 610 if tmpl or mapfile:
611 611 if mapfile:
612 612 if not os.path.isfile(mapfile):
613 613 mapname = templater.templatepath('map-cmdline.' + mapfile)
614 614 if not mapname: mapname = templater.templatepath(mapfile)
615 615 if mapname: mapfile = mapname
616 616 try:
617 617 t = templater.changeset_templater(ui, repo, mapfile)
618 618 except SyntaxError, inst:
619 619 raise util.Abort(inst.args[0])
620 620 if tmpl: t.use_template(tmpl)
621 621 return t
622 622 return changeset_printer(ui, repo)
623 623
624 624 def setremoteconfig(ui, opts):
625 625 "copy remote options to ui tree"
626 626 if opts.get('ssh'):
627 627 ui.setconfig("ui", "ssh", opts['ssh'])
628 628 if opts.get('remotecmd'):
629 629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
630 630
631 631 def show_version(ui):
632 632 """output version and copyright information"""
633 633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
634 634 % version.get_version())
635 635 ui.status(_(
636 636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
637 637 "This is free software; see the source for copying conditions. "
638 638 "There is NO\nwarranty; "
639 639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
640 640 ))
641 641
642 642 def help_(ui, name=None, with_version=False):
643 643 """show help for a command, extension, or list of commands
644 644
645 645 With no arguments, print a list of commands and short help.
646 646
647 647 Given a command name, print help for that command.
648 648
649 649 Given an extension name, print help for that extension, and the
650 650 commands it provides."""
651 651 option_lists = []
652 652
653 653 def helpcmd(name):
654 654 if with_version:
655 655 show_version(ui)
656 656 ui.write('\n')
657 657 aliases, i = findcmd(name)
658 658 # synopsis
659 659 ui.write("%s\n\n" % i[2])
660 660
661 661 # description
662 662 doc = i[0].__doc__
663 663 if not doc:
664 664 doc = _("(No help text available)")
665 665 if ui.quiet:
666 666 doc = doc.splitlines(0)[0]
667 667 ui.write("%s\n" % doc.rstrip())
668 668
669 669 if not ui.quiet:
670 670 # aliases
671 671 if len(aliases) > 1:
672 672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
673 673
674 674 # options
675 675 if i[1]:
676 676 option_lists.append(("options", i[1]))
677 677
678 678 def helplist(select=None):
679 679 h = {}
680 680 cmds = {}
681 681 for c, e in table.items():
682 682 f = c.split("|", 1)[0]
683 683 if select and not select(f):
684 684 continue
685 685 if name == "shortlist" and not f.startswith("^"):
686 686 continue
687 687 f = f.lstrip("^")
688 688 if not ui.debugflag and f.startswith("debug"):
689 689 continue
690 690 doc = e[0].__doc__
691 691 if not doc:
692 692 doc = _("(No help text available)")
693 693 h[f] = doc.splitlines(0)[0].rstrip()
694 694 cmds[f] = c.lstrip("^")
695 695
696 696 fns = h.keys()
697 697 fns.sort()
698 698 m = max(map(len, fns))
699 699 for f in fns:
700 700 if ui.verbose:
701 701 commands = cmds[f].replace("|",", ")
702 702 ui.write(" %s:\n %s\n"%(commands, h[f]))
703 703 else:
704 704 ui.write(' %-*s %s\n' % (m, f, h[f]))
705 705
706 706 def helpext(name):
707 707 try:
708 708 mod = findext(name)
709 709 except KeyError:
710 710 raise UnknownCommand(name)
711 711
712 712 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
713 713 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
714 714 for d in doc[1:]:
715 715 ui.write(d, '\n')
716 716
717 717 ui.status('\n')
718 718 if ui.verbose:
719 719 ui.status(_('list of commands:\n\n'))
720 720 else:
721 721 ui.status(_('list of commands (use "hg help -v %s" '
722 722 'to show aliases and global options):\n\n') % name)
723 723
724 724 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
725 725 helplist(modcmds.has_key)
726 726
727 727 if name and name != 'shortlist':
728 728 try:
729 729 helpcmd(name)
730 730 except UnknownCommand:
731 731 helpext(name)
732 732
733 733 else:
734 734 # program name
735 735 if ui.verbose or with_version:
736 736 show_version(ui)
737 737 else:
738 738 ui.status(_("Mercurial Distributed SCM\n"))
739 739 ui.status('\n')
740 740
741 741 # list of commands
742 742 if name == "shortlist":
743 743 ui.status(_('basic commands (use "hg help" '
744 744 'for the full list or option "-v" for details):\n\n'))
745 745 elif ui.verbose:
746 746 ui.status(_('list of commands:\n\n'))
747 747 else:
748 748 ui.status(_('list of commands (use "hg help -v" '
749 749 'to show aliases and global options):\n\n'))
750 750
751 751 helplist()
752 752
753 753 # global options
754 754 if ui.verbose:
755 755 option_lists.append(("global options", globalopts))
756 756
757 757 # list all option lists
758 758 opt_output = []
759 759 for title, options in option_lists:
760 760 opt_output.append(("\n%s:\n" % title, None))
761 761 for shortopt, longopt, default, desc in options:
762 762 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
763 763 longopt and " --%s" % longopt),
764 764 "%s%s" % (desc,
765 765 default
766 766 and _(" (default: %s)") % default
767 767 or "")))
768 768
769 769 if opt_output:
770 770 opts_len = max([len(line[0]) for line in opt_output if line[1]])
771 771 for first, second in opt_output:
772 772 if second:
773 773 ui.write(" %-*s %s\n" % (opts_len, first, second))
774 774 else:
775 775 ui.write("%s\n" % first)
776 776
777 777 # Commands start here, listed alphabetically
778 778
779 779 def add(ui, repo, *pats, **opts):
780 780 """add the specified files on the next commit
781 781
782 782 Schedule files to be version controlled and added to the repository.
783 783
784 784 The files will be added to the repository at the next commit.
785 785
786 786 If no names are given, add all files in the repository.
787 787 """
788 788
789 789 names = []
790 790 for src, abs, rel, exact in walk(repo, pats, opts):
791 791 if exact:
792 792 if ui.verbose:
793 793 ui.status(_('adding %s\n') % rel)
794 794 names.append(abs)
795 795 elif repo.dirstate.state(abs) == '?':
796 796 ui.status(_('adding %s\n') % rel)
797 797 names.append(abs)
798 798 if not opts.get('dry_run'):
799 799 repo.add(names)
800 800
801 801 def addremove(ui, repo, *pats, **opts):
802 802 """add all new files, delete all missing files (DEPRECATED)
803 803
804 804 (DEPRECATED)
805 805 Add all new files and remove all missing files from the repository.
806 806
807 807 New files are ignored if they match any of the patterns in .hgignore. As
808 808 with add, these changes take effect at the next commit.
809 809
810 810 This command is now deprecated and will be removed in a future
811 811 release. Please use add and remove --after instead.
812 812 """
813 813 ui.warn(_('(the addremove command is deprecated; use add and remove '
814 814 '--after instead)\n'))
815 815 return addremove_lock(ui, repo, pats, opts)
816 816
817 817 def addremove_lock(ui, repo, pats, opts, wlock=None):
818 818 add, remove = [], []
819 819 for src, abs, rel, exact in walk(repo, pats, opts):
820 820 if src == 'f' and repo.dirstate.state(abs) == '?':
821 821 add.append(abs)
822 822 if ui.verbose or not exact:
823 823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
824 824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
825 825 remove.append(abs)
826 826 if ui.verbose or not exact:
827 827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
828 828 if not opts.get('dry_run'):
829 829 repo.add(add, wlock=wlock)
830 830 repo.remove(remove, wlock=wlock)
831 831
832 832 def annotate(ui, repo, *pats, **opts):
833 833 """show changeset information per file line
834 834
835 835 List changes in files, showing the revision id responsible for each line.
836 836
837 837 This command is useful for discovering who made a change or when a
838 838 change took place.
839 839
840 840 Without the -a option, annotate will avoid processing files it
841 841 detects as binary. With -a, annotate will generate an annotation
842 842 anyway, probably with undesirable results.
843 843 """
844 844 def getnode(rev):
845 845 return short(repo.changelog.node(rev))
846 846
847 847 ucache = {}
848 848 def getname(rev):
849 849 try:
850 850 return ucache[rev]
851 851 except:
852 852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
853 853 ucache[rev] = u
854 854 return u
855 855
856 856 dcache = {}
857 857 def getdate(rev):
858 858 datestr = dcache.get(rev)
859 859 if datestr is None:
860 860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
861 861 return datestr
862 862
863 863 if not pats:
864 864 raise util.Abort(_('at least one file name or pattern required'))
865 865
866 866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
867 867 ['date', getdate]]
868 868 if not opts['user'] and not opts['changeset'] and not opts['date']:
869 869 opts['number'] = 1
870 870
871 871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
872 872
873 873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
874 874 fctx = ctx.filectx(abs)
875 875 if not opts['text'] and util.binary(fctx.data()):
876 876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
877 877 continue
878 878
879 879 lines = fctx.annotate()
880 880 pieces = []
881 881
882 882 for o, f in opmap:
883 883 if opts[o]:
884 884 l = [f(n) for n, dummy in lines]
885 885 if l:
886 886 m = max(map(len, l))
887 887 pieces.append(["%*s" % (m, x) for x in l])
888 888
889 889 if pieces:
890 890 for p, l in zip(zip(*pieces), lines):
891 891 ui.write("%s: %s" % (" ".join(p), l[1]))
892 892
893 893 def archive(ui, repo, dest, **opts):
894 894 '''create an unversioned archive of a repository revision
895 895
896 896 By default, the revision used is the parent of the working
897 897 directory; use "-r" to specify a different revision.
898 898
899 899 To specify the type of archive to create, use "-t". Valid
900 900 types are:
901 901
902 902 "files" (default): a directory full of files
903 903 "tar": tar archive, uncompressed
904 904 "tbz2": tar archive, compressed using bzip2
905 905 "tgz": tar archive, compressed using gzip
906 906 "uzip": zip archive, uncompressed
907 907 "zip": zip archive, compressed using deflate
908 908
909 909 The exact name of the destination archive or directory is given
910 910 using a format string; see "hg help export" for details.
911 911
912 912 Each member added to an archive file has a directory prefix
913 913 prepended. Use "-p" to specify a format string for the prefix.
914 914 The default is the basename of the archive, with suffixes removed.
915 915 '''
916 916
917 917 if opts['rev']:
918 918 node = repo.lookup(opts['rev'])
919 919 else:
920 920 node, p2 = repo.dirstate.parents()
921 921 if p2 != nullid:
922 922 raise util.Abort(_('uncommitted merge - please provide a '
923 923 'specific revision'))
924 924
925 925 dest = make_filename(repo, dest, node)
926 926 if os.path.realpath(dest) == repo.root:
927 927 raise util.Abort(_('repository root cannot be destination'))
928 928 dummy, matchfn, dummy = matchpats(repo, [], opts)
929 929 kind = opts.get('type') or 'files'
930 930 prefix = opts['prefix']
931 931 if dest == '-':
932 932 if kind == 'files':
933 933 raise util.Abort(_('cannot archive plain files to stdout'))
934 934 dest = sys.stdout
935 935 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
936 936 prefix = make_filename(repo, prefix, node)
937 937 archival.archive(repo, dest, node, kind, not opts['no_decode'],
938 938 matchfn, prefix)
939 939
940 940 def backout(ui, repo, rev, **opts):
941 941 '''reverse the effect of an earlier changeset
942 942
943 943 Commit the backed out changes as a new changeset. The new
944 944 changeset is a child of the backed out changeset.
945 945
946 946 If you back out a changeset other than the tip, a new head is
947 947 created. This head is the parent of the working directory. If
948 948 you back out an old changeset, your working directory will appear
949 949 old after the backout. You should merge the backout changeset
950 950 with another head.
951 951
952 952 The --merge option remembers the parent of the working directory
953 953 before starting the backout, then merges the new head with that
954 954 changeset afterwards. This saves you from doing the merge by
955 955 hand. The result of this merge is not committed, as for a normal
956 956 merge.'''
957 957
958 958 bail_if_changed(repo)
959 959 op1, op2 = repo.dirstate.parents()
960 960 if op2 != nullid:
961 961 raise util.Abort(_('outstanding uncommitted merge'))
962 962 node = repo.lookup(rev)
963 963 p1, p2 = repo.changelog.parents(node)
964 964 if p1 == nullid:
965 965 raise util.Abort(_('cannot back out a change with no parents'))
966 966 if p2 != nullid:
967 967 if not opts['parent']:
968 968 raise util.Abort(_('cannot back out a merge changeset without '
969 969 '--parent'))
970 970 p = repo.lookup(opts['parent'])
971 971 if p not in (p1, p2):
972 972 raise util.Abort(_('%s is not a parent of %s' %
973 973 (short(p), short(node))))
974 974 parent = p
975 975 else:
976 976 if opts['parent']:
977 977 raise util.Abort(_('cannot use --parent on non-merge changeset'))
978 978 parent = p1
979 979 repo.update(node, force=True, show_stats=False)
980 980 revert_opts = opts.copy()
981 981 revert_opts['rev'] = hex(parent)
982 982 revert(ui, repo, **revert_opts)
983 983 commit_opts = opts.copy()
984 984 commit_opts['addremove'] = False
985 985 if not commit_opts['message'] and not commit_opts['logfile']:
986 986 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
987 987 commit_opts['force_editor'] = True
988 988 commit(ui, repo, **commit_opts)
989 989 def nice(node):
990 990 return '%d:%s' % (repo.changelog.rev(node), short(node))
991 991 ui.status(_('changeset %s backs out changeset %s\n') %
992 992 (nice(repo.changelog.tip()), nice(node)))
993 993 if op1 != node:
994 994 if opts['merge']:
995 995 ui.status(_('merging with changeset %s\n') % nice(op1))
996 996 doupdate(ui, repo, hex(op1), **opts)
997 997 else:
998 998 ui.status(_('the backout changeset is a new head - '
999 999 'do not forget to merge\n'))
1000 1000 ui.status(_('(use "backout --merge" '
1001 1001 'if you want to auto-merge)\n'))
1002 1002
1003 1003 def bundle(ui, repo, fname, dest=None, **opts):
1004 1004 """create a changegroup file
1005 1005
1006 1006 Generate a compressed changegroup file collecting all changesets
1007 1007 not found in the other repository.
1008 1008
1009 1009 This file can then be transferred using conventional means and
1010 1010 applied to another repository with the unbundle command. This is
1011 1011 useful when native push and pull are not available or when
1012 1012 exporting an entire repository is undesirable. The standard file
1013 1013 extension is ".hg".
1014 1014
1015 1015 Unlike import/export, this exactly preserves all changeset
1016 1016 contents including permissions, rename data, and revision history.
1017 1017 """
1018 1018 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1019 1019 other = hg.repository(ui, dest)
1020 1020 o = repo.findoutgoing(other, force=opts['force'])
1021 1021 cg = repo.changegroup(o, 'bundle')
1022 1022 write_bundle(cg, fname)
1023 1023
1024 1024 def cat(ui, repo, file1, *pats, **opts):
1025 1025 """output the latest or given revisions of files
1026 1026
1027 1027 Print the specified files as they were at the given revision.
1028 1028 If no revision is given then the tip is used.
1029 1029
1030 1030 Output may be to a file, in which case the name of the file is
1031 1031 given using a format string. The formatting rules are the same as
1032 1032 for the export command, with the following additions:
1033 1033
1034 1034 %s basename of file being printed
1035 1035 %d dirname of file being printed, or '.' if in repo root
1036 1036 %p root-relative path name of file being printed
1037 1037 """
1038 1038 ctx = repo.changectx(opts['rev'] or "-1")
1039 1039 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
1040 1040 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
1041 1041 fp.write(ctx.filectx(abs).data())
1042 1042
1043 1043 def clone(ui, source, dest=None, **opts):
1044 1044 """make a copy of an existing repository
1045 1045
1046 1046 Create a copy of an existing repository in a new directory.
1047 1047
1048 1048 If no destination directory name is specified, it defaults to the
1049 1049 basename of the source.
1050 1050
1051 1051 The location of the source is added to the new repository's
1052 1052 .hg/hgrc file, as the default to be used for future pulls.
1053 1053
1054 1054 For efficiency, hardlinks are used for cloning whenever the source
1055 1055 and destination are on the same filesystem (note this applies only
1056 1056 to the repository data, not to the checked out files). Some
1057 1057 filesystems, such as AFS, implement hardlinking incorrectly, but
1058 1058 do not report errors. In these cases, use the --pull option to
1059 1059 avoid hardlinking.
1060 1060
1061 1061 You can safely clone repositories and checked out files using full
1062 1062 hardlinks with
1063 1063
1064 1064 $ cp -al REPO REPOCLONE
1065 1065
1066 1066 which is the fastest way to clone. However, the operation is not
1067 1067 atomic (making sure REPO is not modified during the operation is
1068 1068 up to you) and you have to make sure your editor breaks hardlinks
1069 1069 (Emacs and most Linux kernel tools do so).
1070 1070
1071 1071 If you use the -r option to clone up to a specific revision, no
1072 1072 subsequent revisions will be present in the cloned repository.
1073 1073 This option implies --pull, even on local repositories.
1074 1074
1075 1075 See pull for valid source format details.
1076 1076
1077 1077 It is possible to specify an ssh:// URL as the destination, but no
1078 1078 .hg/hgrc will be created on the remote side. Look at the help text
1079 1079 for the pull command for important details about ssh:// URLs.
1080 1080 """
1081 1081 setremoteconfig(ui, opts)
1082 1082 hg.clone(ui, ui.expandpath(source), dest,
1083 1083 pull=opts['pull'],
1084 1084 stream=opts['uncompressed'],
1085 1085 rev=opts['rev'],
1086 1086 update=not opts['noupdate'])
1087 1087
1088 1088 def commit(ui, repo, *pats, **opts):
1089 1089 """commit the specified files or all outstanding changes
1090 1090
1091 1091 Commit changes to the given files into the repository.
1092 1092
1093 1093 If a list of files is omitted, all changes reported by "hg status"
1094 1094 will be committed.
1095 1095
1096 1096 If no commit message is specified, the editor configured in your hgrc
1097 1097 or in the EDITOR environment variable is started to enter a message.
1098 1098 """
1099 1099 message = logmessage(**opts)
1100 1100
1101 1101 if opts['addremove']:
1102 1102 addremove_lock(ui, repo, pats, opts)
1103 1103 fns, match, anypats = matchpats(repo, pats, opts)
1104 1104 if pats:
1105 1105 modified, added, removed, deleted, unknown = (
1106 1106 repo.changes(files=fns, match=match))
1107 1107 files = modified + added + removed
1108 1108 else:
1109 1109 files = []
1110 1110 try:
1111 1111 repo.commit(files, message, opts['user'], opts['date'], match,
1112 1112 force_editor=opts.get('force_editor'))
1113 1113 except ValueError, inst:
1114 1114 raise util.Abort(str(inst))
1115 1115
1116 1116 def docopy(ui, repo, pats, opts, wlock):
1117 1117 # called with the repo lock held
1118 1118 cwd = repo.getcwd()
1119 1119 errors = 0
1120 1120 copied = []
1121 1121 targets = {}
1122 1122
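    # okaytocopy returns the path to use as the copy source: the recorded
    # copy origin for an uncommitted copy/add, the file itself when it is
    # normally tracked, or None (warning only on exact matches) when the
    # file is unmanaged or otherwise cannot be copied.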
1123 1123 def okaytocopy(abs, rel, exact):
1124 1124 reasons = {'?': _('is not managed'),
1125 1125 'a': _('has been marked for add'),
1126 1126 'r': _('has been marked for remove')}
1127 1127 state = repo.dirstate.state(abs)
1128 1128 reason = reasons.get(state)
1129 1129 if reason:
1130 1130 if state == 'a':
1131 1131 origsrc = repo.dirstate.copied(abs)
1132 1132 if origsrc is not None:
1133 1133 return origsrc
1134 1134 if exact:
1135 1135 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1136 1136 else:
1137 1137 return abs
1138 1138
1139 1139 def copy(origsrc, abssrc, relsrc, target, exact):
1140 1140 abstarget = util.canonpath(repo.root, cwd, target)
1141 1141 reltarget = util.pathto(cwd, abstarget)
1142 1142 prevsrc = targets.get(abstarget)
1143 1143 if prevsrc is not None:
1144 1144 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1145 1145 (reltarget, abssrc, prevsrc))
1146 1146 return
1147 1147 if (not opts['after'] and os.path.exists(reltarget) or
1148 1148 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1149 1149 if not opts['force']:
1150 1150 ui.warn(_('%s: not overwriting - file exists\n') %
1151 1151 reltarget)
1152 1152 return
1153 1153 if not opts['after'] and not opts.get('dry_run'):
1154 1154 os.unlink(reltarget)
1155 1155 if opts['after']:
1156 1156 if not os.path.exists(reltarget):
1157 1157 return
1158 1158 else:
1159 1159 targetdir = os.path.dirname(reltarget) or '.'
1160 1160 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1161 1161 os.makedirs(targetdir)
1162 1162 try:
1163 1163 restore = repo.dirstate.state(abstarget) == 'r'
1164 1164 if restore and not opts.get('dry_run'):
1165 1165 repo.undelete([abstarget], wlock)
1166 1166 try:
1167 1167 if not opts.get('dry_run'):
1168 1168 shutil.copyfile(relsrc, reltarget)
1169 1169 shutil.copymode(relsrc, reltarget)
1170 1170 restore = False
1171 1171 finally:
1172 1172 if restore:
1173 1173 repo.remove([abstarget], wlock)
1174 1174 except shutil.Error, inst:
1175 1175 raise util.Abort(str(inst))
1176 1176 except IOError, inst:
1177 1177 if inst.errno == errno.ENOENT:
1178 1178 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1179 1179 else:
1180 1180 ui.warn(_('%s: cannot copy - %s\n') %
1181 1181 (relsrc, inst.strerror))
1182 1182 errors += 1
1183 1183 return
1184 1184 if ui.verbose or not exact:
1185 1185 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1186 1186 targets[abstarget] = abssrc
1187 1187 if abstarget != origsrc and not opts.get('dry_run'):
1188 1188 repo.copy(origsrc, abstarget, wlock)
1189 1189 copied.append((abssrc, relsrc, exact))
1190 1190
1191 1191 def targetpathfn(pat, dest, srcs):
1192 1192 if os.path.isdir(pat):
1193 1193 abspfx = util.canonpath(repo.root, cwd, pat)
1194 1194 if destdirexists:
1195 1195 striplen = len(os.path.split(abspfx)[0])
1196 1196 else:
1197 1197 striplen = len(abspfx)
1198 1198 if striplen:
1199 1199 striplen += len(os.sep)
1200 1200 res = lambda p: os.path.join(dest, p[striplen:])
1201 1201 elif destdirexists:
1202 1202 res = lambda p: os.path.join(dest, os.path.basename(p))
1203 1203 else:
1204 1204 res = lambda p: dest
1205 1205 return res
1206 1206
1207 1207 def targetpathafterfn(pat, dest, srcs):
1208 1208 if util.patkind(pat, None)[0]:
1209 1209 # a mercurial pattern
1210 1210 res = lambda p: os.path.join(dest, os.path.basename(p))
1211 1211 else:
1212 1212 abspfx = util.canonpath(repo.root, cwd, pat)
1213 1213 if len(abspfx) < len(srcs[0][0]):
1214 1214 # A directory. Either the target path contains the last
1215 1215 # component of the source path or it does not.
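                # evalpath counts how many sources would land on existing
                # paths under dest when stripped at striplen; the higher
                # score decides whether dest already contains the last
                # component of the source path.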
1216 1216 def evalpath(striplen):
1217 1217 score = 0
1218 1218 for s in srcs:
1219 1219 t = os.path.join(dest, s[0][striplen:])
1220 1220 if os.path.exists(t):
1221 1221 score += 1
1222 1222 return score
1223 1223
1224 1224 striplen = len(abspfx)
1225 1225 if striplen:
1226 1226 striplen += len(os.sep)
1227 1227 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1228 1228 score = evalpath(striplen)
1229 1229 striplen1 = len(os.path.split(abspfx)[0])
1230 1230 if striplen1:
1231 1231 striplen1 += len(os.sep)
1232 1232 if evalpath(striplen1) > score:
1233 1233 striplen = striplen1
1234 1234 res = lambda p: os.path.join(dest, p[striplen:])
1235 1235 else:
1236 1236 # a file
1237 1237 if destdirexists:
1238 1238 res = lambda p: os.path.join(dest, os.path.basename(p))
1239 1239 else:
1240 1240 res = lambda p: dest
1241 1241 return res
1242 1242
1243 1243
1244 1244 pats = list(pats)
1245 1245 if not pats:
1246 1246 raise util.Abort(_('no source or destination specified'))
1247 1247 if len(pats) == 1:
1248 1248 raise util.Abort(_('no destination specified'))
1249 1249 dest = pats.pop()
1250 1250 destdirexists = os.path.isdir(dest)
1251 1251 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1252 1252 raise util.Abort(_('with multiple sources, destination must be an '
1253 1253 'existing directory'))
1254 1254 if opts['after']:
1255 1255 tfn = targetpathafterfn
1256 1256 else:
1257 1257 tfn = targetpathfn
1258 1258 copylist = []
1259 1259 for pat in pats:
1260 1260 srcs = []
1261 1261 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1262 1262 origsrc = okaytocopy(abssrc, relsrc, exact)
1263 1263 if origsrc:
1264 1264 srcs.append((origsrc, abssrc, relsrc, exact))
1265 1265 if not srcs:
1266 1266 continue
1267 1267 copylist.append((tfn(pat, dest, srcs), srcs))
1268 1268 if not copylist:
1269 1269 raise util.Abort(_('no files to copy'))
1270 1270
1271 1271 for targetpath, srcs in copylist:
1272 1272 for origsrc, abssrc, relsrc, exact in srcs:
1273 1273 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1274 1274
1275 1275 if errors:
1276 1276 ui.warn(_('(consider using --after)\n'))
1277 1277 return errors, copied
1278 1278
1279 1279 def copy(ui, repo, *pats, **opts):
1280 1280 """mark files as copied for the next commit
1281 1281
1282 1282 Mark dest as having copies of source files. If dest is a
1283 1283 directory, copies are put in that directory. If dest is a file,
1284 1284 there can only be one source.
1285 1285
1286 1286 By default, this command copies the contents of files as they
1287 1287 stand in the working directory. If invoked with --after, the
1288 1288 operation is recorded, but no copying is performed.
1289 1289
1290 1290 This command takes effect in the next commit.
1291 1291
1292 1292 NOTE: This command should be treated as experimental. While it
1293 1293 should properly record copied files, this information is not yet
1294 1294 fully used by merge, nor fully reported by log.
1295 1295 """
1296 1296 wlock = repo.wlock(0)
1297 1297 errs, copied = docopy(ui, repo, pats, opts, wlock)
1298 1298 return errs
1299 1299
1300 1300 def debugancestor(ui, index, rev1, rev2):
1301 1301 """find the ancestor revision of two revisions in a given index"""
1302 1302 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1303 1303 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1304 1304 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1305 1305
1306 1306 def debugcomplete(ui, cmd='', **opts):
1307 1307 """returns the completion list associated with the given command"""
1308 1308
1309 1309 if opts['options']:
1310 1310 options = []
1311 1311 otables = [globalopts]
1312 1312 if cmd:
1313 1313 aliases, entry = findcmd(cmd)
1314 1314 otables.append(entry[1])
1315 1315 for t in otables:
1316 1316 for o in t:
1317 1317 if o[0]:
1318 1318 options.append('-%s' % o[0])
1319 1319 options.append('--%s' % o[1])
1320 1320 ui.write("%s\n" % "\n".join(options))
1321 1321 return
1322 1322
1323 1323 clist = findpossible(cmd).keys()
1324 1324 clist.sort()
1325 1325 ui.write("%s\n" % "\n".join(clist))
1326 1326
1327 1327 def debugrebuildstate(ui, repo, rev=None):
1328 1328 """rebuild the dirstate as it would look like for the given revision"""
1329 1329 if not rev:
1330 1330 rev = repo.changelog.tip()
1331 1331 else:
1332 1332 rev = repo.lookup(rev)
1333 1333 change = repo.changelog.read(rev)
1334 1334 n = change[0]
1335 1335 files = repo.manifest.readflags(n)
1336 1336 wlock = repo.wlock()
1337 1337 repo.dirstate.rebuild(rev, files.iteritems())
1338 1338
1339 1339 def debugcheckstate(ui, repo):
1340 1340 """validate the correctness of the current dirstate"""
1341 1341 parent1, parent2 = repo.dirstate.parents()
1342 1342 repo.dirstate.read()
1343 1343 dc = repo.dirstate.map
1344 1344 keys = dc.keys()
1345 1345 keys.sort()
1346 1346 m1n = repo.changelog.read(parent1)[0]
1347 1347 m2n = repo.changelog.read(parent2)[0]
1348 1348 m1 = repo.manifest.read(m1n)
1349 1349 m2 = repo.manifest.read(m2n)
1350 1350 errors = 0
1351 1351 for f in dc:
1352 1352 state = repo.dirstate.state(f)
1353 1353 if state in "nr" and f not in m1:
1354 1354 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1355 1355 errors += 1
1356 1356 if state in "a" and f in m1:
1357 1357 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1358 1358 errors += 1
1359 1359 if state in "m" and f not in m1 and f not in m2:
1360 1360 ui.warn(_("%s in state %s, but not in either manifest\n") %
1361 1361 (f, state))
1362 1362 errors += 1
1363 1363 for f in m1:
1364 1364 state = repo.dirstate.state(f)
1365 1365 if state not in "nrm":
1366 1366 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1367 1367 errors += 1
1368 1368 if errors:
1369 1369 error = _(".hg/dirstate inconsistent with current parent's manifest")
1370 1370 raise util.Abort(error)
1371 1371
1372 1372 def debugconfig(ui, repo, *values):
1373 1373 """show combined config settings from all hgrc files
1374 1374
1375 1375 With no args, print names and values of all config items.
1376 1376
1377 1377 With one arg of the form section.name, print just the value of
1378 1378 that config item.
1379 1379
1380 1380 With multiple args, print names and values of all config items
1381 1381 with matching section names."""
1382 1382
1383 1383 if values:
1384 1384 if len([v for v in values if '.' in v]) > 1:
1385 1385 raise util.Abort(_('only one config item permitted'))
1386 1386 for section, name, value in ui.walkconfig():
1387 1387 sectname = section + '.' + name
1388 1388 if values:
1389 1389 for v in values:
1390 1390 if v == section:
1391 1391 ui.write('%s=%s\n' % (sectname, value))
1392 1392 elif v == sectname:
1393 1393 ui.write(value, '\n')
1394 1394 else:
1395 1395 ui.write('%s=%s\n' % (sectname, value))
1396 1396
1397 1397 def debugsetparents(ui, repo, rev1, rev2=None):
1398 1398 """manually set the parents of the current working directory
1399 1399
1400 1400 This is useful for writing repository conversion tools, but should
1401 1401 be used with care.
1402 1402 """
1403 1403
1404 1404 if not rev2:
1405 1405 rev2 = hex(nullid)
1406 1406
1407 1407 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1408 1408
1409 1409 def debugstate(ui, repo):
1410 1410 """show the contents of the current dirstate"""
1411 1411 repo.dirstate.read()
1412 1412 dc = repo.dirstate.map
1413 1413 keys = dc.keys()
1414 1414 keys.sort()
1415 1415 for file_ in keys:
1416 1416 ui.write("%c %3o %10d %s %s\n"
1417 1417 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1418 1418 time.strftime("%x %X",
1419 1419 time.localtime(dc[file_][3])), file_))
1420 1420 for f in repo.dirstate.copies:
1421 1421 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1422 1422
1423 1423 def debugdata(ui, file_, rev):
1424 1424 """dump the contents of an data file revision"""
1425 1425 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1426 1426 file_[:-2] + ".i", file_, 0)
1427 1427 try:
1428 1428 ui.write(r.revision(r.lookup(rev)))
1429 1429 except KeyError:
1430 1430 raise util.Abort(_('invalid revision identifier %s'), rev)
1431 1431
1432 1432 def debugindex(ui, file_):
1433 1433 """dump the contents of an index file"""
1434 1434 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1435 1435 ui.write(" rev offset length base linkrev" +
1436 1436 " nodeid p1 p2\n")
1437 1437 for i in range(r.count()):
1438 1438 node = r.node(i)
1439 1439 pp = r.parents(node)
1440 1440 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1441 1441 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1442 1442 short(node), short(pp[0]), short(pp[1])))
1443 1443
1444 1444 def debugindexdot(ui, file_):
1445 1445 """dump an index DAG as a .dot file"""
1446 1446 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1447 1447 ui.write("digraph G {\n")
1448 1448 for i in range(r.count()):
1449 1449 node = r.node(i)
1450 1450 pp = r.parents(node)
1451 1451 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1452 1452 if pp[1] != nullid:
1453 1453 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1454 1454 ui.write("}\n")
1455 1455
1456 1456 def debugrename(ui, repo, file, rev=None):
1457 1457 """dump rename information"""
1458 1458 r = repo.file(relpath(repo, [file])[0])
1459 1459 if rev:
1460 1460 try:
1461 1461 # assume all revision numbers are for changesets
1462 1462 n = repo.lookup(rev)
1463 1463 change = repo.changelog.read(n)
1464 1464 m = repo.manifest.read(change[0])
1465 1465 n = m[relpath(repo, [file])[0]]
1466 1466 except (hg.RepoError, KeyError):
1467 1467 n = r.lookup(rev)
1468 1468 else:
1469 1469 n = r.tip()
1470 1470 m = r.renamed(n)
1471 1471 if m:
1472 1472 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1473 1473 else:
1474 1474 ui.write(_("not renamed\n"))
1475 1475
1476 1476 def debugwalk(ui, repo, *pats, **opts):
1477 1477 """show how files match on given patterns"""
1478 1478 items = list(walk(repo, pats, opts))
1479 1479 if not items:
1480 1480 return
1481 1481 fmt = '%%s %%-%ds %%-%ds %%s' % (
1482 1482 max([len(abs) for (src, abs, rel, exact) in items]),
1483 1483 max([len(rel) for (src, abs, rel, exact) in items]))
1484 1484 for src, abs, rel, exact in items:
1485 1485 line = fmt % (src, abs, rel, exact and 'exact' or '')
1486 1486 ui.write("%s\n" % line.rstrip())
1487 1487
1488 1488 def diff(ui, repo, *pats, **opts):
1489 1489 """diff repository (or selected files)
1490 1490
1491 1491 Show differences between revisions for the specified files.
1492 1492
1493 1493 Differences between files are shown using the unified diff format.
1494 1494
1495 1495 When two revision arguments are given, then changes are shown
1496 1496 between those revisions. If only one revision is specified then
1497 1497 that revision is compared to the working directory, and, when no
1498 1498 revisions are specified, the working directory files are compared
1499 1499 to its parent.
1500 1500
1501 1501 Without the -a option, diff will avoid generating diffs of files
1502 1502 it detects as binary. With -a, diff will generate a diff anyway,
1503 1503 probably with undesirable results.
1504 1504 """
1505 1505 node1, node2 = revpair(ui, repo, opts['rev'])
1506 1506
1507 1507 fns, matchfn, anypats = matchpats(repo, pats, opts)
1508 1508
1509 1509 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1510 1510 text=opts['text'], opts=opts)
1511 1511
1512 1512 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1513 1513 node = repo.lookup(changeset)
1514 1514 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1515 1515 if opts['switch_parent']:
1516 1516 parents.reverse()
1517 1517 prev = (parents and parents[0]) or nullid
1518 1518 change = repo.changelog.read(node)
1519 1519
1520 1520 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1521 1521 revwidth=revwidth)
1522 1522 if fp != sys.stdout:
1523 1523 ui.note("%s\n" % fp.name)
1524 1524
1525 1525 fp.write("# HG changeset patch\n")
1526 1526 fp.write("# User %s\n" % change[1])
1527 1527 fp.write("# Date %d %d\n" % change[2])
1528 1528 fp.write("# Node ID %s\n" % hex(node))
1529 1529 fp.write("# Parent %s\n" % hex(prev))
1530 1530 if len(parents) > 1:
1531 1531 fp.write("# Parent %s\n" % hex(parents[1]))
1532 1532 fp.write(change[4].rstrip())
1533 1533 fp.write("\n\n")
1534 1534
1535 1535 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1536 1536 if fp != sys.stdout:
1537 1537 fp.close()
1538 1538
1539 1539 def export(ui, repo, *changesets, **opts):
1540 1540 """dump the header and diffs for one or more changesets
1541 1541
1542 1542 Print the changeset header and diffs for one or more revisions.
1543 1543
1544 1544 The information shown in the changeset header is: author,
1545 1545 changeset hash, parent and commit comment.
1546 1546
1547 1547 Output may be to a file, in which case the name of the file is
1548 1548 given using a format string. The formatting rules are as follows:
1549 1549
1550 1550 %% literal "%" character
1551 1551 %H changeset hash (40 hexadecimal digits)
1552 1552 %N number of patches being generated
1553 1553 %R changeset revision number
1554 1554 %b basename of the exporting repository
1555 1555 %h short-form changeset hash (12 hexadecimal digits)
1556 1556 %n zero-padded sequence number, starting at 1
1557 1557 %r zero-padded changeset revision number
1558 1558
1559 1559 Without the -a option, export will avoid generating diffs of files
1560 1560 it detects as binary. With -a, export will generate a diff anyway,
1561 1561 probably with undesirable results.
1562 1562
1563 1563 With the --switch-parent option, the diff will be against the second
1564 1564 parent. This can be useful for reviewing a merge.
1565 1565 """
1566 1566 if not changesets:
1567 1567 raise util.Abort(_("export requires at least one changeset"))
1568 1568 seqno = 0
1569 1569 revs = list(revrange(ui, repo, changesets))
1570 1570 total = len(revs)
1571 1571 revwidth = max(map(len, revs))
1572 1572 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1573 1573 ui.note(msg)
1574 1574 for cset in revs:
1575 1575 seqno += 1
1576 1576 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1577 1577
1578 1578 def forget(ui, repo, *pats, **opts):
1579 1579 """don't add the specified files on the next commit (DEPRECATED)
1580 1580
1581 1581 (DEPRECATED)
1582 1582 Undo an 'hg add' scheduled for the next commit.
1583 1583
1584 1584 This command is now deprecated and will be removed in a future
1585 1585 release. Please use revert instead.
1586 1586 """
1587 1587 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1588 1588 forget = []
1589 1589 for src, abs, rel, exact in walk(repo, pats, opts):
1590 1590 if repo.dirstate.state(abs) == 'a':
1591 1591 forget.append(abs)
1592 1592 if ui.verbose or not exact:
1593 1593 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1594 1594 repo.forget(forget)
1595 1595
1596 1596 def grep(ui, repo, pattern, *pats, **opts):
1597 1597 """search for a pattern in specified files and revisions
1598 1598
1599 1599 Search revisions of files for a regular expression.
1600 1600
1601 1601 This command behaves differently from Unix grep. It only accepts
1602 1602 Python/Perl regexps. It searches repository history, not the
1603 1603 working directory. It always prints the revision number in which
1604 1604 a match appears.
1605 1605
1606 1606 By default, grep only prints output for the first revision of a
1607 1607 file in which it finds a match. To get it to print every revision
1608 1608 that contains a change in match status ("-" for a match that
1609 1609 becomes a non-match, or "+" for a non-match that becomes a match),
1610 1610 use the --all flag.
1611 1611 """
1612 1612 reflags = 0
1613 1613 if opts['ignore_case']:
1614 1614 reflags |= re.I
1615 1615 regexp = re.compile(pattern, reflags)
1616 1616 sep, eol = ':', '\n'
1617 1617 if opts['print0']:
1618 1618 sep = eol = '\0'
1619 1619
1620 1620 fcache = {}
1621 1621 def getfile(fn):
1622 1622 if fn not in fcache:
1623 1623 fcache[fn] = repo.file(fn)
1624 1624 return fcache[fn]
1625 1625
1626 1626 def matchlines(body):
1627 1627 begin = 0
1628 1628 linenum = 0
1629 1629 while True:
1630 1630 match = regexp.search(body, begin)
1631 1631 if not match:
1632 1632 break
1633 1633 mstart, mend = match.span()
1634 1634 linenum += body.count('\n', begin, mstart) + 1
1635 1635 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1636 1636 lend = body.find('\n', mend)
1637 1637 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1638 1638 begin = lend + 1
1639 1639
1640 1640 class linestate(object):
1641 1641 def __init__(self, line, linenum, colstart, colend):
1642 1642 self.line = line
1643 1643 self.linenum = linenum
1644 1644 self.colstart = colstart
1645 1645 self.colend = colend
1646 1646 def __eq__(self, other):
1647 1647 return self.line == other.line
1648 1648 def __hash__(self):
1649 1649 return hash(self.line)
1650 1650
1651 1651 matches = {}
1652 1652 def grepbody(fn, rev, body):
1653 1653 matches[rev].setdefault(fn, {})
1654 1654 m = matches[rev][fn]
1655 1655 for lnum, cstart, cend, line in matchlines(body):
1656 1656 s = linestate(line, lnum, cstart, cend)
1657 1657 m[s] = s
1658 1658
1659 1659 # FIXME: prev isn't used, why ?
1660 1660 prev = {}
1661 1661 ucache = {}
1662 1662 def display(fn, rev, states, prevstates):
1663 1663 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1664 1664 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1665 1665 counts = {'-': 0, '+': 0}
1666 1666 filerevmatches = {}
1667 1667 for l in diff:
1668 1668 if incrementing or not opts['all']:
1669 1669 change = ((l in prevstates) and '-') or '+'
1670 1670 r = rev
1671 1671 else:
1672 1672 change = ((l in states) and '-') or '+'
1673 1673 r = prev[fn]
1674 1674 cols = [fn, str(rev)]
1675 1675 if opts['line_number']:
1676 1676 cols.append(str(l.linenum))
1677 1677 if opts['all']:
1678 1678 cols.append(change)
1679 1679 if opts['user']:
1680 1680 cols.append(trimuser(ui, getchange(rev)[1], rev,
1681 1681 ucache))
1682 1682 if opts['files_with_matches']:
1683 1683 c = (fn, rev)
1684 1684 if c in filerevmatches:
1685 1685 continue
1686 1686 filerevmatches[c] = 1
1687 1687 else:
1688 1688 cols.append(l.line)
1689 1689 ui.write(sep.join(cols), eol)
1690 1690 counts[change] += 1
1691 1691 return counts['+'], counts['-']
1692 1692
1693 1693 fstate = {}
1694 1694 skip = {}
1695 1695 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1696 1696 count = 0
1697 1697 incrementing = False
1698 1698 for st, rev, fns in changeiter:
1699 1699 if st == 'window':
1700 1700 incrementing = rev
1701 1701 matches.clear()
1702 1702 elif st == 'add':
1703 1703 change = repo.changelog.read(repo.lookup(str(rev)))
1704 1704 mf = repo.manifest.read(change[0])
1705 1705 matches[rev] = {}
1706 1706 for fn in fns:
1707 1707 if fn in skip:
1708 1708 continue
1709 1709 fstate.setdefault(fn, {})
1710 1710 try:
1711 1711 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1712 1712 except KeyError:
1713 1713 pass
1714 1714 elif st == 'iter':
1715 1715 states = matches[rev].items()
1716 1716 states.sort()
1717 1717 for fn, m in states:
1718 1718 if fn in skip:
1719 1719 continue
1720 1720 if incrementing or not opts['all'] or fstate[fn]:
1721 1721 pos, neg = display(fn, rev, m, fstate[fn])
1722 1722 count += pos + neg
1723 1723 if pos and not opts['all']:
1724 1724 skip[fn] = True
1725 1725 fstate[fn] = m
1726 1726 prev[fn] = rev
1727 1727
1728 1728 if not incrementing:
1729 1729 fstate = fstate.items()
1730 1730 fstate.sort()
1731 1731 for fn, state in fstate:
1732 1732 if fn in skip:
1733 1733 continue
1734 1734 display(fn, rev, {}, state)
1735 1735 return (count == 0 and 1) or 0
1736 1736
1737 1737 def heads(ui, repo, **opts):
1738 1738 """show current repository heads
1739 1739
1740 1740 Show all repository head changesets.
1741 1741
1742 1742 Repository "heads" are changesets that don't have child
1743 1743 changesets. They are where development generally takes place and
1744 1744 are the usual targets for update and merge operations.
1745 1745 """
1746 1746 if opts['rev']:
1747 1747 heads = repo.heads(repo.lookup(opts['rev']))
1748 1748 else:
1749 1749 heads = repo.heads()
1750 1750 br = None
1751 1751 if opts['branches']:
1752 1752 br = repo.branchlookup(heads)
1753 1753 displayer = show_changeset(ui, repo, opts)
1754 1754 for n in heads:
1755 1755 displayer.show(changenode=n, brinfo=br)
1756 1756
1757 1757 def identify(ui, repo):
1758 1758 """print information about the working copy
1759 1759
1760 1760 Print a short summary of the current state of the repo.
1761 1761
1762 1762 This summary identifies the repository state using one or two parent
1763 1763 hash identifiers, followed by a "+" if there are uncommitted changes
1764 1764 in the working directory, followed by a list of tags for this revision.
1765 1765 """
1766 1766 parents = [p for p in repo.dirstate.parents() if p != nullid]
1767 1767 if not parents:
1768 1768 ui.write(_("unknown\n"))
1769 1769 return
1770 1770
1771 1771 hexfunc = ui.verbose and hex or short
1772 1772 modified, added, removed, deleted, unknown = repo.changes()
1773 1773 output = ["%s%s" %
1774 1774 ('+'.join([hexfunc(parent) for parent in parents]),
1775 1775 (modified or added or removed or deleted) and "+" or "")]
1776 1776
1777 1777 if not ui.quiet:
1778 1778 # multiple tags for a single parent separated by '/'
1779 1779 parenttags = ['/'.join(tags)
1780 1780 for tags in map(repo.nodetags, parents) if tags]
1781 1781 # tags for multiple parents separated by ' + '
1782 1782 if parenttags:
1783 1783 output.append(' + '.join(parenttags))
1784 1784
1785 1785 ui.write("%s\n" % ' '.join(output))
1786 1786
1787 1787 def import_(ui, repo, patch1, *patches, **opts):
1788 1788 """import an ordered set of patches
1789 1789
1790 1790 Import a list of patches and commit them individually.
1791 1791
1792 1792 If there are outstanding changes in the working directory, import
1793 1793 will abort unless given the -f flag.
1794 1794
1795 1795 You can import a patch straight from a mail message. Even patches
1796 1796 as attachments work (body part must be type text/plain or
1797 1797 text/x-patch to be used). From and Subject headers of the email
1798 1798 message are used as the default committer and commit message. All
1799 1799 text/plain body parts before the first diff are added to the commit
1800 1800 message.
1801 1801
1802 1802 If the imported patch was generated by hg export, the user and description
1803 1803 from the patch override values from the message headers and body. Values
1804 1804 given on the command line with -m and -u override these.
1805 1805
1806 1806 To read a patch from standard input, use patch name "-".
1807 1807 """
1808 1808 patches = (patch1,) + patches
1809 1809
1810 1810 if not opts['force']:
1811 1811 bail_if_changed(repo)
1812 1812
1813 1813 d = opts["base"]
1814 1814 strip = opts["strip"]
1815 1815
1816 1816 mailre = re.compile(r'(?:From |[\w-]+:)')
1817 1817
1818 1818 # attempt to detect the start of a patch
1819 1819 # (this heuristic is borrowed from quilt)
1820 1820 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1821 1821 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1822 1822 '(---|\*\*\*)[ \t])', re.MULTILINE)
1823 1823
1824 1824 for patch in patches:
1825 1825 pf = os.path.join(d, patch)
1826 1826
1827 1827 message = None
1828 1828 user = None
1829 1829 date = None
1830 1830 hgpatch = False
1831 1831
1832 1832 p = email.Parser.Parser()
1833 1833 if pf == '-':
1834 1834 msg = p.parse(sys.stdin)
1835 1835 ui.status(_("applying patch from stdin\n"))
1836 1836 else:
1837 1837 msg = p.parse(file(pf))
1838 1838 ui.status(_("applying %s\n") % patch)
1839 1839
1840 1840 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1841 1841 tmpfp = os.fdopen(fd, 'w')
1842 1842 try:
1843 1843 message = msg['Subject']
1844 1844 if message:
1845 1845 message = message.replace('\n\t', ' ')
1846 1846 ui.debug('Subject: %s\n' % message)
1847 1847 user = msg['From']
1848 1848 if user:
1849 1849 ui.debug('From: %s\n' % user)
1850 1850 diffs_seen = 0
1851 1851 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1852 1852 for part in msg.walk():
1853 1853 content_type = part.get_content_type()
1854 1854 ui.debug('Content-Type: %s\n' % content_type)
1855 1855 if content_type not in ok_types:
1856 1856 continue
1857 1857 payload = part.get_payload(decode=True)
1858 1858 m = diffre.search(payload)
1859 1859 if m:
1860 1860 ui.debug(_('found patch at byte %d\n') % m.start(0))
1861 1861 diffs_seen += 1
1862 1862 hgpatch = False
1863 1863 fp = cStringIO.StringIO()
1864 1864 if message:
1865 1865 fp.write(message)
1866 1866 fp.write('\n')
1867 1867 for line in payload[:m.start(0)].splitlines():
1868 1868 if line.startswith('# HG changeset patch'):
1869 1869 ui.debug(_('patch generated by hg export\n'))
1870 1870 hgpatch = True
1871 1871 # drop earlier commit message content
1872 1872 fp.seek(0)
1873 1873 fp.truncate()
1874 1874 elif hgpatch:
1875 1875 if line.startswith('# User '):
1876 1876 user = line[7:]
1877 1877 ui.debug('From: %s\n' % user)
1878 1878 elif line.startswith("# Date "):
1879 1879 date = line[7:]
1880 1880 if not line.startswith('# '):
1881 1881 fp.write(line)
1882 1882 fp.write('\n')
1883 1883 message = fp.getvalue()
1884 1884 if tmpfp:
1885 1885 tmpfp.write(payload)
1886 1886 if not payload.endswith('\n'):
1887 1887 tmpfp.write('\n')
1888 1888 elif not diffs_seen and message and content_type == 'text/plain':
1889 1889 message += '\n' + payload
1890 1890
1891 1891 if opts['message']:
1892 1892 # pickup the cmdline msg
1893 1893 message = opts['message']
1894 1894 elif message:
1895 1895 # pickup the patch msg
1896 1896 message = message.strip()
1897 1897 else:
1898 1898 # launch the editor
1899 1899 message = None
1900 1900 ui.debug(_('message:\n%s\n') % message)
1901 1901
1902 1902 tmpfp.close()
1903 1903 if not diffs_seen:
1904 1904 raise util.Abort(_('no diffs found'))
1905 1905
1906 1906 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1907 1907 if len(files) > 0:
1908 1908 cfiles = files
1909 1909 cwd = repo.getcwd()
1910 1910 if cwd:
1911 1911 cfiles = [util.pathto(cwd, f) for f in files]
1912 1912 addremove_lock(ui, repo, cfiles, {})
1913 1913 repo.commit(files, message, user, date)
1914 1914 finally:
1915 1915 os.unlink(tmpname)
1916 1916
1917 1917 def incoming(ui, repo, source="default", **opts):
1918 1918 """show new changesets found in source
1919 1919
1920 1920 Show new changesets found in the specified path/URL or the default
1921 1921 pull location. These are the changesets that would be pulled if a pull
1922 1922 was requested.
1923 1923
1924 1924 For a remote repository, using --bundle avoids downloading the changesets
1925 1925 twice if the incoming command is followed by a pull.
1926 1926
1927 1927 See pull for valid source format details.
1928 1928 """
1929 1929 source = ui.expandpath(source)
1930 1930 setremoteconfig(ui, opts)
1931 1931
1932 1932 other = hg.repository(ui, source)
1933 1933 incoming = repo.findincoming(other, force=opts["force"])
1934 1934 if not incoming:
1935 1935 ui.status(_("no changes found\n"))
1936 1936 return
1937 1937
1938 1938 cleanup = None
1939 1939 try:
1940 1940 fname = opts["bundle"]
1941 1941 if fname or not other.local():
1942 1942 # create a bundle (uncompressed if other repo is not local)
1943 1943 cg = other.changegroup(incoming, "incoming")
1944 1944 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1945 1945 # keep written bundle?
1946 1946 if opts["bundle"]:
1947 1947 cleanup = None
1948 1948 if not other.local():
1949 1949 # use the created uncompressed bundlerepo
1950 1950 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1951 1951
1952 1952 revs = None
1953 1953 if opts['rev']:
1954 1954 revs = [other.lookup(rev) for rev in opts['rev']]
1955 1955 o = other.changelog.nodesbetween(incoming, revs)[0]
1956 1956 if opts['newest_first']:
1957 1957 o.reverse()
1958 1958 displayer = show_changeset(ui, other, opts)
1959 1959 for n in o:
1960 1960 parents = [p for p in other.changelog.parents(n) if p != nullid]
1961 1961 if opts['no_merges'] and len(parents) == 2:
1962 1962 continue
1963 1963 displayer.show(changenode=n)
1964 1964 if opts['patch']:
1965 1965 prev = (parents and parents[0]) or nullid
1966 1966 dodiff(ui, ui, other, prev, n)
1967 1967 ui.write("\n")
1968 1968 finally:
1969 1969 if hasattr(other, 'close'):
1970 1970 other.close()
1971 1971 if cleanup:
1972 1972 os.unlink(cleanup)
1973 1973
1974 1974 def init(ui, dest=".", **opts):
1975 1975 """create a new repository in the given directory
1976 1976
1977 1977 Initialize a new repository in the given directory. If the given
1978 1978 directory does not exist, it is created.
1979 1979
1980 1980 If no directory is given, the current directory is used.
1981 1981
1982 1982 It is possible to specify an ssh:// URL as the destination.
1983 1983 Look at the help text for the pull command for important details
1984 1984 about ssh:// URLs.
1985 1985 """
1986 1986 setremoteconfig(ui, opts)
1987 1987 hg.repository(ui, dest, create=1)
1988 1988
1989 1989 def locate(ui, repo, *pats, **opts):
1990 1990 """locate files matching specific patterns
1991 1991
1992 1992 Print all files under Mercurial control whose names match the
1993 1993 given patterns.
1994 1994
1995 1995 This command searches the current directory and its
1996 1996 subdirectories. To search an entire repository, move to the root
1997 1997 of the repository.
1998 1998
1999 1999 If no patterns are given to match, this command prints all file
2000 2000 names.
2001 2001
2002 2002 If you want to feed the output of this command into the "xargs"
2003 2003 command, use the "-0" option to both this command and "xargs".
2004 2004 This will avoid the problem of "xargs" treating single filenames
2005 2005 that contain white space as multiple filenames.
2006 2006 """
2007 2007 end = opts['print0'] and '\0' or '\n'
2008 2008 rev = opts['rev']
2009 2009 if rev:
2010 2010 node = repo.lookup(rev)
2011 2011 else:
2012 2012 node = None
2013 2013
2014 2014 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2015 2015 head='(?:.*/|)'):
2016 2016 if not node and repo.dirstate.state(abs) == '?':
2017 2017 continue
2018 2018 if opts['fullpath']:
2019 2019 ui.write(os.path.join(repo.root, abs), end)
2020 2020 else:
2021 2021 ui.write(((pats and rel) or abs), end)
2022 2022
2023 2023 def log(ui, repo, *pats, **opts):
2024 2024 """show revision history of entire repository or files
2025 2025
2026 2026 Print the revision history of the specified files or the entire
2027 2027 project.
2028 2028
2029 2029 File history is shown without following rename or copy history of
2030 2030 files. Use -f/--follow with a file name to follow history across
2031 2031 renames and copies. --follow without a file name will only show
2032 2032 ancestors or descendants of the starting revision. --follow-first
2033 2033 only follows the first parent of merge revisions.
2034 2034
2035 2035 If no revision range is specified, the default is tip:0 unless
2036 2036 --follow is set, in which case the working directory parent is
2037 2037 used as the starting revision.
2038 2038
2039 2039 By default this command outputs: changeset id and hash, tags,
2040 2040 non-trivial parents, user, date and time, and a summary for each
2041 2041 commit. When the -v/--verbose switch is used, the list of changed
2042 2042 files and the full commit message are shown.
2043 2043 """
2044 2044 class dui(object):
2045 2045 # Implement and delegate some ui protocol. Save hunks of
2046 2046 # output for later display in the desired order.
2047 2047 def __init__(self, ui):
2048 2048 self.ui = ui
2049 2049 self.hunk = {}
2050 2050 self.header = {}
2051 2051 def bump(self, rev):
2052 2052 self.rev = rev
2053 2053 self.hunk[rev] = []
2054 2054 self.header[rev] = []
2055 2055 def note(self, *args):
2056 2056 if self.verbose:
2057 2057 self.write(*args)
2058 2058 def status(self, *args):
2059 2059 if not self.quiet:
2060 2060 self.write(*args)
2061 2061 def write(self, *args):
2062 2062 self.hunk[self.rev].append(args)
2063 2063 def write_header(self, *args):
2064 2064 self.header[self.rev].append(args)
2065 2065 def debug(self, *args):
2066 2066 if self.debugflag:
2067 2067 self.write(*args)
2068 2068 def __getattr__(self, key):
2069 2069 return getattr(self.ui, key)
2070 2070
2071 2071 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2072 2072
2073 2073 if opts['limit']:
2074 2074 try:
2075 2075 limit = int(opts['limit'])
2076 2076 except ValueError:
2077 2077 raise util.Abort(_('limit must be a positive integer'))
2078 2078 if limit <= 0: raise util.Abort(_('limit must be positive'))
2079 2079 else:
2080 2080 limit = sys.maxint
2081 2081 count = 0
2082 2082
2083 2083 displayer = show_changeset(ui, repo, opts)
2084 2084 for st, rev, fns in changeiter:
2085 2085 if st == 'window':
2086 2086 du = dui(ui)
2087 2087 displayer.ui = du
2088 2088 elif st == 'add':
2089 2089 du.bump(rev)
2090 2090 changenode = repo.changelog.node(rev)
2091 2091 parents = [p for p in repo.changelog.parents(changenode)
2092 2092 if p != nullid]
2093 2093 if opts['no_merges'] and len(parents) == 2:
2094 2094 continue
2095 2095 if opts['only_merges'] and len(parents) != 2:
2096 2096 continue
2097 2097
2098 2098 if opts['keyword']:
2099 2099 changes = getchange(rev)
2100 2100 miss = 0
2101 2101 for k in [kw.lower() for kw in opts['keyword']]:
2102 2102 if not (k in changes[1].lower() or
2103 2103 k in changes[4].lower() or
2104 2104 k in " ".join(changes[3][:20]).lower()):
2105 2105 miss = 1
2106 2106 break
2107 2107 if miss:
2108 2108 continue
2109 2109
2110 2110 br = None
2111 2111 if opts['branches']:
2112 2112 br = repo.branchlookup([repo.changelog.node(rev)])
2113 2113
2114 2114 displayer.show(rev, brinfo=br)
2115 2115 if opts['patch']:
2116 2116 prev = (parents and parents[0]) or nullid
2117 2117 dodiff(du, du, repo, prev, changenode, match=matchfn)
2118 2118 du.write("\n\n")
2119 2119 elif st == 'iter':
2120 2120 if count == limit: break
2121 2121 if du.header[rev]:
2122 2122 for args in du.header[rev]:
2123 2123 ui.write_header(*args)
2124 2124 if du.hunk[rev]:
2125 2125 count += 1
2126 2126 for args in du.hunk[rev]:
2127 2127 ui.write(*args)
2128 2128
2129 2129 def manifest(ui, repo, rev=None):
2130 2130 """output the latest or given revision of the project manifest
2131 2131
2132 2132 Print a list of version controlled files for the given revision.
2133 2133
2134 2134 The manifest is the list of files being version controlled. If no revision
2135 2135 is given then the tip is used.
2136 2136 """
2137 2137 if rev:
2138 2138 try:
2139 2139 # assume all revision numbers are for changesets
2140 2140 n = repo.lookup(rev)
2141 2141 change = repo.changelog.read(n)
2142 2142 n = change[0]
2143 2143 except hg.RepoError:
2144 2144 n = repo.manifest.lookup(rev)
2145 2145 else:
2146 2146 n = repo.manifest.tip()
2147 2147 m = repo.manifest.read(n)
2148 2148 mf = repo.manifest.readflags(n)
2149 2149 files = m.keys()
2150 2150 files.sort()
2151 2151
2152 2152 for f in files:
2153 2153 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2154 2154
2155 2155 def merge(ui, repo, node=None, **opts):
2156 2156 """Merge working directory with another revision
2157 2157
2158 2158 Merge the contents of the current working directory and the
2159 2159 requested revision. Files that changed between either parent are
2160 2160 marked as changed for the next commit and a commit must be
2161 2161 performed before any further updates are allowed.
2162 2162 """
2163 2163 return doupdate(ui, repo, node=node, merge=True, **opts)
2164 2164
2165 2165 def outgoing(ui, repo, dest=None, **opts):
2166 2166 """show changesets not found in destination
2167 2167
2168 2168 Show changesets not found in the specified destination repository or
2169 2169 the default push location. These are the changesets that would be pushed
2170 2170 if a push was requested.
2171 2171
2172 2172 See pull for valid destination format details.
2173 2173 """
2174 2174 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2175 2175 setremoteconfig(ui, opts)
2176 2176 revs = None
2177 2177 if opts['rev']:
2178 2178 revs = [repo.lookup(rev) for rev in opts['rev']]
2179 2179
2180 2180 other = hg.repository(ui, dest)
2181 2181 o = repo.findoutgoing(other, force=opts['force'])
2182 2182 if not o:
2183 2183 ui.status(_("no changes found\n"))
2184 2184 return
2185 2185 o = repo.changelog.nodesbetween(o, revs)[0]
2186 2186 if opts['newest_first']:
2187 2187 o.reverse()
2188 2188 displayer = show_changeset(ui, repo, opts)
2189 2189 for n in o:
2190 2190 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2191 2191 if opts['no_merges'] and len(parents) == 2:
2192 2192 continue
2193 2193 displayer.show(changenode=n)
2194 2194 if opts['patch']:
2195 2195 prev = (parents and parents[0]) or nullid
2196 2196 dodiff(ui, ui, repo, prev, n)
2197 2197 ui.write("\n")
2198 2198
2199 2199 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2200 2200 """show the parents of the working dir or revision
2201 2201
2202 2202 Print the working directory's parent revisions.
2203 2203 """
2204 2204 # legacy
2205 2205 if file_ and not rev:
2206 2206 try:
2207 2207 rev = repo.lookup(file_)
2208 2208 file_ = None
2209 2209 except hg.RepoError:
2210 2210 pass
2211 2211 else:
2212 2212 ui.warn(_("'hg parent REV' is deprecated, "
2213 2213 "please use 'hg parents -r REV instead\n"))
2214 2214
2215 2215 if rev:
2216 2216 if file_:
2217 2217 ctx = repo.filectx(file_, changeid=rev)
2218 2218 else:
2219 2219 ctx = repo.changectx(rev)
2220 2220 p = [cp.node() for cp in ctx.parents()]
2221 2221 else:
2222 2222 p = repo.dirstate.parents()
2223 2223
2224 2224 br = None
2225 2225 if branches is not None:
2226 2226 br = repo.branchlookup(p)
2227 2227 displayer = show_changeset(ui, repo, opts)
2228 2228 for n in p:
2229 2229 if n != nullid:
2230 2230 displayer.show(changenode=n, brinfo=br)
2231 2231
2232 2232 def paths(ui, repo, search=None):
2233 2233 """show definition of symbolic path names
2234 2234
2235 2235 Show definition of symbolic path name NAME. If no name is given, show
2236 2236 definition of available names.
2237 2237
2238 2238 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2239 2239 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2240 2240 """
2241 2241 if search:
2242 2242 for name, path in ui.configitems("paths"):
2243 2243 if name == search:
2244 2244 ui.write("%s\n" % path)
2245 2245 return
2246 2246 ui.warn(_("not found!\n"))
2247 2247 return 1
2248 2248 else:
2249 2249 for name, path in ui.configitems("paths"):
2250 2250 ui.write("%s = %s\n" % (name, path))
2251 2251
2252 2252 def postincoming(ui, repo, modheads, optupdate):
2253 2253 if modheads == 0:
2254 2254 return
2255 2255 if optupdate:
2256 2256 if modheads == 1:
2257 2257 return doupdate(ui, repo)
2258 2258 else:
2259 2259 ui.status(_("not updating, since new heads added\n"))
2260 2260 if modheads > 1:
2261 2261 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2262 2262 else:
2263 2263 ui.status(_("(run 'hg update' to get a working copy)\n"))
2264 2264
2265 2265 def pull(ui, repo, source="default", **opts):
2266 2266 """pull changes from the specified source
2267 2267
2268 2268 Pull changes from a remote repository to a local one.
2269 2269
2270 2270 This finds all changes from the repository at the specified path
2271 2271 or URL and adds them to the local repository. By default, this
2272 2272 does not update the copy of the project in the working directory.
2273 2273
2274 2274 Valid URLs are of the form:
2275 2275
2276 2276 local/filesystem/path
2277 2277 http://[user@]host[:port]/[path]
2278 2278 https://[user@]host[:port]/[path]
2279 2279 ssh://[user@]host[:port]/[path]
2280 2280
2281 2281 Some notes about using SSH with Mercurial:
2282 2282 - SSH requires an accessible shell account on the destination machine
2283 2283 and a copy of hg in the remote path, or specified with the --remotecmd option.
2284 2284 - path is relative to the remote user's home directory by default.
2285 2285 Use an extra slash at the start of a path to specify an absolute path:
2286 2286 ssh://example.com//tmp/repository
2287 2287 - Mercurial doesn't use its own compression via SSH; the right thing
2288 2288 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2289 2289 Host *.mylocalnetwork.example.com
2290 2290 Compression off
2291 2291 Host *
2292 2292 Compression on
2293 2293 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2294 2294 with the --ssh command line option.
2295 2295 """
2296 2296 source = ui.expandpath(source)
2297 2297 setremoteconfig(ui, opts)
2298 2298
2299 2299 other = hg.repository(ui, source)
2300 2300 ui.status(_('pulling from %s\n') % (source))
2301 2301 revs = None
2302 2302 if opts['rev'] and not other.local():
2303 2303 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2304 2304 elif opts['rev']:
2305 2305 revs = [other.lookup(rev) for rev in opts['rev']]
2306 2306 modheads = repo.pull(other, heads=revs, force=opts['force'])
2307 2307 return postincoming(ui, repo, modheads, opts['update'])
2308 2308
2309 2309 def push(ui, repo, dest=None, **opts):
2310 2310 """push changes to the specified destination
2311 2311
2312 2312 Push changes from the local repository to the given destination.
2313 2313
2314 2314 This is the symmetrical operation for pull. It helps to move
2315 2315 changes from the current repository to a different one. If the
2316 2316 destination is local this is identical to a pull in that directory
2317 2317 from the current one.
2318 2318
2319 2319 By default, push will refuse to run if it detects the result would
2320 2320 increase the number of remote heads. This generally indicates that
2321 2321 the client has forgotten to sync and merge before pushing.
2322 2322
2323 2323 Valid URLs are of the form:
2324 2324
2325 2325 local/filesystem/path
2326 2326 ssh://[user@]host[:port]/[path]
2327 2327
2328 2328 Look at the help text for the pull command for important details
2329 2329 about ssh:// URLs.
2330 2330
2331 2331 Pushing to http:// and https:// URLs is possible, too, if this
2332 2332 feature is enabled on the remote Mercurial server.
2333 2333 """
2334 2334 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2335 2335 setremoteconfig(ui, opts)
2336 2336
2337 2337 other = hg.repository(ui, dest)
2338 2338 ui.status('pushing to %s\n' % (dest))
2339 2339 revs = None
2340 2340 if opts['rev']:
2341 2341 revs = [repo.lookup(rev) for rev in opts['rev']]
2342 2342 r = repo.push(other, opts['force'], revs=revs)
2343 2343 return r == 0
2344 2344
2345 2345 def rawcommit(ui, repo, *flist, **rc):
2346 2346 """raw commit interface (DEPRECATED)
2347 2347
2348 2348 (DEPRECATED)
2349 2349 Low-level commit, for use in helper scripts.
2350 2350
2351 2351 This command is not intended to be used by normal users, as it is
2352 2352 primarily useful for importing from other SCMs.
2353 2353
2354 2354 This command is now deprecated and will be removed in a future
2355 2355 release; please use debugsetparents and commit instead.
2356 2356 """
2357 2357
2358 2358 ui.warn(_("(the rawcommit command is deprecated)\n"))
2359 2359
2360 2360 message = rc['message']
2361 2361 if not message and rc['logfile']:
2362 2362 try:
2363 2363 message = open(rc['logfile']).read()
2364 2364 except IOError:
2365 2365 pass
2366 2366 if not message and not rc['logfile']:
2367 2367 raise util.Abort(_("missing commit message"))
2368 2368
2369 2369 files = relpath(repo, list(flist))
2370 2370 if rc['files']:
2371 2371 files += open(rc['files']).read().splitlines()
2372 2372
2373 2373 rc['parent'] = map(repo.lookup, rc['parent'])
2374 2374
2375 2375 try:
2376 2376 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2377 2377 except ValueError, inst:
2378 2378 raise util.Abort(str(inst))
2379 2379
2380 2380 def recover(ui, repo):
2381 2381 """roll back an interrupted transaction
2382 2382
2383 2383 Recover from an interrupted commit or pull.
2384 2384
2385 2385 This command tries to fix the repository status after an interrupted
2386 2386 operation. It should only be necessary when Mercurial suggests it.
2387 2387 """
2388 2388 if repo.recover():
2389 2389 return repo.verify()
2390 2390 return 1
2391 2391
2392 2392 def remove(ui, repo, *pats, **opts):
2393 2393 """remove the specified files on the next commit
2394 2394
2395 2395 Schedule the indicated files for removal from the repository.
2396 2396
2397 2397 This command schedules the files to be removed at the next commit.
2398 2398 This only removes files from the current branch, not from the
2399 2399 entire project history. If the files still exist in the working
2400 2400 directory, they will be deleted from it. If invoked with --after,
2401 2401 files that have been manually deleted are marked as removed.
2402 2402
2403 2403 Modified files and added files are not removed by default. To
2404 2404 remove them, use the -f/--force option.
2405 2405 """
2406 2406 names = []
2407 2407 if not opts['after'] and not pats:
2408 2408 raise util.Abort(_('no files specified'))
2409 2409 files, matchfn, anypats = matchpats(repo, pats, opts)
2410 2410 exact = dict.fromkeys(files)
2411 2411 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2412 2412 modified, added, removed, deleted, unknown = mardu
2413 2413 remove, forget = [], []
2414 2414 for src, abs, rel, exact in walk(repo, pats, opts):
2415 2415 reason = None
2416 2416 if abs not in deleted and opts['after']:
2417 2417 reason = _('is still present')
2418 2418 elif abs in modified and not opts['force']:
2419 2419 reason = _('is modified (use -f to force removal)')
2420 2420 elif abs in added:
2421 2421 if opts['force']:
2422 2422 forget.append(abs)
2423 2423 continue
2424 2424 reason = _('has been marked for add (use -f to force removal)')
2425 2425 elif abs in unknown:
2426 2426 reason = _('is not managed')
2427 2427 elif abs in removed:
2428 2428 continue
2429 2429 if reason:
2430 2430 if exact:
2431 2431 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2432 2432 else:
2433 2433 if ui.verbose or not exact:
2434 2434 ui.status(_('removing %s\n') % rel)
2435 2435 remove.append(abs)
2436 2436 repo.forget(forget)
2437 2437 repo.remove(remove, unlink=not opts['after'])
2438 2438
2439 2439 def rename(ui, repo, *pats, **opts):
2440 2440 """rename files; equivalent of copy + remove
2441 2441
2442 2442 Mark dest as copies of sources; mark sources for deletion. If
2443 2443 dest is a directory, copies are put in that directory. If dest is
2444 2444 a file, there can only be one source.
2445 2445
2446 2446 By default, this command copies the contents of files as they
2447 2447 stand in the working directory. If invoked with --after, the
2448 2448 operation is recorded, but no copying is performed.
2449 2449
2450 2450 This command takes effect in the next commit.
2451 2451
2452 2452 NOTE: This command should be treated as experimental. While it
2453 2453 should properly record renamed files, this information is not yet
2454 2454 fully used by merge, nor fully reported by log.
2455 2455 """
2456 2456 wlock = repo.wlock(0)
2457 2457 errs, copied = docopy(ui, repo, pats, opts, wlock)
2458 2458 names = []
2459 2459 for abs, rel, exact in copied:
2460 2460 if ui.verbose or not exact:
2461 2461 ui.status(_('removing %s\n') % rel)
2462 2462 names.append(abs)
2463 2463 if not opts.get('dry_run'):
2464 2464 repo.remove(names, True, wlock)
2465 2465 return errs
2466 2466
2467 2467 def revert(ui, repo, *pats, **opts):
2468 2468 """revert files or dirs to their states as of some revision
2469 2469
2470 2470 With no revision specified, revert the named files or directories
2471 2471 to the contents they had in the parent of the working directory.
2472 2472 This restores the contents of the affected files to an unmodified
2473 2473 state. If the working directory has two parents, you must
2474 2474 explicitly specify the revision to revert to.
2475 2475
2476 2476 Modified files are saved with a .orig suffix before reverting.
2477 2477 To disable these backups, use --no-backup.
2478 2478
2479 2479 Using the -r option, revert the given files or directories to
2480 2480 their contents as of a specific revision. This can be helpful to "roll
2481 2481 back" some or all of a change that should not have been committed.
2482 2482
2483 2483 Revert modifies the working directory. It does not commit any
2484 2484 changes, or change the parent of the working directory. If you
2485 2485 revert to a revision other than the parent of the working
2486 2486 directory, the reverted files will thus appear modified
2487 2487 afterwards.
2488 2488
2489 2489 If a file has been deleted, it is recreated. If the executable
2490 2490 mode of a file was changed, it is reset.
2491 2491
2492 2492 If names are given, all files matching the names are reverted.
2493 2493
2494 2494 If no arguments are given, all files in the repository are reverted.
2495 2495 """
2496 2496 parent, p2 = repo.dirstate.parents()
2497 2497 if opts['rev']:
2498 2498 node = repo.lookup(opts['rev'])
2499 2499 elif p2 != nullid:
2500 2500 raise util.Abort(_('working dir has two parents; '
2501 2501 'you must specify the revision to revert to'))
2502 2502 else:
2503 2503 node = parent
2504 2504 mf = repo.manifest.read(repo.changelog.read(node)[0])
2505 2505 if node == parent:
2506 2506 pmf = mf
2507 2507 else:
2508 2508 pmf = None
2509 2509
2510 2510 wlock = repo.wlock()
2511 2511
2512 2512 # need all matching names in dirstate and manifest of target rev,
2513 2513 # so have to walk both. do not print errors if files exist in one
2514 2514 # but not other.
2515 2515
2516 2516 names = {}
2517 2517 target_only = {}
2518 2518
2519 2519 # walk dirstate.
2520 2520
2521 2521 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2522 2522 names[abs] = (rel, exact)
2523 2523 if src == 'b':
2524 2524 target_only[abs] = True
2525 2525
2526 2526 # walk target manifest.
2527 2527
2528 2528 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2529 2529 badmatch=names.has_key):
2530 2530 if abs in names: continue
2531 2531 names[abs] = (rel, exact)
2532 2532 target_only[abs] = True
2533 2533
2534 2534 changes = repo.changes(match=names.has_key, wlock=wlock)
2535 2535 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2536 2536
2537 2537 revert = ([], _('reverting %s\n'))
2538 2538 add = ([], _('adding %s\n'))
2539 2539 remove = ([], _('removing %s\n'))
2540 2540 forget = ([], _('forgetting %s\n'))
2541 2541 undelete = ([], _('undeleting %s\n'))
2542 2542 update = {}
2543 2543
2544 2544 disptable = (
2545 2545 # dispatch table:
2546 2546 # file state
2547 2547 # action if in target manifest
2548 2548 # action if not in target manifest
2549 2549 # make backup if in target manifest
2550 2550 # make backup if not in target manifest
2551 2551 (modified, revert, remove, True, True),
2552 2552 (added, revert, forget, True, False),
2553 2553 (removed, undelete, None, False, False),
2554 2554 (deleted, revert, remove, False, False),
2555 2555 (unknown, add, None, True, False),
2556 2556 (target_only, add, None, False, False),
2557 2557 )
2558 2558
2559 2559 entries = names.items()
2560 2560 entries.sort()
2561 2561
2562 2562 for abs, (rel, exact) in entries:
2563 2563 mfentry = mf.get(abs)
2564 2564 def handle(xlist, dobackup):
2565 2565 xlist[0].append(abs)
2566 2566 update[abs] = 1
2567 2567 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2568 2568 bakname = "%s.orig" % rel
2569 2569 ui.note(_('saving current version of %s as %s\n') %
2570 2570 (rel, bakname))
2571 2571 if not opts.get('dry_run'):
2572 2572 shutil.copyfile(rel, bakname)
2573 2573 shutil.copymode(rel, bakname)
2574 2574 if ui.verbose or not exact:
2575 2575 ui.status(xlist[1] % rel)
2576 2576 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2577 2577 if abs not in table: continue
2578 2578 # file has changed in dirstate
2579 2579 if mfentry:
2580 2580 handle(hitlist, backuphit)
2581 2581 elif misslist is not None:
2582 2582 handle(misslist, backupmiss)
2583 2583 else:
2584 2584 if exact: ui.warn(_('file not managed: %s\n') % rel)
2585 2585 break
2586 2586 else:
2587 2587 # file has not changed in dirstate
2588 2588 if node == parent:
2589 2589 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2590 2590 continue
2591 2591 if pmf is None:
2592 2592 # only need parent manifest in this unlikely case,
2593 2593 # so do not read by default
2594 2594 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2595 2595 if abs in pmf:
2596 2596 if mfentry:
2597 2597 # if version of file is same in parent and target
2598 2598 # manifests, do nothing
2599 2599 if pmf[abs] != mfentry:
2600 2600 handle(revert, False)
2601 2601 else:
2602 2602 handle(remove, False)
2603 2603
2604 2604 if not opts.get('dry_run'):
2605 2605 repo.dirstate.forget(forget[0])
2606 2606 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2607 2607 show_stats=False)
2608 2608 repo.dirstate.update(add[0], 'a')
2609 2609 repo.dirstate.update(undelete[0], 'n')
2610 2610 repo.dirstate.update(remove[0], 'r')
2611 2611 return r
2612 2612
2613 2613 def rollback(ui, repo):
2614 2614 """roll back the last transaction in this repository
2615 2615
2616 2616 Roll back the last transaction in this repository, restoring the
2617 2617 project to its state prior to the transaction.
2618 2618
2619 2619 Transactions are used to encapsulate the effects of all commands
2620 2620 that create new changesets or propagate existing changesets into a
2621 2621 repository. For example, the following commands are transactional,
2622 2622 and their effects can be rolled back:
2623 2623
2624 2624 commit
2625 2625 import
2626 2626 pull
2627 2627 push (with this repository as destination)
2628 2628 unbundle
2629 2629
2630 2630 This command should be used with care. There is only one level of
2631 2631 rollback, and there is no way to undo a rollback.
2632 2632
2633 2633 This command is not intended for use on public repositories. Once
2634 2634 changes are visible for pull by other users, rolling a transaction
2635 2635 back locally is ineffective (someone else may already have pulled
2636 2636 the changes). Furthermore, a race is possible with readers of the
2637 2637 repository; for example an in-progress pull from the repository
2638 2638 may fail if a rollback is performed.
2639 2639 """
2640 2640 repo.rollback()
2641 2641
2642 2642 def root(ui, repo):
2643 2643 """print the root (top) of the current working dir
2644 2644
2645 2645 Print the root directory of the current repository.
2646 2646 """
2647 2647 ui.write(repo.root + "\n")
2648 2648
2649 2649 def serve(ui, repo, **opts):
2650 2650 """export the repository via HTTP
2651 2651
2652 2652 Start a local HTTP repository browser and pull server.
2653 2653
2654 2654 By default, the server logs accesses to stdout and errors to
2655 2655 stderr. Use the "-A" and "-E" options to log to files.
2656 2656 """
2657 2657
2658 2658 if opts["stdio"]:
2659 2659 if repo is None:
2660 2660 raise hg.RepoError(_('no repo found'))
2661 2661 s = sshserver.sshserver(ui, repo)
2662 2662 s.serve_forever()
2663 2663
2664 2664 optlist = ("name templates style address port ipv6"
2665 2665 " accesslog errorlog webdir_conf")
2666 2666 for o in optlist.split():
2667 2667 if opts[o]:
2668 2668 ui.setconfig("web", o, opts[o])
2669 2669
2670 2670 if repo is None and not ui.config("web", "webdir_conf"):
2671 2671 raise hg.RepoError(_('no repo found'))
2672 2672
2673 2673 if opts['daemon'] and not opts['daemon_pipefds']:
2674 2674 rfd, wfd = os.pipe()
2675 2675 args = sys.argv[:]
2676 2676 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2677 2677 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2678 2678 args[0], args)
2679 2679 os.close(wfd)
2680 2680 os.read(rfd, 1)
2681 2681 os._exit(0)
2682 2682
2683 2683 try:
2684 2684 httpd = hgweb.server.create_server(ui, repo)
2685 2685 except socket.error, inst:
2686 2686 raise util.Abort(_('cannot start server: ') + inst.args[1])
2687 2687
2688 2688 if ui.verbose:
2689 2689 addr, port = httpd.socket.getsockname()
2690 2690 if addr == '0.0.0.0':
2691 2691 addr = socket.gethostname()
2692 2692 else:
2693 2693 try:
2694 2694 addr = socket.gethostbyaddr(addr)[0]
2695 2695 except socket.error:
2696 2696 pass
2697 2697 if port != 80:
2698 2698 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2699 2699 else:
2700 2700 ui.status(_('listening at http://%s/\n') % addr)
2701 2701
2702 2702 if opts['pid_file']:
2703 2703 fp = open(opts['pid_file'], 'w')
2704 2704 fp.write(str(os.getpid()) + '\n')
2705 2705 fp.close()
2706 2706
2707 2707 if opts['daemon_pipefds']:
2708 2708 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2709 2709 os.close(rfd)
2710 2710 os.write(wfd, 'y')
2711 2711 os.close(wfd)
2712 2712 sys.stdout.flush()
2713 2713 sys.stderr.flush()
2714 2714 fd = os.open(util.nulldev, os.O_RDWR)
2715 2715 if fd != 0: os.dup2(fd, 0)
2716 2716 if fd != 1: os.dup2(fd, 1)
2717 2717 if fd != 2: os.dup2(fd, 2)
2718 2718 if fd not in (0, 1, 2): os.close(fd)
2719 2719
2720 2720 httpd.serve_forever()
2721 2721
2722 2722 def status(ui, repo, *pats, **opts):
2723 2723 """show changed files in the working directory
2724 2724
2725 2725 Show status of files in the repository. If names are given, only
2726 2726 files that match are shown. Files that are clean or ignored are
2727 2727 not listed unless -c (clean), -i (ignored) or -A is given.
2728 2728
2729 2729 The codes used to show the status of files are:
2730 2730 M = modified
2731 2731 A = added
2732 2732 R = removed
2733 2733 C = clean
2734 2734 ! = deleted, but still tracked
2735 2735 ? = not tracked
2736 2736 I = ignored (not shown by default)
2737 2737 = the previously added file was copied from here
2738 2738 """
2739 2739
2740 2740 all = opts['all']
2741 2741
2742 2742 files, matchfn, anypats = matchpats(repo, pats, opts)
2743 2743 cwd = (pats and repo.getcwd()) or ''
2744 2744 modified, added, removed, deleted, unknown, ignored, clean = [
2745 2745 [util.pathto(cwd, x) for x in n]
2746 2746 for n in repo.status(files=files, match=matchfn,
2747 2747 list_ignored=all or opts['ignored'],
2748 2748 list_clean=all or opts['clean'])]
2749 2749
2750 2750 changetypes = (('modified', 'M', modified),
2751 2751 ('added', 'A', added),
2752 2752 ('removed', 'R', removed),
2753 2753 ('deleted', '!', deleted),
2754 2754 ('unknown', '?', unknown),
2755 2755 ('ignored', 'I', ignored))
2756 2756
2757 2757 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2758 2758
2759 2759 end = opts['print0'] and '\0' or '\n'
2760 2760
2761 2761 for opt, char, changes in ([ct for ct in explicit_changetypes
2762 2762 if all or opts[ct[0]]]
2763 2763 or changetypes):
2764 2764 if opts['no_status']:
2765 2765 format = "%%s%s" % end
2766 2766 else:
2767 2767 format = "%s %%s%s" % (char, end)
2768 2768
2769 2769 for f in changes:
2770 2770 ui.write(format % f)
2771 2771 if ((all or opts.get('copies')) and not opts.get('no_status')
2772 2772 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2773 2773 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2774 2774
2775 2775 def tag(ui, repo, name, rev_=None, **opts):
2776 2776 """add a tag for the current tip or a given revision
2777 2777
2778 2778 Name a particular revision using <name>.
2779 2779
2780 2780 Tags are used to name particular revisions of the repository and are
2781 2781 very useful to compare different revisions, to go back to significant
2782 2782 earlier versions or to mark branch points as releases, etc.
2783 2783
2784 2784 If no revision is given, the parent of the working directory is used.
2785 2785
2786 2786 To facilitate version control, distribution, and merging of tags,
2787 2787 they are stored as a file named ".hgtags" which is managed
2788 2788 similarly to other project files and can be hand-edited if
2789 2789 necessary. The file '.hg/localtags' is used for local tags (not
2790 2790 shared among repositories).
2791 2791 """
2792 if name == "tip":
2793 raise util.Abort(_("the name 'tip' is reserved"))
2792 if name in ['tip', '.']:
2793 raise util.Abort(_("the name '%s' is reserved") % name)
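# Illustrative effect of the check above (assumed invocations): both
#
#   hg tag tip
#   hg tag .
#
# now abort, since 'tip' and '.' are reserved revision names ('.' being the
# new shortcut for the working directory parent introduced by this changeset).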
2794 2794 if rev_ is not None:
2795 2795 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2796 2796 "please use 'hg tag [-r REV] NAME' instead\n"))
2797 2797 if opts['rev']:
2798 2798 raise util.Abort(_("use only one form to specify the revision"))
2799 2799 if opts['rev']:
2800 2800 rev_ = opts['rev']
2801 2801 if rev_:
2802 2802 r = hex(repo.lookup(rev_))
2803 2803 else:
2804 2804 p1, p2 = repo.dirstate.parents()
2805 2805 if p1 == nullid:
2806 2806 raise util.Abort(_('no revision to tag'))
2807 2807 if p2 != nullid:
2808 2808 raise util.Abort(_('outstanding uncommitted merges'))
2809 2809 r = hex(p1)
2810 2810
2811 2811 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2812 2812 opts['date'])
2813 2813
2814 2814 def tags(ui, repo):
2815 2815 """list repository tags
2816 2816
2817 2817 List the repository tags.
2818 2818
2819 2819 This lists both regular and local tags.
2820 2820 """
2821 2821
2822 2822 l = repo.tagslist()
2823 2823 l.reverse()
2824 2824 for t, n in l:
2825 2825 try:
2826 2826 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2827 2827 except KeyError:
2828 2828 r = " ?:?"
2829 2829 if ui.quiet:
2830 2830 ui.write("%s\n" % t)
2831 2831 else:
2832 2832 ui.write("%-30s %s\n" % (t, r))
2833 2833
2834 2834 def tip(ui, repo, **opts):
2835 2835 """show the tip revision
2836 2836
2837 2837 Show the tip revision.
2838 2838 """
2839 2839 n = repo.changelog.tip()
2840 2840 br = None
2841 2841 if opts['branches']:
2842 2842 br = repo.branchlookup([n])
2843 2843 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2844 2844 if opts['patch']:
2845 2845 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2846 2846
2847 2847 def unbundle(ui, repo, fname, **opts):
2848 2848 """apply a changegroup file
2849 2849
2850 2850 Apply a compressed changegroup file generated by the bundle
2851 2851 command.
2852 2852 """
2853 2853 f = urllib.urlopen(fname)
2854 2854
2855 2855 header = f.read(6)
2856 2856 if not header.startswith("HG"):
2857 2857 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2858 2858 elif not header.startswith("HG10"):
2859 2859 raise util.Abort(_("%s: unknown bundle version") % fname)
2860 2860 elif header == "HG10BZ":
2861 2861 def generator(f):
2862 2862 zd = bz2.BZ2Decompressor()
2863 2863 zd.decompress("BZ")
2864 2864 for chunk in f:
2865 2865 yield zd.decompress(chunk)
2866 2866 elif header == "HG10UN":
2867 2867 def generator(f):
2868 2868 for chunk in f:
2869 2869 yield chunk
2870 2870 else:
2871 2871 raise util.Abort(_("%s: unknown bundle compression type")
2872 2872 % fname)
2873 2873 gen = generator(util.filechunkiter(f, 4096))
2874 2874 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2875 2875 'bundle:' + fname)
2876 2876 return postincoming(ui, repo, modheads, opts['update'])
2877 2877
2878 2878 def undo(ui, repo):
2879 2879 """undo the last commit or pull (DEPRECATED)
2880 2880
2881 2881 (DEPRECATED)
2882 2882 This command is now deprecated and will be removed in a future
2883 2883 release. Please use the rollback command instead. For usage
2884 2884 instructions, see the rollback command.
2885 2885 """
2886 2886 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2887 2887 repo.rollback()
2888 2888
2889 2889 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2890 2890 branch=None, **opts):
2891 2891 """update or merge working directory
2892 2892
2893 2893 Update the working directory to the specified revision.
2894 2894
2895 2895 If there are no outstanding changes in the working directory and
2896 2896 there is a linear relationship between the current version and the
2897 2897 requested version, the result is the requested version.
2898 2898
2899 2899 To merge the working directory with another revision, use the
2900 2900 merge command.
2901 2901
2902 2902 By default, update will refuse to run if doing so would require
2903 2903 merging or discarding local changes.
2904 2904 """
2905 2905 if merge:
2906 2906 ui.warn(_('(the -m/--merge option is deprecated; '
2907 2907 'use the merge command instead)\n'))
2908 2908 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2909 2909
2910 2910 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2911 2911 branch=None, **opts):
2912 2912 if branch:
2913 2913 br = repo.branchlookup(branch=branch)
2914 2914 found = []
2915 2915 for x in br:
2916 2916 if branch in br[x]:
2917 2917 found.append(x)
2918 2918 if len(found) > 1:
2919 2919 ui.warn(_("Found multiple heads for %s\n") % branch)
2920 2920 for x in found:
2921 2921 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2922 2922 return 1
2923 2923 if len(found) == 1:
2924 2924 node = found[0]
2925 2925 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2926 2926 else:
2927 2927 ui.warn(_("branch %s not found\n") % (branch))
2928 2928 return 1
2929 2929 else:
2930 2930 node = node and repo.lookup(node) or repo.changelog.tip()
2931 2931 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2932 2932
2933 2933 def verify(ui, repo):
2934 2934 """verify the integrity of the repository
2935 2935
2936 2936 Verify the integrity of the current repository.
2937 2937
2938 2938 This will perform an extensive check of the repository's
2939 2939 integrity, validating the hashes and checksums of each entry in
2940 2940 the changelog, manifest, and tracked files, as well as the
2941 2941 integrity of their crosslinks and indices.
2942 2942 """
2943 2943 return repo.verify()
2944 2944
2945 2945 # Command options and aliases are listed here, alphabetically
2946 2946
2947 2947 table = {
2948 2948 "^add":
2949 2949 (add,
2950 2950 [('I', 'include', [], _('include names matching the given patterns')),
2951 2951 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2952 2952 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2953 2953 _('hg add [OPTION]... [FILE]...')),
2954 2954 "debugaddremove|addremove":
2955 2955 (addremove,
2956 2956 [('I', 'include', [], _('include names matching the given patterns')),
2957 2957 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2958 2958 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2959 2959 _('hg addremove [OPTION]... [FILE]...')),
2960 2960 "^annotate":
2961 2961 (annotate,
2962 2962 [('r', 'rev', '', _('annotate the specified revision')),
2963 2963 ('a', 'text', None, _('treat all files as text')),
2964 2964 ('u', 'user', None, _('list the author')),
2965 2965 ('d', 'date', None, _('list the date')),
2966 2966 ('n', 'number', None, _('list the revision number (default)')),
2967 2967 ('c', 'changeset', None, _('list the changeset')),
2968 2968 ('I', 'include', [], _('include names matching the given patterns')),
2969 2969 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2970 2970 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2971 2971 "archive":
2972 2972 (archive,
2973 2973 [('', 'no-decode', None, _('do not pass files through decoders')),
2974 2974 ('p', 'prefix', '', _('directory prefix for files in archive')),
2975 2975 ('r', 'rev', '', _('revision to distribute')),
2976 2976 ('t', 'type', '', _('type of distribution to create')),
2977 2977 ('I', 'include', [], _('include names matching the given patterns')),
2978 2978 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2979 2979 _('hg archive [OPTION]... DEST')),
2980 2980 "backout":
2981 2981 (backout,
2982 2982 [('', 'merge', None,
2983 2983 _('merge with old dirstate parent after backout')),
2984 2984 ('m', 'message', '', _('use <text> as commit message')),
2985 2985 ('l', 'logfile', '', _('read commit message from <file>')),
2986 2986 ('d', 'date', '', _('record datecode as commit date')),
2987 2987 ('', 'parent', '', _('parent to choose when backing out merge')),
2988 2988 ('u', 'user', '', _('record user as committer')),
2989 2989 ('I', 'include', [], _('include names matching the given patterns')),
2990 2990 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2991 2991 _('hg backout [OPTION]... REV')),
2992 2992 "bundle":
2993 2993 (bundle,
2994 2994 [('f', 'force', None,
2995 2995 _('run even when remote repository is unrelated'))],
2996 2996 _('hg bundle FILE DEST')),
2997 2997 "cat":
2998 2998 (cat,
2999 2999 [('o', 'output', '', _('print output to file with formatted name')),
3000 3000 ('r', 'rev', '', _('print the given revision')),
3001 3001 ('I', 'include', [], _('include names matching the given patterns')),
3002 3002 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3003 3003 _('hg cat [OPTION]... FILE...')),
3004 3004 "^clone":
3005 3005 (clone,
3006 3006 [('U', 'noupdate', None, _('do not update the new working directory')),
3007 3007 ('r', 'rev', [],
3008 3008 _('a changeset you would like to have after cloning')),
3009 3009 ('', 'pull', None, _('use pull protocol to copy metadata')),
3010 3010 ('', 'uncompressed', None,
3011 3011 _('use uncompressed transfer (fast over LAN)')),
3012 3012 ('e', 'ssh', '', _('specify ssh command to use')),
3013 3013 ('', 'remotecmd', '',
3014 3014 _('specify hg command to run on the remote side'))],
3015 3015 _('hg clone [OPTION]... SOURCE [DEST]')),
3016 3016 "^commit|ci":
3017 3017 (commit,
3018 3018 [('A', 'addremove', None,
3019 3019 _('mark new/missing files as added/removed before committing')),
3020 3020 ('m', 'message', '', _('use <text> as commit message')),
3021 3021 ('l', 'logfile', '', _('read the commit message from <file>')),
3022 3022 ('d', 'date', '', _('record datecode as commit date')),
3023 3023 ('u', 'user', '', _('record user as committer')),
3024 3024 ('I', 'include', [], _('include names matching the given patterns')),
3025 3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3026 3026 _('hg commit [OPTION]... [FILE]...')),
3027 3027 "copy|cp":
3028 3028 (copy,
3029 3029 [('A', 'after', None, _('record a copy that has already occurred')),
3030 3030 ('f', 'force', None,
3031 3031 _('forcibly copy over an existing managed file')),
3032 3032 ('I', 'include', [], _('include names matching the given patterns')),
3033 3033 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3034 3034 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3035 3035 _('hg copy [OPTION]... [SOURCE]... DEST')),
3036 3036 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3037 3037 "debugcomplete":
3038 3038 (debugcomplete,
3039 3039 [('o', 'options', None, _('show the command options'))],
3040 3040 _('debugcomplete [-o] CMD')),
3041 3041 "debugrebuildstate":
3042 3042 (debugrebuildstate,
3043 3043 [('r', 'rev', '', _('revision to rebuild to'))],
3044 3044 _('debugrebuildstate [-r REV] [REV]')),
3045 3045 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3046 3046 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3047 3047 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3048 3048 "debugstate": (debugstate, [], _('debugstate')),
3049 3049 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3050 3050 "debugindex": (debugindex, [], _('debugindex FILE')),
3051 3051 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3052 3052 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3053 3053 "debugwalk":
3054 3054 (debugwalk,
3055 3055 [('I', 'include', [], _('include names matching the given patterns')),
3056 3056 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3057 3057 _('debugwalk [OPTION]... [FILE]...')),
3058 3058 "^diff":
3059 3059 (diff,
3060 3060 [('r', 'rev', [], _('revision')),
3061 3061 ('a', 'text', None, _('treat all files as text')),
3062 3062 ('p', 'show-function', None,
3063 3063 _('show which function each change is in')),
3064 3064 ('w', 'ignore-all-space', None,
3065 3065 _('ignore white space when comparing lines')),
3066 3066 ('b', 'ignore-space-change', None,
3067 3067 _('ignore changes in the amount of white space')),
3068 3068 ('B', 'ignore-blank-lines', None,
3069 3069 _('ignore changes whose lines are all blank')),
3070 3070 ('I', 'include', [], _('include names matching the given patterns')),
3071 3071 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3072 3072 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3073 3073 "^export":
3074 3074 (export,
3075 3075 [('o', 'output', '', _('print output to file with formatted name')),
3076 3076 ('a', 'text', None, _('treat all files as text')),
3077 3077 ('', 'switch-parent', None, _('diff against the second parent'))],
3078 3078 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3079 3079 "debugforget|forget":
3080 3080 (forget,
3081 3081 [('I', 'include', [], _('include names matching the given patterns')),
3082 3082 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3083 3083 _('hg forget [OPTION]... FILE...')),
3084 3084 "grep":
3085 3085 (grep,
3086 3086 [('0', 'print0', None, _('end fields with NUL')),
3087 3087 ('', 'all', None, _('print all revisions that match')),
3088 3088 ('i', 'ignore-case', None, _('ignore case when matching')),
3089 3089 ('l', 'files-with-matches', None,
3090 3090 _('print only filenames and revs that match')),
3091 3091 ('n', 'line-number', None, _('print matching line numbers')),
3092 3092 ('r', 'rev', [], _('search in given revision range')),
3093 3093 ('u', 'user', None, _('print user who committed change')),
3094 3094 ('I', 'include', [], _('include names matching the given patterns')),
3095 3095 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3096 3096 _('hg grep [OPTION]... PATTERN [FILE]...')),
3097 3097 "heads":
3098 3098 (heads,
3099 3099 [('b', 'branches', None, _('show branches')),
3100 3100 ('', 'style', '', _('display using template map file')),
3101 3101 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3102 3102 ('', 'template', '', _('display with template'))],
3103 3103 _('hg heads [-b] [-r <rev>]')),
3104 3104 "help": (help_, [], _('hg help [COMMAND]')),
3105 3105 "identify|id": (identify, [], _('hg identify')),
3106 3106 "import|patch":
3107 3107 (import_,
3108 3108 [('p', 'strip', 1,
3109 3109 _('directory strip option for patch. This has the same\n'
3110 3110 'meaning as the corresponding patch option')),
3111 3111 ('m', 'message', '', _('use <text> as commit message')),
3112 3112 ('b', 'base', '', _('base path')),
3113 3113 ('f', 'force', None,
3114 3114 _('skip check for outstanding uncommitted changes'))],
3115 3115 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3116 3116 "incoming|in": (incoming,
3117 3117 [('M', 'no-merges', None, _('do not show merges')),
3118 3118 ('f', 'force', None,
3119 3119 _('run even when remote repository is unrelated')),
3120 3120 ('', 'style', '', _('display using template map file')),
3121 3121 ('n', 'newest-first', None, _('show newest record first')),
3122 3122 ('', 'bundle', '', _('file to store the bundles into')),
3123 3123 ('p', 'patch', None, _('show patch')),
3124 3124 ('r', 'rev', [], _('a specific revision you would like to pull')),
3125 3125 ('', 'template', '', _('display with template')),
3126 3126 ('e', 'ssh', '', _('specify ssh command to use')),
3127 3127 ('', 'remotecmd', '',
3128 3128 _('specify hg command to run on the remote side'))],
3129 3129 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3130 3130 ' [--bundle FILENAME] [SOURCE]')),
3131 3131 "^init":
3132 3132 (init,
3133 3133 [('e', 'ssh', '', _('specify ssh command to use')),
3134 3134 ('', 'remotecmd', '',
3135 3135 _('specify hg command to run on the remote side'))],
3136 3136 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3137 3137 "locate":
3138 3138 (locate,
3139 3139 [('r', 'rev', '', _('search the repository as it stood at rev')),
3140 3140 ('0', 'print0', None,
3141 3141 _('end filenames with NUL, for use with xargs')),
3142 3142 ('f', 'fullpath', None,
3143 3143 _('print complete paths from the filesystem root')),
3144 3144 ('I', 'include', [], _('include names matching the given patterns')),
3145 3145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3146 3146 _('hg locate [OPTION]... [PATTERN]...')),
3147 3147 "^log|history":
3148 3148 (log,
3149 3149 [('b', 'branches', None, _('show branches')),
3150 3150 ('f', 'follow', None,
3151 3151 _('follow changeset history, or file history across copies and renames')),
3152 3152 ('', 'follow-first', None,
3153 3153 _('only follow the first parent of merge changesets')),
3154 3154 ('k', 'keyword', [], _('search for a keyword')),
3155 3155 ('l', 'limit', '', _('limit number of changes displayed')),
3156 3156 ('r', 'rev', [], _('show the specified revision or range')),
3157 3157 ('M', 'no-merges', None, _('do not show merges')),
3158 3158 ('', 'style', '', _('display using template map file')),
3159 3159 ('m', 'only-merges', None, _('show only merges')),
3160 3160 ('p', 'patch', None, _('show patch')),
3161 3161 ('', 'template', '', _('display with template')),
3162 3162 ('I', 'include', [], _('include names matching the given patterns')),
3163 3163 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3164 3164 _('hg log [OPTION]... [FILE]')),
3165 3165 "manifest": (manifest, [], _('hg manifest [REV]')),
3166 3166 "merge":
3167 3167 (merge,
3168 3168 [('b', 'branch', '', _('merge with head of a specific branch')),
3169 3169 ('f', 'force', None, _('force a merge with outstanding changes'))],
3170 3170 _('hg merge [-b TAG] [-f] [REV]')),
3171 3171 "outgoing|out": (outgoing,
3172 3172 [('M', 'no-merges', None, _('do not show merges')),
3173 3173 ('f', 'force', None,
3174 3174 _('run even when remote repository is unrelated')),
3175 3175 ('p', 'patch', None, _('show patch')),
3176 3176 ('', 'style', '', _('display using template map file')),
3177 3177 ('r', 'rev', [], _('a specific revision you would like to push')),
3178 3178 ('n', 'newest-first', None, _('show newest record first')),
3179 3179 ('', 'template', '', _('display with template')),
3180 3180 ('e', 'ssh', '', _('specify ssh command to use')),
3181 3181 ('', 'remotecmd', '',
3182 3182 _('specify hg command to run on the remote side'))],
3183 3183 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3184 3184 "^parents":
3185 3185 (parents,
3186 3186 [('b', 'branches', None, _('show branches')),
3187 3187 ('r', 'rev', '', _('show parents from the specified rev')),
3188 3188 ('', 'style', '', _('display using template map file')),
3189 3189 ('', 'template', '', _('display with template'))],
3190 3190 _('hg parents [-b] [-r REV] [FILE]')),
3191 3191 "paths": (paths, [], _('hg paths [NAME]')),
3192 3192 "^pull":
3193 3193 (pull,
3194 3194 [('u', 'update', None,
3195 3195 _('update the working directory to tip after pull')),
3196 3196 ('e', 'ssh', '', _('specify ssh command to use')),
3197 3197 ('f', 'force', None,
3198 3198 _('run even when remote repository is unrelated')),
3199 3199 ('r', 'rev', [], _('a specific revision you would like to pull')),
3200 3200 ('', 'remotecmd', '',
3201 3201 _('specify hg command to run on the remote side'))],
3202 3202 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3203 3203 "^push":
3204 3204 (push,
3205 3205 [('f', 'force', None, _('force push')),
3206 3206 ('e', 'ssh', '', _('specify ssh command to use')),
3207 3207 ('r', 'rev', [], _('a specific revision you would like to push')),
3208 3208 ('', 'remotecmd', '',
3209 3209 _('specify hg command to run on the remote side'))],
3210 3210 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3211 3211 "debugrawcommit|rawcommit":
3212 3212 (rawcommit,
3213 3213 [('p', 'parent', [], _('parent')),
3214 3214 ('d', 'date', '', _('date code')),
3215 3215 ('u', 'user', '', _('user')),
3216 3216 ('F', 'files', '', _('file list')),
3217 3217 ('m', 'message', '', _('commit message')),
3218 3218 ('l', 'logfile', '', _('commit message file'))],
3219 3219 _('hg debugrawcommit [OPTION]... [FILE]...')),
3220 3220 "recover": (recover, [], _('hg recover')),
3221 3221 "^remove|rm":
3222 3222 (remove,
3223 3223 [('A', 'after', None, _('record remove that has already occurred')),
3224 3224 ('f', 'force', None, _('remove file even if modified')),
3225 3225 ('I', 'include', [], _('include names matching the given patterns')),
3226 3226 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3227 3227 _('hg remove [OPTION]... FILE...')),
3228 3228 "rename|mv":
3229 3229 (rename,
3230 3230 [('A', 'after', None, _('record a rename that has already occurred')),
3231 3231 ('f', 'force', None,
3232 3232 _('forcibly copy over an existing managed file')),
3233 3233 ('I', 'include', [], _('include names matching the given patterns')),
3234 3234 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3235 3235 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3236 3236 _('hg rename [OPTION]... SOURCE... DEST')),
3237 3237 "^revert":
3238 3238 (revert,
3239 3239 [('r', 'rev', '', _('revision to revert to')),
3240 3240 ('', 'no-backup', None, _('do not save backup copies of files')),
3241 3241 ('I', 'include', [], _('include names matching given patterns')),
3242 3242 ('X', 'exclude', [], _('exclude names matching given patterns')),
3243 3243 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3244 3244 _('hg revert [-r REV] [NAME]...')),
3245 3245 "rollback": (rollback, [], _('hg rollback')),
3246 3246 "root": (root, [], _('hg root')),
3247 3247 "^serve":
3248 3248 (serve,
3249 3249 [('A', 'accesslog', '', _('name of access log file to write to')),
3250 3250 ('d', 'daemon', None, _('run server in background')),
3251 3251 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3252 3252 ('E', 'errorlog', '', _('name of error log file to write to')),
3253 3253 ('p', 'port', 0, _('port to use (default: 8000)')),
3254 3254 ('a', 'address', '', _('address to use')),
3255 3255 ('n', 'name', '',
3256 3256 _('name to show in web pages (default: working dir)')),
3257 3257 ('', 'webdir-conf', '', _('name of the webdir config file'
3258 3258 ' (serve more than one repo)')),
3259 3259 ('', 'pid-file', '', _('name of file to write process ID to')),
3260 3260 ('', 'stdio', None, _('for remote clients')),
3261 3261 ('t', 'templates', '', _('web templates to use')),
3262 3262 ('', 'style', '', _('template style to use')),
3263 3263 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3264 3264 _('hg serve [OPTION]...')),
3265 3265 "^status|st":
3266 3266 (status,
3267 3267 [('A', 'all', None, _('show status of all files')),
3268 3268 ('m', 'modified', None, _('show only modified files')),
3269 3269 ('a', 'added', None, _('show only added files')),
3270 3270 ('r', 'removed', None, _('show only removed files')),
3271 3271 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3272 3272 ('c', 'clean', None, _('show only files without changes')),
3273 3273 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3274 3274 ('i', 'ignored', None, _('show ignored files')),
3275 3275 ('n', 'no-status', None, _('hide status prefix')),
3276 3276 ('C', 'copies', None, _('show source of copied files')),
3277 3277 ('0', 'print0', None,
3278 3278 _('end filenames with NUL, for use with xargs')),
3279 3279 ('I', 'include', [], _('include names matching the given patterns')),
3280 3280 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3281 3281 _('hg status [OPTION]... [FILE]...')),
3282 3282 "tag":
3283 3283 (tag,
3284 3284 [('l', 'local', None, _('make the tag local')),
3285 3285 ('m', 'message', '', _('message for tag commit log entry')),
3286 3286 ('d', 'date', '', _('record datecode as commit date')),
3287 3287 ('u', 'user', '', _('record user as committer')),
3288 3288 ('r', 'rev', '', _('revision to tag'))],
3289 3289 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3290 3290 "tags": (tags, [], _('hg tags')),
3291 3291 "tip":
3292 3292 (tip,
3293 3293 [('b', 'branches', None, _('show branches')),
3294 3294 ('', 'style', '', _('display using template map file')),
3295 3295 ('p', 'patch', None, _('show patch')),
3296 3296 ('', 'template', '', _('display with template'))],
3297 3297 _('hg tip [-b] [-p]')),
3298 3298 "unbundle":
3299 3299 (unbundle,
3300 3300 [('u', 'update', None,
3301 3301 _('update the working directory to tip after unbundle'))],
3302 3302 _('hg unbundle [-u] FILE')),
3303 3303 "debugundo|undo": (undo, [], _('hg undo')),
3304 3304 "^update|up|checkout|co":
3305 3305 (update,
3306 3306 [('b', 'branch', '', _('checkout the head of a specific branch')),
3307 3307 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3308 3308 ('C', 'clean', None, _('overwrite locally modified files')),
3309 3309 ('f', 'force', None, _('force a merge with outstanding changes'))],
3310 3310 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3311 3311 "verify": (verify, [], _('hg verify')),
3312 3312 "version": (show_version, [], _('hg version')),
3313 3313 }
3314 3314
3315 3315 globalopts = [
3316 3316 ('R', 'repository', '',
3317 3317 _('repository root directory or symbolic path name')),
3318 3318 ('', 'cwd', '', _('change working directory')),
3319 3319 ('y', 'noninteractive', None,
3320 3320 _('do not prompt, assume \'yes\' for any required answers')),
3321 3321 ('q', 'quiet', None, _('suppress output')),
3322 3322 ('v', 'verbose', None, _('enable additional output')),
3323 3323 ('', 'config', [], _('set/override config option')),
3324 3324 ('', 'debug', None, _('enable debugging output')),
3325 3325 ('', 'debugger', None, _('start debugger')),
3326 3326 ('', 'lsprof', None, _('print improved command execution profile')),
3327 3327 ('', 'traceback', None, _('print traceback on exception')),
3328 3328 ('', 'time', None, _('time how long the command takes')),
3329 3329 ('', 'profile', None, _('print command execution profile')),
3330 3330 ('', 'version', None, _('output version information and exit')),
3331 3331 ('h', 'help', None, _('display help and exit')),
3332 3332 ]
3333 3333
3334 3334 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3335 3335 " debugindex debugindexdot")
3336 3336 optionalrepo = ("paths serve debugconfig")
3337 3337
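Each entry in the table above maps a command name, with '|'-separated aliases and an optional leading '^' that puts the command on the short help list, to a (function, options, synopsis) tuple; every option is itself a (short name, long name, default, help) tuple whose default effectively fixes its type (None for plain flags, '' for strings, [] for repeatable options, an integer for numeric options such as --port). dispatch() below merges any cmdtable an enabled extension defines into this table and calls its uisetup()/reposetup() hooks, so a complete command can live in an external module. A minimal sketch of such an extension (the module, command name and option are invented for illustration):

# hello.py - hypothetical extension; enable it via an [extensions] entry
# such as  hello = /path/to/hello.py  in hgrc
from mercurial.i18n import gettext as _

def hello(ui, repo, **opts):
    """print a greeting followed by the repository root"""
    ui.write("%s %s\n" % (opts.get('greeting') or 'hello', repo.root))

def uisetup(ui):
    # called once by dispatch() before any command runs
    ui.note(_("hello extension loaded\n"))

def reposetup(ui, repo):
    # called by dispatch() once the repository object exists
    pass

cmdtable = {
    # merged into the main table by dispatch(); same entry format as above
    "hello|hi":
        (hello,
         [('g', 'greeting', '', _('text to print before the repository root'))],
         _('hg hello [-g TEXT]')),
}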
3338 3338 def findpossible(cmd):
3339 3339 """
3340 3340 Return cmd -> (aliases, command table entry)
3341 3341 for each matching command.
3342 3342 Return debug commands (or their aliases) only if no normal command matches.
3343 3343 """
3344 3344 choice = {}
3345 3345 debugchoice = {}
3346 3346 for e in table.keys():
3347 3347 aliases = e.lstrip("^").split("|")
3348 3348 found = None
3349 3349 if cmd in aliases:
3350 3350 found = cmd
3351 3351 else:
3352 3352 for a in aliases:
3353 3353 if a.startswith(cmd):
3354 3354 found = a
3355 3355 break
3356 3356 if found is not None:
3357 3357 if aliases[0].startswith("debug"):
3358 3358 debugchoice[found] = (aliases, table[e])
3359 3359 else:
3360 3360 choice[found] = (aliases, table[e])
3361 3361
3362 3362 if not choice and debugchoice:
3363 3363 choice = debugchoice
3364 3364
3365 3365 return choice
3366 3366
3367 3367 def findcmd(cmd):
3368 3368 """Return (aliases, command table entry) for command string."""
3369 3369 choice = findpossible(cmd)
3370 3370
3371 3371 if choice.has_key(cmd):
3372 3372 return choice[cmd]
3373 3373
3374 3374 if len(choice) > 1:
3375 3375 clist = choice.keys()
3376 3376 clist.sort()
3377 3377 raise AmbiguousCommand(cmd, clist)
3378 3378
3379 3379 if choice:
3380 3380 return choice.values()[0]
3381 3381
3382 3382 raise UnknownCommand(cmd)
3383 3383
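findpossible() and findcmd() implement hg's usual command matching against the alias lists above: an exact alias wins, a unique prefix is expanded, an ambiguous prefix raises AmbiguousCommand, and debug commands are only considered when nothing else matches. A rough sketch of the behaviour (illustration only; the alias sets come from the table above):

from mercurial import commands

aliases, entry = commands.findcmd('ci')      # exact alias of "commit|ci"
assert aliases[0] == 'commit'

aliases, entry = commands.findcmd('verif')   # unique prefix of "verify"
assert aliases[0] == 'verify'

try:
    commands.findcmd('re')                   # recover, remove, rename, revert
except commands.AmbiguousCommand, inst:
    print "ambiguous:", " ".join(inst.args[1])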
3384 3384 def catchterm(*args):
3385 3385 raise util.SignalInterrupt
3386 3386
3387 3387 def run():
3388 3388 sys.exit(dispatch(sys.argv[1:]))
3389 3389
3390 3390 class ParseError(Exception):
3391 3391 """Exception raised on errors in parsing the command line."""
3392 3392
3393 3393 def parse(ui, args):
3394 3394 options = {}
3395 3395 cmdoptions = {}
3396 3396
3397 3397 try:
3398 3398 args = fancyopts.fancyopts(args, globalopts, options)
3399 3399 except fancyopts.getopt.GetoptError, inst:
3400 3400 raise ParseError(None, inst)
3401 3401
3402 3402 if args:
3403 3403 cmd, args = args[0], args[1:]
3404 3404 aliases, i = findcmd(cmd)
3405 3405 cmd = aliases[0]
3406 3406 defaults = ui.config("defaults", cmd)
3407 3407 if defaults:
3408 3408 args = defaults.split() + args
3409 3409 c = list(i[1])
3410 3410 else:
3411 3411 cmd = None
3412 3412 c = []
3413 3413
3414 3414 # combine global options into local
3415 3415 for o in globalopts:
3416 3416 c.append((o[0], o[1], options[o[1]], o[3]))
3417 3417
3418 3418 try:
3419 3419 args = fancyopts.fancyopts(args, c, cmdoptions)
3420 3420 except fancyopts.getopt.GetoptError, inst:
3421 3421 raise ParseError(cmd, inst)
3422 3422
3423 3423 # separate global options back out
3424 3424 for o in globalopts:
3425 3425 n = o[1]
3426 3426 options[n] = cmdoptions[n]
3427 3427 del cmdoptions[n]
3428 3428
3429 3429 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3430 3430
3431 3431 external = {}
3432 3432
3433 3433 def findext(name):
3434 3434 '''return module with given extension name'''
3435 3435 try:
3436 3436 return sys.modules[external[name]]
3437 3437 except KeyError:
3438 3438 for k, v in external.iteritems():
3439 3439 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3440 3440 return sys.modules[v]
3441 3441 raise KeyError(name)
3442 3442
3443 3443 def dispatch(args):
3444 3444 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3445 3445 num = getattr(signal, name, None)
3446 3446 if num: signal.signal(num, catchterm)
3447 3447
3448 3448 try:
3449 3449 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3450 3450 except util.Abort, inst:
3451 3451 sys.stderr.write(_("abort: %s\n") % inst)
3452 3452 return -1
3453 3453
3454 3454 for ext_name, load_from_name in u.extensions():
3455 3455 try:
3456 3456 if load_from_name:
3457 3457 # the module will be loaded in sys.modules
3458 3458 # choose a unique name so that it doesn't
3459 3459 # conflict with other modules
3460 3460 module_name = "hgext_%s" % ext_name.replace('.', '_')
3461 3461 mod = imp.load_source(module_name, load_from_name)
3462 3462 else:
3463 3463 def importh(name):
3464 3464 mod = __import__(name)
3465 3465 components = name.split('.')
3466 3466 for comp in components[1:]:
3467 3467 mod = getattr(mod, comp)
3468 3468 return mod
3469 3469 try:
3470 3470 mod = importh("hgext.%s" % ext_name)
3471 3471 except ImportError:
3472 3472 mod = importh(ext_name)
3473 3473 external[ext_name] = mod.__name__
3474 3474 except (util.SignalInterrupt, KeyboardInterrupt):
3475 3475 raise
3476 3476 except Exception, inst:
3477 3477 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3478 3478 if u.print_exc():
3479 3479 return 1
3480 3480
3481 3481 for name in external.itervalues():
3482 3482 mod = sys.modules[name]
3483 3483 uisetup = getattr(mod, 'uisetup', None)
3484 3484 if uisetup:
3485 3485 uisetup(u)
3486 3486 cmdtable = getattr(mod, 'cmdtable', {})
3487 3487 for t in cmdtable:
3488 3488 if t in table:
3489 3489 u.warn(_("module %s overrides %s\n") % (name, t))
3490 3490 table.update(cmdtable)
3491 3491
3492 3492 try:
3493 3493 cmd, func, args, options, cmdoptions = parse(u, args)
3494 3494 if options["time"]:
3495 3495 def get_times():
3496 3496 t = os.times()
3497 3497 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3498 3498 t = (t[0], t[1], t[2], t[3], time.clock())
3499 3499 return t
3500 3500 s = get_times()
3501 3501 def print_time():
3502 3502 t = get_times()
3503 3503 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3504 3504 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3505 3505 atexit.register(print_time)
3506 3506
3507 3507 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3508 3508 not options["noninteractive"], options["traceback"],
3509 3509 options["config"])
3510 3510
3511 3511 # enter the debugger before command execution
3512 3512 if options['debugger']:
3513 3513 pdb.set_trace()
3514 3514
3515 3515 try:
3516 3516 if options['cwd']:
3517 3517 try:
3518 3518 os.chdir(options['cwd'])
3519 3519 except OSError, inst:
3520 3520 raise util.Abort('%s: %s' %
3521 3521 (options['cwd'], inst.strerror))
3522 3522
3523 3523 path = u.expandpath(options["repository"]) or ""
3524 3524 repo = path and hg.repository(u, path=path) or None
3525 3525
3526 3526 if options['help']:
3527 3527 return help_(u, cmd, options['version'])
3528 3528 elif options['version']:
3529 3529 return show_version(u)
3530 3530 elif not cmd:
3531 3531 return help_(u, 'shortlist')
3532 3532
3533 3533 if cmd not in norepo.split():
3534 3534 try:
3535 3535 if not repo:
3536 3536 repo = hg.repository(u, path=path)
3537 3537 u = repo.ui
3538 3538 for name in external.itervalues():
3539 3539 mod = sys.modules[name]
3540 3540 if hasattr(mod, 'reposetup'):
3541 3541 mod.reposetup(u, repo)
3542 3542 except hg.RepoError:
3543 3543 if cmd not in optionalrepo.split():
3544 3544 raise
3545 3545 d = lambda: func(u, repo, *args, **cmdoptions)
3546 3546 else:
3547 3547 d = lambda: func(u, *args, **cmdoptions)
3548 3548
3549 3549 # reupdate the options, repo/.hg/hgrc may have changed them
3550 3550 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3551 3551 not options["noninteractive"], options["traceback"],
3552 3552 options["config"])
3553 3553
3554 3554 try:
3555 3555 if options['profile']:
3556 3556 import hotshot, hotshot.stats
3557 3557 prof = hotshot.Profile("hg.prof")
3558 3558 try:
3559 3559 try:
3560 3560 return prof.runcall(d)
3561 3561 except:
3562 3562 try:
3563 3563 u.warn(_('exception raised - generating '
3564 3564 'profile anyway\n'))
3565 3565 except:
3566 3566 pass
3567 3567 raise
3568 3568 finally:
3569 3569 prof.close()
3570 3570 stats = hotshot.stats.load("hg.prof")
3571 3571 stats.strip_dirs()
3572 3572 stats.sort_stats('time', 'calls')
3573 3573 stats.print_stats(40)
3574 3574 elif options['lsprof']:
3575 3575 try:
3576 3576 from mercurial import lsprof
3577 3577 except ImportError:
3578 3578 raise util.Abort(_(
3579 3579 'lsprof not available - install from '
3580 3580 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3581 3581 p = lsprof.Profiler()
3582 3582 p.enable(subcalls=True)
3583 3583 try:
3584 3584 return d()
3585 3585 finally:
3586 3586 p.disable()
3587 3587 stats = lsprof.Stats(p.getstats())
3588 3588 stats.sort()
3589 3589 stats.pprint(top=10, file=sys.stderr, climit=5)
3590 3590 else:
3591 3591 return d()
3592 3592 finally:
3593 3593 u.flush()
3594 3594 except:
3595 3595 # enter the debugger when we hit an exception
3596 3596 if options['debugger']:
3597 3597 pdb.post_mortem(sys.exc_info()[2])
3598 3598 u.print_exc()
3599 3599 raise
3600 3600 except ParseError, inst:
3601 3601 if inst.args[0]:
3602 3602 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3603 3603 help_(u, inst.args[0])
3604 3604 else:
3605 3605 u.warn(_("hg: %s\n") % inst.args[1])
3606 3606 help_(u, 'shortlist')
3607 3607 except AmbiguousCommand, inst:
3608 3608 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3609 3609 (inst.args[0], " ".join(inst.args[1])))
3610 3610 except UnknownCommand, inst:
3611 3611 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3612 3612 help_(u, 'shortlist')
3613 3613 except hg.RepoError, inst:
3614 3614 u.warn(_("abort: %s!\n") % inst)
3615 3615 except lock.LockHeld, inst:
3616 3616 if inst.errno == errno.ETIMEDOUT:
3617 3617 reason = _('timed out waiting for lock held by %s') % inst.locker
3618 3618 else:
3619 3619 reason = _('lock held by %s') % inst.locker
3620 3620 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3621 3621 except lock.LockUnavailable, inst:
3622 3622 u.warn(_("abort: could not lock %s: %s\n") %
3623 3623 (inst.desc or inst.filename, inst.strerror))
3624 3624 except revlog.RevlogError, inst:
3625 3625 u.warn(_("abort: "), inst, "!\n")
3626 3626 except util.SignalInterrupt:
3627 3627 u.warn(_("killed!\n"))
3628 3628 except KeyboardInterrupt:
3629 3629 try:
3630 3630 u.warn(_("interrupted!\n"))
3631 3631 except IOError, inst:
3632 3632 if inst.errno == errno.EPIPE:
3633 3633 if u.debugflag:
3634 3634 u.warn(_("\nbroken pipe\n"))
3635 3635 else:
3636 3636 raise
3637 3637 except IOError, inst:
3638 3638 if hasattr(inst, "code"):
3639 3639 u.warn(_("abort: %s\n") % inst)
3640 3640 elif hasattr(inst, "reason"):
3641 3641 u.warn(_("abort: error: %s\n") % inst.reason[1])
3642 3642 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3643 3643 if u.debugflag:
3644 3644 u.warn(_("broken pipe\n"))
3645 3645 elif getattr(inst, "strerror", None):
3646 3646 if getattr(inst, "filename", None):
3647 3647 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3648 3648 else:
3649 3649 u.warn(_("abort: %s\n") % inst.strerror)
3650 3650 else:
3651 3651 raise
3652 3652 except OSError, inst:
3653 3653 if hasattr(inst, "filename"):
3654 3654 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3655 3655 else:
3656 3656 u.warn(_("abort: %s\n") % inst.strerror)
3657 3657 except util.Abort, inst:
3658 3658 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3659 3659 except TypeError, inst:
3660 3660 # was this an argument error?
3661 3661 tb = traceback.extract_tb(sys.exc_info()[2])
3662 3662 if len(tb) > 2: # no
3663 3663 raise
3664 3664 u.debug(inst, "\n")
3665 3665 u.warn(_("%s: invalid arguments\n") % cmd)
3666 3666 help_(u, cmd)
3667 3667 except SystemExit, inst:
3668 3668 # Commands shouldn't sys.exit directly, but give a return code.
3669 3669 # Just in case, catch this and pass the exit code to the caller.
3670 3670 return inst.code
3671 3671 except:
3672 3672 u.warn(_("** unknown exception encountered, details follow\n"))
3673 3673 u.warn(_("** report bug details to "
3674 3674 "http://www.selenic.com/mercurial/bts\n"))
3675 3675 u.warn(_("** or mercurial@selenic.com\n"))
3676 3676 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3677 3677 % version.get_version())
3678 3678 raise
3679 3679
3680 3680 return -1
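Since run() only wraps dispatch() in sys.exit(), the same entry point can be driven from another Python program: dispatch() takes the argument list without the leading 'hg', performs the option parsing, extension setup and error handling shown above, and returns an exit status (or the command's return value) instead of exiting. A small sketch, purely illustrative:

# run "hg status --quiet" programmatically; assumes the current directory
# is inside a repository, otherwise dispatch() warns and returns -1
from mercurial import commands

ret = commands.dispatch(['status', '--quiet'])
if ret:
    print 'hg status returned %r' % ret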
@@ -1,2278 +1,2282
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ()
19 19
20 20 def __del__(self):
21 21 self.transhandle = None
22 22 def __init__(self, parentui, path=None, create=0):
23 23 repo.repository.__init__(self)
24 24 if not path:
25 25 p = os.getcwd()
26 26 while not os.path.isdir(os.path.join(p, ".hg")):
27 27 oldp = p
28 28 p = os.path.dirname(p)
29 29 if p == oldp:
30 30 raise repo.RepoError(_("no repo found"))
31 31 path = p
32 32 self.path = os.path.join(path, ".hg")
33 33
34 34 if not create and not os.path.isdir(self.path):
35 35 raise repo.RepoError(_("repository %s not found") % path)
36 36
37 37 self.root = os.path.abspath(path)
38 38 self.origroot = path
39 39 self.ui = ui.ui(parentui=parentui)
40 40 self.opener = util.opener(self.path)
41 41 self.wopener = util.opener(self.root)
42 42
43 43 try:
44 44 self.ui.readconfig(self.join("hgrc"), self.root)
45 45 except IOError:
46 46 pass
47 47
48 48 v = self.ui.revlogopts
49 49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 51 fl = v.get('flags', None)
52 52 flags = 0
53 53 if fl != None:
54 54 for x in fl.split():
55 55 flags |= revlog.flagstr(x)
56 56 elif self.revlogv1:
57 57 flags = revlog.REVLOG_DEFAULT_FLAGS
58 58
59 59 v = self.revlogversion | flags
60 60 self.manifest = manifest.manifest(self.opener, v)
61 61 self.changelog = changelog.changelog(self.opener, v)
62 62
63 63 # the changelog might not have the inline index flag
64 64 # on. If the format of the changelog is the same as found in
65 65 # .hgrc, apply any flags found in the .hgrc as well.
66 66 # Otherwise, just use the version from the changelog
67 67 v = self.changelog.version
68 68 if v == self.revlogversion:
69 69 v |= flags
70 70 self.revlogversion = v
71 71
72 72 self.tagscache = None
73 73 self.nodetagscache = None
74 74 self.encodepats = None
75 75 self.decodepats = None
76 76 self.transhandle = None
77 77
78 78 if create:
79 79 if not os.path.exists(path):
80 80 os.mkdir(path)
81 81 os.mkdir(self.path)
82 82 os.mkdir(self.join("data"))
83 83
84 84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85 85
86 86 def url(self):
87 87 return 'file:' + self.root
88 88
89 89 def hook(self, name, throw=False, **args):
90 90 def callhook(hname, funcname):
91 91 '''call python hook. hook is callable object, looked up as
92 92 name in python module. if callable returns "true", hook
93 93 fails, else passes. if hook raises exception, treated as
94 94 hook failure. exception propagates if throw is "true".
95 95
96 96 reason for "true" meaning "hook failed" is so that
97 97 unmodified commands (e.g. mercurial.commands.update) can
98 98 be run as hooks without wrappers to convert return values.'''
99 99
100 100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 101 d = funcname.rfind('.')
102 102 if d == -1:
103 103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 104 % (hname, funcname))
105 105 modname = funcname[:d]
106 106 try:
107 107 obj = __import__(modname)
108 108 except ImportError:
109 109 try:
110 110 # extensions are loaded with hgext_ prefix
111 111 obj = __import__("hgext_%s" % modname)
112 112 except ImportError:
113 113 raise util.Abort(_('%s hook is invalid '
114 114 '(import of "%s" failed)') %
115 115 (hname, modname))
116 116 try:
117 117 for p in funcname.split('.')[1:]:
118 118 obj = getattr(obj, p)
119 119 except AttributeError, err:
120 120 raise util.Abort(_('%s hook is invalid '
121 121 '("%s" is not defined)') %
122 122 (hname, funcname))
123 123 if not callable(obj):
124 124 raise util.Abort(_('%s hook is invalid '
125 125 '("%s" is not callable)') %
126 126 (hname, funcname))
127 127 try:
128 128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 129 except (KeyboardInterrupt, util.SignalInterrupt):
130 130 raise
131 131 except Exception, exc:
132 132 if isinstance(exc, util.Abort):
133 133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 134 (hname, exc.args[0] % exc.args[1:]))
135 135 else:
136 136 self.ui.warn(_('error: %s hook raised an exception: '
137 137 '%s\n') % (hname, exc))
138 138 if throw:
139 139 raise
140 140 self.ui.print_exc()
141 141 return True
142 142 if r:
143 143 if throw:
144 144 raise util.Abort(_('%s hook failed') % hname)
145 145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 146 return r
147 147
148 148 def runhook(name, cmd):
149 149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 151 r = util.system(cmd, environ=env, cwd=self.root)
152 152 if r:
153 153 desc, r = util.explain_exit(r)
154 154 if throw:
155 155 raise util.Abort(_('%s hook %s') % (name, desc))
156 156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 157 return r
158 158
159 159 r = False
160 160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 161 if hname.split(".", 1)[0] == name and cmd]
162 162 hooks.sort()
163 163 for hname, cmd in hooks:
164 164 if cmd.startswith('python:'):
165 165 r = callhook(hname, cmd[7:].strip()) or r
166 166 else:
167 167 r = runhook(hname, cmd) or r
168 168 return r
169 169
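hook() supports two kinds of entries from the [hooks] section of hgrc: plain shell commands, which runhook() executes with the keyword arguments exported as HG_* environment variables, and Python hooks marked with a 'python:' prefix, which callhook() imports and invokes with ui, repo, hooktype and the keyword arguments; a truthy return value (or an exception) counts as failure. A minimal sketch of a Python hook for the pretag hook fired by tag() below (module and hook names are invented):

# nospaces.py - hypothetical pretag hook; wired up in hgrc with
#   [hooks]
#   pretag.nospaces = python:nospaces.check
def check(ui, repo, hooktype, node=None, tag=None, local=None, **kwargs):
    """refuse tag names containing a space (returning True means the hook failed)"""
    if tag and ' ' in tag:
        ui.warn("tag name %r contains a space\n" % tag)
        return True
    return False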
170 170 tag_disallowed = ':\r\n'
171 171
172 172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 173 '''tag a revision with a symbolic name.
174 174
175 175 if local is True, the tag is stored in a per-repository file.
176 176 otherwise, it is stored in the .hgtags file, and a new
177 177 changeset is committed with the change.
178 178
179 179 keyword arguments:
180 180
181 181 local: whether to store tag in non-version-controlled file
182 182 (default False)
183 183
184 184 message: commit message to use if committing
185 185
186 186 user: name of user to use if committing
187 187
188 188 date: date tuple to use if committing'''
189 189
190 190 for c in self.tag_disallowed:
191 191 if c in name:
192 192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193 193
194 194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195 195
196 196 if local:
197 197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 198 self.hook('tag', node=node, tag=name, local=local)
199 199 return
200 200
201 201 for x in self.changes():
202 202 if '.hgtags' in x:
203 203 raise util.Abort(_('working copy of .hgtags is changed '
204 204 '(please commit .hgtags manually)'))
205 205
206 206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 207 if self.dirstate.state('.hgtags') == '?':
208 208 self.add(['.hgtags'])
209 209
210 210 if not message:
211 211 message = _('Added tag %s for changeset %s') % (name, node)
212 212
213 213 self.commit(['.hgtags'], message, user, date)
214 214 self.hook('tag', node=node, tag=name, local=local)
215 215
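Note that tag() writes the node argument verbatim into .hgtags (or .hg/localtags for a local tag), and the parser in tags() below expects hex node ids, so callers pass the changeset hash as a 40-character hex string rather than a binary node. A rough sketch of creating a local tag (tag name invented; assumes the repository has at least one changeset):

from mercurial import ui, hg
from mercurial.node import hex

repo = hg.repository(ui.ui(), path='')     # path='' searches upward from cwd
node = hex(repo.changelog.tip())           # hex id of the tip changeset
repo.tag('snapshot', node, local=True)     # goes to .hg/localtags, no commit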
216 216 def tags(self):
217 217 '''return a mapping of tag to node'''
218 218 if not self.tagscache:
219 219 self.tagscache = {}
220 220
221 221 def parsetag(line, context):
222 222 if not line:
223 223 return
224 224 s = line.split(" ", 1)
225 225 if len(s) != 2:
226 226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 227 return
228 228 node, key = s
229 229 key = key.strip()
230 230 try:
231 231 bin_n = bin(node)
232 232 except TypeError:
233 233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 234 (context, node))
235 235 return
236 236 if bin_n not in self.changelog.nodemap:
237 237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 238 (context, key))
239 239 return
240 240 self.tagscache[key] = bin_n
241 241
242 242 # read the tags file from each head, ending with the tip,
243 243 # and add each tag found to the map, with "newer" ones
244 244 # taking precedence
245 245 heads = self.heads()
246 246 heads.reverse()
247 247 fl = self.file(".hgtags")
248 248 for node in heads:
249 249 change = self.changelog.read(node)
250 250 rev = self.changelog.rev(node)
251 251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 252 if fn is None: continue
253 253 count = 0
254 254 for l in fl.read(fn).splitlines():
255 255 count += 1
256 256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 257 (rev, short(node), count))
258 258 try:
259 259 f = self.opener("localtags")
260 260 count = 0
261 261 for l in f:
262 262 count += 1
263 263 parsetag(l, _("localtags, line %d") % count)
264 264 except IOError:
265 265 pass
266 266
267 267 self.tagscache['tip'] = self.changelog.tip()
268 268
269 269 return self.tagscache
270 270
271 271 def tagslist(self):
272 272 '''return a list of tags ordered by revision'''
273 273 l = []
274 274 for t, n in self.tags().items():
275 275 try:
276 276 r = self.changelog.rev(n)
277 277 except:
278 278 r = -2 # sort to the beginning of the list if unknown
279 279 l.append((r, t, n))
280 280 l.sort()
281 281 return [(t, n) for r, t, n in l]
282 282
283 283 def nodetags(self, node):
284 284 '''return the tags associated with a node'''
285 285 if not self.nodetagscache:
286 286 self.nodetagscache = {}
287 287 for t, n in self.tags().items():
288 288 self.nodetagscache.setdefault(n, []).append(t)
289 289 return self.nodetagscache.get(node, [])
290 290
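tags(), tagslist() and nodetags() cache the name-to-node mapping assembled from the .hgtags file on each head (with later heads taking precedence) plus .hg/localtags, and 'tip' is always present. A trivial sketch of reading the cache back, oldest tag first as tagslist() orders them:

from mercurial import ui, hg
from mercurial.node import short

repo = hg.repository(ui.ui(), path='')
for name, node in repo.tagslist():
    print '%s %s' % (name, short(node))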
291 291 def lookup(self, key):
292 292 try:
293 293 return self.tags()[key]
294 294 except KeyError:
295 if key == '.':
296 key = self.dirstate.parents()[0]
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
295 299 try:
296 300 return self.changelog.lookup(key)
297 301 except:
298 302 raise repo.RepoError(_("unknown revision '%s'") % key)
299 303
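The added branch above is the point of this changeset: lookup() now resolves '.' to the first parent of the working directory (aborting if nothing is checked out) before falling back to the changelog as before, so revision arguments such as 'hg log -r .' should resolve through the same path. A small sketch of the new shortcut (assumes the current directory is inside a repository with a checkout):

from mercurial import ui, hg
from mercurial.node import short

repo = hg.repository(ui.ui(), path='')
print short(repo.lookup('.'))      # working directory's first parent
print short(repo.lookup('tip'))    # for comparison, the ever-present 'tip' tag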
300 304 def dev(self):
301 305 return os.lstat(self.path).st_dev
302 306
303 307 def local(self):
304 308 return True
305 309
306 310 def join(self, f):
307 311 return os.path.join(self.path, f)
308 312
309 313 def wjoin(self, f):
310 314 return os.path.join(self.root, f)
311 315
312 316 def file(self, f):
313 317 if f[0] == '/':
314 318 f = f[1:]
315 319 return filelog.filelog(self.opener, f, self.revlogversion)
316 320
317 321 def changectx(self, changeid):
318 322 return context.changectx(self, changeid)
319 323
320 324 def filectx(self, path, changeid=None, fileid=None):
321 325 """changeid can be a changeset revision, node, or tag.
322 326 fileid can be a file revision or node."""
323 327 return context.filectx(self, path, changeid, fileid)
324 328
325 329 def getcwd(self):
326 330 return self.dirstate.getcwd()
327 331
328 332 def wfile(self, f, mode='r'):
329 333 return self.wopener(f, mode)
330 334
331 335 def wread(self, filename):
332 336 if self.encodepats == None:
333 337 l = []
334 338 for pat, cmd in self.ui.configitems("encode"):
335 339 mf = util.matcher(self.root, "", [pat], [], [])[1]
336 340 l.append((mf, cmd))
337 341 self.encodepats = l
338 342
339 343 data = self.wopener(filename, 'r').read()
340 344
341 345 for mf, cmd in self.encodepats:
342 346 if mf(filename):
343 347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
344 348 data = util.filter(data, cmd)
345 349 break
346 350
347 351 return data
348 352
349 353 def wwrite(self, filename, data, fd=None):
350 354 if self.decodepats == None:
351 355 l = []
352 356 for pat, cmd in self.ui.configitems("decode"):
353 357 mf = util.matcher(self.root, "", [pat], [], [])[1]
354 358 l.append((mf, cmd))
355 359 self.decodepats = l
356 360
357 361 for mf, cmd in self.decodepats:
358 362 if mf(filename):
359 363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
360 364 data = util.filter(data, cmd)
361 365 break
362 366
363 367 if fd:
364 368 return fd.write(data)
365 369 return self.wopener(filename, 'w').write(data)
366 370
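wread() and wwrite() are the filter-aware way to touch files in the working directory: wread() runs the data through any matching command configured under [encode], and wwrite() through any matching [decode] command, keyed by filename patterns. A tiny sketch (the filename is hypothetical; with no filters configured these behave like plain reads and writes):

from mercurial import ui, hg

repo = hg.repository(ui.ui(), path='')
data = repo.wread('notes.txt')       # applies any matching [encode] filter
repo.wwrite('notes.txt', data)       # applies any matching [decode] filter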
367 371 def transaction(self):
368 372 tr = self.transhandle
369 373 if tr != None and tr.running():
370 374 return tr.nest()
371 375
372 376 # save dirstate for rollback
373 377 try:
374 378 ds = self.opener("dirstate").read()
375 379 except IOError:
376 380 ds = ""
377 381 self.opener("journal.dirstate", "w").write(ds)
378 382
379 383 tr = transaction.transaction(self.ui.warn, self.opener,
380 384 self.join("journal"),
381 385 aftertrans(self.path))
382 386 self.transhandle = tr
383 387 return tr
384 388
385 389 def recover(self):
386 390 l = self.lock()
387 391 if os.path.exists(self.join("journal")):
388 392 self.ui.status(_("rolling back interrupted transaction\n"))
389 393 transaction.rollback(self.opener, self.join("journal"))
390 394 self.reload()
391 395 return True
392 396 else:
393 397 self.ui.warn(_("no interrupted transaction available\n"))
394 398 return False
395 399
396 400 def rollback(self, wlock=None):
397 401 if not wlock:
398 402 wlock = self.wlock()
399 403 l = self.lock()
400 404 if os.path.exists(self.join("undo")):
401 405 self.ui.status(_("rolling back last transaction\n"))
402 406 transaction.rollback(self.opener, self.join("undo"))
403 407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
404 408 self.reload()
405 409 self.wreload()
406 410 else:
407 411 self.ui.warn(_("no rollback information available\n"))
408 412
409 413 def wreload(self):
410 414 self.dirstate.read()
411 415
412 416 def reload(self):
413 417 self.changelog.load()
414 418 self.manifest.load()
415 419 self.tagscache = None
416 420 self.nodetagscache = None
417 421
418 422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
419 423 desc=None):
420 424 try:
421 425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
422 426 except lock.LockHeld, inst:
423 427 if not wait:
424 428 raise
425 429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
426 430 (desc, inst.args[0]))
427 431 # default to 600 seconds timeout
428 432 l = lock.lock(self.join(lockname),
429 433 int(self.ui.config("ui", "timeout") or 600),
430 434 releasefn, desc=desc)
431 435 if acquirefn:
432 436 acquirefn()
433 437 return l
434 438
435 439 def lock(self, wait=1):
436 440 return self.do_lock("lock", wait, acquirefn=self.reload,
437 441 desc=_('repository %s') % self.origroot)
438 442
439 443 def wlock(self, wait=1):
440 444 return self.do_lock("wlock", wait, self.dirstate.write,
441 445 self.wreload,
442 446 desc=_('working directory of %s') % self.origroot)
443 447
444 448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
445 449 "determine whether a new filenode is needed"
446 450 fp1 = manifest1.get(filename, nullid)
447 451 fp2 = manifest2.get(filename, nullid)
448 452
449 453 if fp2 != nullid:
450 454 # is one parent an ancestor of the other?
451 455 fpa = filelog.ancestor(fp1, fp2)
452 456 if fpa == fp1:
453 457 fp1, fp2 = fp2, nullid
454 458 elif fpa == fp2:
455 459 fp2 = nullid
456 460
457 461 # is the file unmodified from the parent? report existing entry
458 462 if fp2 == nullid and text == filelog.read(fp1):
459 463 return (fp1, None, None)
460 464
461 465 return (None, fp1, fp2)
462 466
463 467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
464 468 orig_parent = self.dirstate.parents()[0] or nullid
465 469 p1 = p1 or self.dirstate.parents()[0] or nullid
466 470 p2 = p2 or self.dirstate.parents()[1] or nullid
467 471 c1 = self.changelog.read(p1)
468 472 c2 = self.changelog.read(p2)
469 473 m1 = self.manifest.read(c1[0])
470 474 mf1 = self.manifest.readflags(c1[0])
471 475 m2 = self.manifest.read(c2[0])
472 476 changed = []
473 477
474 478 if orig_parent == p1:
475 479 update_dirstate = 1
476 480 else:
477 481 update_dirstate = 0
478 482
479 483 if not wlock:
480 484 wlock = self.wlock()
481 485 l = self.lock()
482 486 tr = self.transaction()
483 487 mm = m1.copy()
484 488 mfm = mf1.copy()
485 489 linkrev = self.changelog.count()
486 490 for f in files:
487 491 try:
488 492 t = self.wread(f)
489 493 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
490 494 r = self.file(f)
491 495 mfm[f] = tm
492 496
493 497 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
494 498 if entry:
495 499 mm[f] = entry
496 500 continue
497 501
498 502 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
499 503 changed.append(f)
500 504 if update_dirstate:
501 505 self.dirstate.update([f], "n")
502 506 except IOError:
503 507 try:
504 508 del mm[f]
505 509 del mfm[f]
506 510 if update_dirstate:
507 511 self.dirstate.forget([f])
508 512 except:
509 513 # deleted from p2?
510 514 pass
511 515
512 516 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
513 517 user = user or self.ui.username()
514 518 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
515 519 tr.close()
516 520 if update_dirstate:
517 521 self.dirstate.setparents(n, nullid)
518 522
519 523 def commit(self, files=None, text="", user=None, date=None,
520 524 match=util.always, force=False, lock=None, wlock=None,
521 525 force_editor=False):
522 526 commit = []
523 527 remove = []
524 528 changed = []
525 529
526 530 if files:
527 531 for f in files:
528 532 s = self.dirstate.state(f)
529 533 if s in 'nmai':
530 534 commit.append(f)
531 535 elif s == 'r':
532 536 remove.append(f)
533 537 else:
534 538 self.ui.warn(_("%s not tracked!\n") % f)
535 539 else:
536 540 modified, added, removed, deleted, unknown = self.changes(match=match)
537 541 commit = modified + added
538 542 remove = removed
539 543
540 544 p1, p2 = self.dirstate.parents()
541 545 c1 = self.changelog.read(p1)
542 546 c2 = self.changelog.read(p2)
543 547 m1 = self.manifest.read(c1[0])
544 548 mf1 = self.manifest.readflags(c1[0])
545 549 m2 = self.manifest.read(c2[0])
546 550
547 551 if not commit and not remove and not force and p2 == nullid:
548 552 self.ui.status(_("nothing changed\n"))
549 553 return None
550 554
551 555 xp1 = hex(p1)
552 556 if p2 == nullid: xp2 = ''
553 557 else: xp2 = hex(p2)
554 558
555 559 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
556 560
557 561 if not wlock:
558 562 wlock = self.wlock()
559 563 if not lock:
560 564 lock = self.lock()
561 565 tr = self.transaction()
562 566
563 567 # check in files
564 568 new = {}
565 569 linkrev = self.changelog.count()
566 570 commit.sort()
567 571 for f in commit:
568 572 self.ui.note(f + "\n")
569 573 try:
570 574 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
571 575 t = self.wread(f)
572 576 except IOError:
573 577 self.ui.warn(_("trouble committing %s!\n") % f)
574 578 raise
575 579
576 580 r = self.file(f)
577 581
578 582 meta = {}
579 583 cp = self.dirstate.copied(f)
580 584 if cp:
581 585 meta["copy"] = cp
582 586 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
583 587 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
584 588 fp1, fp2 = nullid, nullid
585 589 else:
586 590 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
587 591 if entry:
588 592 new[f] = entry
589 593 continue
590 594
591 595 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
592 596 # remember what we've added so that we can later calculate
593 597 # the files to pull from a set of changesets
594 598 changed.append(f)
595 599
596 600 # update manifest
597 601 m1 = m1.copy()
598 602 m1.update(new)
599 603 for f in remove:
600 604 if f in m1:
601 605 del m1[f]
602 606 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
603 607 (new, remove))
604 608
605 609 # add changeset
606 610 new = new.keys()
607 611 new.sort()
608 612
609 613 user = user or self.ui.username()
610 614 if not text or force_editor:
611 615 edittext = []
612 616 if text:
613 617 edittext.append(text)
614 618 edittext.append("")
615 619 if p2 != nullid:
616 620 edittext.append("HG: branch merge")
617 621 edittext.extend(["HG: changed %s" % f for f in changed])
618 622 edittext.extend(["HG: removed %s" % f for f in remove])
619 623 if not changed and not remove:
620 624 edittext.append("HG: no files changed")
621 625 edittext.append("")
622 626 # run editor in the repository root
623 627 olddir = os.getcwd()
624 628 os.chdir(self.root)
625 629 text = self.ui.edit("\n".join(edittext), user)
626 630 os.chdir(olddir)
627 631
628 632 lines = [line.rstrip() for line in text.rstrip().splitlines()]
629 633 while lines and not lines[0]:
630 634 del lines[0]
631 635 if not lines:
632 636 return None
633 637 text = '\n'.join(lines)
634 638 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
635 639 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
636 640 parent2=xp2)
637 641 tr.close()
638 642
639 643 self.dirstate.setparents(n)
640 644 self.dirstate.update(new, "n")
641 645 self.dirstate.forget(remove)
642 646
643 647 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
644 648 return n
645 649
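commit() is the programmatic counterpart of 'hg commit': given an explicit file list it checks the dirstate (states in 'nmai' are committed, 'r' removed), otherwise it commits whatever changes() reports, fires the precommit/pretxncommit/commit hooks, and returns the new changeset node, or None when nothing changed or the editor message came back empty. A rough sketch (file name, message and user are invented):

import os
from mercurial import ui, hg

repo = hg.repository(ui.ui(), path='')
f = open(os.path.join(repo.root, 'notes.txt'), 'w')
f.write('hello\n')
f.close()
repo.add(['notes.txt'])                       # mark the file 'a' in the dirstate
node = repo.commit(['notes.txt'],
                   text='add notes.txt',
                   user='editor <editor@example.com>')
print node and 'committed' or 'nothing changed'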
646 650 def walk(self, node=None, files=[], match=util.always, badmatch=None):
647 651 if node:
648 652 fdict = dict.fromkeys(files)
649 653 for fn in self.manifest.read(self.changelog.read(node)[0]):
650 654 fdict.pop(fn, None)
651 655 if match(fn):
652 656 yield 'm', fn
653 657 for fn in fdict:
654 658 if badmatch and badmatch(fn):
655 659 if match(fn):
656 660 yield 'b', fn
657 661 else:
658 662 self.ui.warn(_('%s: No such file in rev %s\n') % (
659 663 util.pathto(self.getcwd(), fn), short(node)))
660 664 else:
661 665 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
662 666 yield src, fn
663 667
664 668 def status(self, node1=None, node2=None, files=[], match=util.always,
665 669 wlock=None, list_ignored=False, list_clean=False):
666 670 """return status of files between two nodes or node and working directory
667 671
668 672 If node1 is None, use the first dirstate parent instead.
669 673 If node2 is None, compare node1 with working directory.
670 674 """
671 675
672 676 def fcmp(fn, mf):
673 677 t1 = self.wread(fn)
674 678 t2 = self.file(fn).read(mf.get(fn, nullid))
675 679 return cmp(t1, t2)
676 680
677 681 def mfmatches(node):
678 682 change = self.changelog.read(node)
679 683 mf = dict(self.manifest.read(change[0]))
680 684 for fn in mf.keys():
681 685 if not match(fn):
682 686 del mf[fn]
683 687 return mf
684 688
685 689 modified, added, removed, deleted, unknown = [], [], [], [], []
686 690 ignored, clean = [], []
687 691
688 692 compareworking = False
689 693 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
690 694 compareworking = True
691 695
692 696 if not compareworking:
693 697 # read the manifest from node1 before the manifest from node2,
694 698 # so that we'll hit the manifest cache if we're going through
695 699 # all the revisions in parent->child order.
696 700 mf1 = mfmatches(node1)
697 701
698 702 # are we comparing the working directory?
699 703 if not node2:
700 704 if not wlock:
701 705 try:
702 706 wlock = self.wlock(wait=0)
703 707 except lock.LockException:
704 708 wlock = None
705 709 (lookup, modified, added, removed, deleted, unknown,
706 710 ignored, clean) = self.dirstate.status(files, match,
707 711 list_ignored, list_clean)
708 712
709 713 # are we comparing working dir against its parent?
710 714 if compareworking:
711 715 if lookup:
712 716 # do a full compare of any files that might have changed
713 717 mf2 = mfmatches(self.dirstate.parents()[0])
714 718 for f in lookup:
715 719 if fcmp(f, mf2):
716 720 modified.append(f)
717 721 elif wlock is not None:
718 722 self.dirstate.update([f], "n")
719 723 else:
720 724 # we are comparing working dir against non-parent
721 725 # generate a pseudo-manifest for the working dir
722 726 mf2 = mfmatches(self.dirstate.parents()[0])
723 727 for f in lookup + modified + added:
724 728 mf2[f] = ""
725 729 for f in removed:
726 730 if f in mf2:
727 731 del mf2[f]
728 732 else:
729 733 # we are comparing two revisions
730 734 mf2 = mfmatches(node2)
731 735
732 736 if not compareworking:
733 737 # flush lists from dirstate before comparing manifests
734 738 modified, added, clean = [], [], []
735 739
736 740 # make sure to sort the files so we talk to the disk in a
737 741 # reasonable order
738 742 mf2keys = mf2.keys()
739 743 mf2keys.sort()
740 744 for fn in mf2keys:
741 745 if mf1.has_key(fn):
742 746 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
743 747 modified.append(fn)
744 748 elif list_clean:
745 749 clean.append(fn)
746 750 del mf1[fn]
747 751 else:
748 752 added.append(fn)
749 753
750 754 removed = mf1.keys()
751 755
752 756 # sort and return results:
753 757 for l in modified, added, removed, deleted, unknown, ignored, clean:
754 758 l.sort()
755 759 return (modified, added, removed, deleted, unknown, ignored, clean)
756 760
757 761 def changes(self, node1=None, node2=None, files=[], match=util.always,
758 762 wlock=None, list_ignored=False, list_clean=False):
759 763 '''DEPRECATED - use status instead'''
760 764 marduit = self.status(node1, node2, files, match, wlock,
761 765 list_ignored, list_clean)
762 766 if list_ignored:
763 767 return marduit[:-1]
764 768 else:
765 769 return marduit[:-2]
766 770
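status() is what the status and diff commands build on: it always returns the 7-tuple (modified, added, removed, deleted, unknown, ignored, clean), with the last two populated only when list_ignored/list_clean are set, while changes() survives only as a deprecated wrapper that trims the tuple. A small sketch that prints something close to 'hg status -A' (the status letters mirror the command-line output and are not taken from this file):

from mercurial import ui, hg

repo = hg.repository(ui.ui(), path='')
state = repo.status(list_ignored=True, list_clean=True)
for letter, files in zip('MAR!?IC', state):
    for f in files:
        print '%s %s' % (letter, f)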
767 771 def add(self, list, wlock=None):
768 772 if not wlock:
769 773 wlock = self.wlock()
770 774 for f in list:
771 775 p = self.wjoin(f)
772 776 if not os.path.exists(p):
773 777 self.ui.warn(_("%s does not exist!\n") % f)
774 778 elif not os.path.isfile(p):
775 779 self.ui.warn(_("%s not added: only files supported currently\n")
776 780 % f)
777 781 elif self.dirstate.state(f) in 'an':
778 782 self.ui.warn(_("%s already tracked!\n") % f)
779 783 else:
780 784 self.dirstate.update([f], "a")
781 785
782 786 def forget(self, list, wlock=None):
783 787 if not wlock:
784 788 wlock = self.wlock()
785 789 for f in list:
786 790 if self.dirstate.state(f) not in 'ai':
787 791 self.ui.warn(_("%s not added!\n") % f)
788 792 else:
789 793 self.dirstate.forget([f])
790 794
791 795 def remove(self, list, unlink=False, wlock=None):
792 796 if unlink:
793 797 for f in list:
794 798 try:
795 799 util.unlink(self.wjoin(f))
796 800 except OSError, inst:
797 801 if inst.errno != errno.ENOENT:
798 802 raise
799 803 if not wlock:
800 804 wlock = self.wlock()
801 805 for f in list:
802 806 p = self.wjoin(f)
803 807 if os.path.exists(p):
804 808 self.ui.warn(_("%s still exists!\n") % f)
805 809 elif self.dirstate.state(f) == 'a':
806 810 self.dirstate.forget([f])
807 811 elif f not in self.dirstate:
808 812 self.ui.warn(_("%s not tracked!\n") % f)
809 813 else:
810 814 self.dirstate.update([f], "r")
811 815
812 816 def undelete(self, list, wlock=None):
813 817 p = self.dirstate.parents()[0]
814 818 mn = self.changelog.read(p)[0]
815 819 mf = self.manifest.readflags(mn)
816 820 m = self.manifest.read(mn)
817 821 if not wlock:
818 822 wlock = self.wlock()
819 823 for f in list:
820 824 if self.dirstate.state(f) not in "r":
821 825 self.ui.warn("%s not removed!\n" % f)
822 826 else:
823 827 t = self.file(f).read(m[f])
824 828 self.wwrite(f, t)
825 829 util.set_exec(self.wjoin(f), mf[f])
826 830 self.dirstate.update([f], "n")
827 831
828 832 def copy(self, source, dest, wlock=None):
829 833 p = self.wjoin(dest)
830 834 if not os.path.exists(p):
831 835 self.ui.warn(_("%s does not exist!\n") % dest)
832 836 elif not os.path.isfile(p):
833 837 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
834 838 else:
835 839 if not wlock:
836 840 wlock = self.wlock()
837 841 if self.dirstate.state(dest) == '?':
838 842 self.dirstate.update([dest], "a")
839 843 self.dirstate.copy(source, dest)
840 844
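Editor's note: add(), forget(), remove(), undelete() and copy() above all branch on single-letter dirstate states ('n', 'a', 'r', 'm', '?'). The file itself never spells the letters out; the usual Mercurial meanings are sketched below as an informal assumption, not as API documentation.

    # Assumed meanings of the dirstate state letters tested above.
    DIRSTATE_STATES = {
        'n': 'normal  - tracked and, as far as the dirstate knows, unmodified',
        'a': 'added   - scheduled for addition at the next commit',
        'r': 'removed - scheduled for removal at the next commit',
        'm': 'merged  - touched by a branch merge, to be recorded at commit',
        '?': 'unknown - present in the working dir but not tracked',
    }

    def already_tracked(state):
        # mirrors the check in add(): warn if the file is already 'a' or 'n'
        return state in 'an'

    assert already_tracked('a') and not already_tracked('?')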
841 845 def heads(self, start=None):
842 846 heads = self.changelog.heads(start)
843 847 # sort the output in rev descending order
844 848 heads = [(-self.changelog.rev(h), h) for h in heads]
845 849 heads.sort()
846 850 return [n for (r, n) in heads]
847 851
848 852 # branchlookup returns a dict giving a list of branches for
849 853 # each head. A branch is defined as the tag of a node or
850 854 # the branch of the node's parents. If a node has multiple
851 855 # branch tags, tags are eliminated if they are visible from other
852 856 # branch tags.
853 857 #
854 858 # So, for this graph: a->b->c->d->e
855 859 # \ /
856 860 # aa -----/
857 861 # a has tag 2.6.12
858 862 # d has tag 2.6.13
859 863 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
860 864 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is
861 865 # eliminated from the list.
862 866 #
863 867 # It is possible that more than one head will have the same branch tag.
864 868 # callers need to check the result for multiple heads under the same
865 869 # branch tag if that is a problem for them (ie checkout of a specific
866 870 # branch).
867 871 #
868 872 # passing in a specific branch will limit the depth of the search
869 873 # through the parents. It won't limit the branches returned in the
870 874 # result though.
871 875 def branchlookup(self, heads=None, branch=None):
872 876 if not heads:
873 877 heads = self.heads()
874 878 headt = [ h for h in heads ]
875 879 chlog = self.changelog
876 880 branches = {}
877 881 merges = []
878 882 seenmerge = {}
879 883
880 884 # traverse the tree once for each head, recording in the branches
881 885 # dict which tags are visible from this head. The branches
882 886 # dict also records which tags are visible from each tag
883 887 # while we traverse.
884 888 while headt or merges:
885 889 if merges:
886 890 n, found = merges.pop()
887 891 visit = [n]
888 892 else:
889 893 h = headt.pop()
890 894 visit = [h]
891 895 found = [h]
892 896 seen = {}
893 897 while visit:
894 898 n = visit.pop()
895 899 if n in seen:
896 900 continue
897 901 pp = chlog.parents(n)
898 902 tags = self.nodetags(n)
899 903 if tags:
900 904 for x in tags:
901 905 if x == 'tip':
902 906 continue
903 907 for f in found:
904 908 branches.setdefault(f, {})[n] = 1
905 909 branches.setdefault(n, {})[n] = 1
906 910 break
907 911 if n not in found:
908 912 found.append(n)
909 913 if branch in tags:
910 914 continue
911 915 seen[n] = 1
912 916 if pp[1] != nullid and n not in seenmerge:
913 917 merges.append((pp[1], [x for x in found]))
914 918 seenmerge[n] = 1
915 919 if pp[0] != nullid:
916 920 visit.append(pp[0])
917 921 # traverse the branches dict, eliminating branch tags from each
918 922 # head that are visible from another branch tag for that head.
919 923 out = {}
920 924 viscache = {}
921 925 for h in heads:
922 926 def visible(node):
923 927 if node in viscache:
924 928 return viscache[node]
925 929 ret = {}
926 930 visit = [node]
927 931 while visit:
928 932 x = visit.pop()
929 933 if x in viscache:
930 934 ret.update(viscache[x])
931 935 elif x not in ret:
932 936 ret[x] = 1
933 937 if x in branches:
934 938 visit[len(visit):] = branches[x].keys()
935 939 viscache[node] = ret
936 940 return ret
937 941 if h not in branches:
938 942 continue
939 943 # O(n^2), but somewhat limited. This only searches the
940 944 # tags visible from a specific head, not all the tags in the
941 945 # whole repo.
942 946 for b in branches[h]:
943 947 vis = False
944 948 for bb in branches[h].keys():
945 949 if b != bb:
946 950 if b in visible(bb):
947 951 vis = True
948 952 break
949 953 if not vis:
950 954 l = out.setdefault(h, [])
951 955 l[len(l):] = self.nodetags(b)
952 956 return out
953 957
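Editor's note: the branchlookup() comment above describes dropping a branch tag from a head when that tag's node is already reachable from another tag on the same head. A self-contained sketch of that rule on the toy graph from the comment (a->b->c->d->e with aa merged back in, 2.6.12 at a, 2.6.13 at d); the node and tag names are illustrative only.

    # parents of each node in the toy graph from the comment above
    parents = {'a': [], 'aa': ['a'], 'b': ['a'], 'c': ['b'],
               'd': ['c'], 'e': ['d', 'aa']}
    tags = {'a': '2.6.12', 'd': '2.6.13'}

    def ancestors(node):
        seen, stack = set(), [node]
        while stack:
            n = stack.pop()
            if n not in seen:
                seen.add(n)
                stack.extend(parents[n])
        return seen

    # every tagged node reachable from head 'e' is a candidate branch tag
    candidates = [n for n in ancestors('e') if n in tags]
    # drop a candidate if another candidate can already "see" it
    kept = [n for n in candidates
            if not any(n in ancestors(other) and other != n
                       for other in candidates)]
    assert [tags[n] for n in kept] == ['2.6.13']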
954 958 def branches(self, nodes):
955 959 if not nodes:
956 960 nodes = [self.changelog.tip()]
957 961 b = []
958 962 for n in nodes:
959 963 t = n
960 964 while 1:
961 965 p = self.changelog.parents(n)
962 966 if p[1] != nullid or p[0] == nullid:
963 967 b.append((t, n, p[0], p[1]))
964 968 break
965 969 n = p[0]
966 970 return b
967 971
968 972 def between(self, pairs):
969 973 r = []
970 974
971 975 for top, bottom in pairs:
972 976 n, l, i = top, [], 0
973 977 f = 1
974 978
975 979 while n != bottom:
976 980 p = self.changelog.parents(n)[0]
977 981 if i == f:
978 982 l.append(n)
979 983 f = f * 2
980 984 n = p
981 985 i += 1
982 986
983 987 r.append(l)
984 988
985 989 return r
986 990
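Editor's note: between() walks from each top node toward its bottom but only records nodes at exponentially growing distances (1, 2, 4, 8, ...), so a long linear range is summarized with O(log n) samples. A small sketch of which distances get recorded, using plain integers for revisions (an illustration, not the real API):

    def sample_distances(length):
        """Distances from 'top' that between() would record for a linear
        range of the given length (the bottom itself is never recorded)."""
        picked, f, i = [], 1, 0
        while i < length:          # i counts steps taken from top toward bottom
            if i == f:
                picked.append(i)
                f *= 2
            i += 1
        return picked

    # For a 100-revision gap, only 7 intermediate samples are sent back.
    assert sample_distances(100) == [1, 2, 4, 8, 16, 32, 64]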
987 991 def findincoming(self, remote, base=None, heads=None, force=False):
988 992 """Return list of roots of the subsets of missing nodes from remote
989 993
990 994 If base dict is specified, assume that these nodes and their parents
991 995 exist on the remote side and that no child of a node of base exists
992 996 in both remote and self.
993 997 Furthermore, base will be updated to include the nodes that exist in
994 998 both self and remote but none of whose children exist in both self and remote.
995 999 If a list of heads is specified, return only nodes which are heads
996 1000 or ancestors of these heads.
997 1001
998 1002 All the ancestors of base are in self and in remote.
999 1003 All the descendants of the list returned are missing in self.
1000 1004 (and so we know that the rest of the nodes are missing in remote, see
1001 1005 outgoing)
1002 1006 """
1003 1007 m = self.changelog.nodemap
1004 1008 search = []
1005 1009 fetch = {}
1006 1010 seen = {}
1007 1011 seenbranch = {}
1008 1012 if base == None:
1009 1013 base = {}
1010 1014
1011 1015 if not heads:
1012 1016 heads = remote.heads()
1013 1017
1014 1018 if self.changelog.tip() == nullid:
1015 1019 base[nullid] = 1
1016 1020 if heads != [nullid]:
1017 1021 return [nullid]
1018 1022 return []
1019 1023
1020 1024 # assume we're closer to the tip than the root
1021 1025 # and start by examining the heads
1022 1026 self.ui.status(_("searching for changes\n"))
1023 1027
1024 1028 unknown = []
1025 1029 for h in heads:
1026 1030 if h not in m:
1027 1031 unknown.append(h)
1028 1032 else:
1029 1033 base[h] = 1
1030 1034
1031 1035 if not unknown:
1032 1036 return []
1033 1037
1034 1038 req = dict.fromkeys(unknown)
1035 1039 reqcnt = 0
1036 1040
1037 1041 # search through remote branches
1038 1042 # a 'branch' here is a linear segment of history, with four parts:
1039 1043 # head, root, first parent, second parent
1040 1044 # (a branch always has two parents (or none) by definition)
1041 1045 unknown = remote.branches(unknown)
1042 1046 while unknown:
1043 1047 r = []
1044 1048 while unknown:
1045 1049 n = unknown.pop(0)
1046 1050 if n[0] in seen:
1047 1051 continue
1048 1052
1049 1053 self.ui.debug(_("examining %s:%s\n")
1050 1054 % (short(n[0]), short(n[1])))
1051 1055 if n[0] == nullid: # found the end of the branch
1052 1056 pass
1053 1057 elif n in seenbranch:
1054 1058 self.ui.debug(_("branch already found\n"))
1055 1059 continue
1056 1060 elif n[1] and n[1] in m: # do we know the base?
1057 1061 self.ui.debug(_("found incomplete branch %s:%s\n")
1058 1062 % (short(n[0]), short(n[1])))
1059 1063 search.append(n) # schedule branch range for scanning
1060 1064 seenbranch[n] = 1
1061 1065 else:
1062 1066 if n[1] not in seen and n[1] not in fetch:
1063 1067 if n[2] in m and n[3] in m:
1064 1068 self.ui.debug(_("found new changeset %s\n") %
1065 1069 short(n[1]))
1066 1070 fetch[n[1]] = 1 # earliest unknown
1067 1071 for p in n[2:4]:
1068 1072 if p in m:
1069 1073 base[p] = 1 # latest known
1070 1074
1071 1075 for p in n[2:4]:
1072 1076 if p not in req and p not in m:
1073 1077 r.append(p)
1074 1078 req[p] = 1
1075 1079 seen[n[0]] = 1
1076 1080
1077 1081 if r:
1078 1082 reqcnt += 1
1079 1083 self.ui.debug(_("request %d: %s\n") %
1080 1084 (reqcnt, " ".join(map(short, r))))
1081 1085 for p in range(0, len(r), 10):
1082 1086 for b in remote.branches(r[p:p+10]):
1083 1087 self.ui.debug(_("received %s:%s\n") %
1084 1088 (short(b[0]), short(b[1])))
1085 1089 unknown.append(b)
1086 1090
1087 1091 # do binary search on the branches we found
1088 1092 while search:
1089 1093 n = search.pop(0)
1090 1094 reqcnt += 1
1091 1095 l = remote.between([(n[0], n[1])])[0]
1092 1096 l.append(n[1])
1093 1097 p = n[0]
1094 1098 f = 1
1095 1099 for i in l:
1096 1100 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1097 1101 if i in m:
1098 1102 if f <= 2:
1099 1103 self.ui.debug(_("found new branch changeset %s\n") %
1100 1104 short(p))
1101 1105 fetch[p] = 1
1102 1106 base[i] = 1
1103 1107 else:
1104 1108 self.ui.debug(_("narrowed branch search to %s:%s\n")
1105 1109 % (short(p), short(i)))
1106 1110 search.append((p, i))
1107 1111 break
1108 1112 p, f = i, f * 2
1109 1113
1110 1114 # sanity check our fetch list
1111 1115 for f in fetch.keys():
1112 1116 if f in m:
1113 1117 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1114 1118
1115 1119 if base.keys() == [nullid]:
1116 1120 if force:
1117 1121 self.ui.warn(_("warning: repository is unrelated\n"))
1118 1122 else:
1119 1123 raise util.Abort(_("repository is unrelated"))
1120 1124
1121 1125 self.ui.note(_("found new changesets starting at ") +
1122 1126 " ".join([short(f) for f in fetch]) + "\n")
1123 1127
1124 1128 self.ui.debug(_("%d total queries\n") % reqcnt)
1125 1129
1126 1130 return fetch.keys()
1127 1131
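Editor's note: the "do binary search on the branches we found" loop above repeatedly asks the remote for between() samples of an incomplete branch and narrows the (unknown top, known bottom) range until the earliest unknown revision is pinned down. Below is a standalone simulation over a purely linear history, where revision numbers stand in for nodes and the remote "knows" everything; all names and numbers are invented.

    REMOTE_TIP = 1000          # the remote has revisions 0..1000 (hypothetical)
    LOCAL_KNOWS = 412          # we already have revisions 0..412

    def remote_between(top, bottom):
        # what remote.between() returns for one (top, bottom) pair on a
        # linear history: samples at distances 1, 2, 4, ... from top
        l, f, i, n = [], 1, 0, top
        while n != bottom:
            p = n - 1                      # first parent of rev n is rev n - 1
            if i == f:
                l.append(n)
                f *= 2
            n = p
            i += 1
        return l

    def find_fetch_root(top, bottom, known):
        # narrow (unknown top, known bottom) pairs until the earliest
        # unknown revision is found, as in the search loop above
        search, queries = [(top, bottom)], 0
        while search:
            top, bottom = search.pop(0)
            queries += 1
            l = remote_between(top, bottom) + [bottom]
            p, f = top, 1
            for i in l:
                if known(i):
                    if f <= 2:
                        return p, queries      # p is the earliest unknown rev
                    search.append((p, i))      # gap still too wide: narrow it
                    break
                p, f = i, f * 2

    root, queries = find_fetch_root(REMOTE_TIP, 0, lambda r: r <= LOCAL_KNOWS)
    assert root == LOCAL_KNOWS + 1 and queries == 5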
1128 1132 def findoutgoing(self, remote, base=None, heads=None, force=False):
1129 1133 """Return list of nodes that are roots of subsets not in remote
1130 1134
1131 1135 If base dict is specified, assume that these nodes and their parents
1132 1136 exist on the remote side.
1133 1137 If a list of heads is specified, return only nodes which are heads
1134 1138 or ancestors of these heads, and return a second element which
1135 1139 contains all remote heads which get new children.
1136 1140 """
1137 1141 if base == None:
1138 1142 base = {}
1139 1143 self.findincoming(remote, base, heads, force=force)
1140 1144
1141 1145 self.ui.debug(_("common changesets up to ")
1142 1146 + " ".join(map(short, base.keys())) + "\n")
1143 1147
1144 1148 remain = dict.fromkeys(self.changelog.nodemap)
1145 1149
1146 1150 # prune everything remote has from the tree
1147 1151 del remain[nullid]
1148 1152 remove = base.keys()
1149 1153 while remove:
1150 1154 n = remove.pop(0)
1151 1155 if n in remain:
1152 1156 del remain[n]
1153 1157 for p in self.changelog.parents(n):
1154 1158 remove.append(p)
1155 1159
1156 1160 # find every node whose parents have been pruned
1157 1161 subset = []
1158 1162 # find every remote head that will get new children
1159 1163 updated_heads = {}
1160 1164 for n in remain:
1161 1165 p1, p2 = self.changelog.parents(n)
1162 1166 if p1 not in remain and p2 not in remain:
1163 1167 subset.append(n)
1164 1168 if heads:
1165 1169 if p1 in heads:
1166 1170 updated_heads[p1] = True
1167 1171 if p2 in heads:
1168 1172 updated_heads[p2] = True
1169 1173
1170 1174 # this is the set of all roots we have to push
1171 1175 if heads:
1172 1176 return subset, updated_heads.keys()
1173 1177 else:
1174 1178 return subset
1175 1179
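Editor's note: findoutgoing() above prunes every ancestor of the common 'base' nodes out of the node map and then reports the nodes whose parents were both pruned, i.e. the roots of what the remote is missing. A compact sketch of the same pruning on a toy graph (the node names and the graph are invented for illustration):

    nullid = None
    # toy history: r0 <- r1 <- r2, with r3 branching off r1
    parents = {'r0': (nullid, nullid), 'r1': ('r0', nullid),
               'r2': ('r1', nullid), 'r3': ('r1', nullid)}

    def outgoing_roots(base):
        remain = set(parents)           # everything we have locally
        remove = list(base)             # remote has these and their ancestors
        while remove:
            n = remove.pop(0)
            if n in remain:
                remain.discard(n)
                remove.extend(p for p in parents[n] if p is not nullid)
        # roots: leftover nodes none of whose parents remain
        return sorted(n for n in remain
                      if all(p not in remain for p in parents[n]))

    # If the remote already has r1 (and thus r0), we must push r2 and r3.
    assert outgoing_roots(['r1']) == ['r2', 'r3']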
1176 1180 def pull(self, remote, heads=None, force=False):
1177 1181 l = self.lock()
1178 1182
1179 1183 fetch = self.findincoming(remote, force=force)
1180 1184 if fetch == [nullid]:
1181 1185 self.ui.status(_("requesting all changes\n"))
1182 1186
1183 1187 if not fetch:
1184 1188 self.ui.status(_("no changes found\n"))
1185 1189 return 0
1186 1190
1187 1191 if heads is None:
1188 1192 cg = remote.changegroup(fetch, 'pull')
1189 1193 else:
1190 1194 cg = remote.changegroupsubset(fetch, heads, 'pull')
1191 1195 return self.addchangegroup(cg, 'pull', remote.url())
1192 1196
1193 1197 def push(self, remote, force=False, revs=None):
1194 1198 # there are two ways to push to remote repo:
1195 1199 #
1196 1200 # addchangegroup assumes local user can lock remote
1197 1201 # repo (local filesystem, old ssh servers).
1198 1202 #
1199 1203 # unbundle assumes local user cannot lock remote repo (new ssh
1200 1204 # servers, http servers).
1201 1205
1202 1206 if remote.capable('unbundle'):
1203 1207 return self.push_unbundle(remote, force, revs)
1204 1208 return self.push_addchangegroup(remote, force, revs)
1205 1209
1206 1210 def prepush(self, remote, force, revs):
1207 1211 base = {}
1208 1212 remote_heads = remote.heads()
1209 1213 inc = self.findincoming(remote, base, remote_heads, force=force)
1210 1214 if not force and inc:
1211 1215 self.ui.warn(_("abort: unsynced remote changes!\n"))
1212 1216 self.ui.status(_("(did you forget to sync?"
1213 1217 " use push -f to force)\n"))
1214 1218 return None, 1
1215 1219
1216 1220 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1217 1221 if revs is not None:
1218 1222 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1219 1223 else:
1220 1224 bases, heads = update, self.changelog.heads()
1221 1225
1222 1226 if not bases:
1223 1227 self.ui.status(_("no changes found\n"))
1224 1228 return None, 1
1225 1229 elif not force:
1226 1230 # FIXME we don't properly detect creation of new heads
1227 1231 # in the push -r case, assume the user knows what he's doing
1228 1232 if not revs and len(remote_heads) < len(heads) \
1229 1233 and remote_heads != [nullid]:
1230 1234 self.ui.warn(_("abort: push creates new remote branches!\n"))
1231 1235 self.ui.status(_("(did you forget to merge?"
1232 1236 " use push -f to force)\n"))
1233 1237 return None, 1
1234 1238
1235 1239 if revs is None:
1236 1240 cg = self.changegroup(update, 'push')
1237 1241 else:
1238 1242 cg = self.changegroupsubset(update, revs, 'push')
1239 1243 return cg, remote_heads
1240 1244
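Editor's note: the guard in prepush() above refuses an unforced push when it would leave the remote with more heads than it already has, since that silently creates new remote branches; as the FIXME notes, the check is skipped entirely for push -r. A small predicate restating that condition (the function name and placeholder head values are mine, not part of the API):

    def push_refused(local_heads, remote_heads, revs=None, force=False):
        """True if prepush() above would abort with
        'push creates new remote branches!'."""
        if force or revs is not None:      # -f, or -r (heads not re-checked)
            return False
        empty_remote = (remote_heads == ['nullid'])
        return len(remote_heads) < len(local_heads) and not empty_remote

    # one local head vs. one remote head: fine
    assert not push_refused(['h1'], ['h1-old'])
    # two local heads, one remote head: refused unless forced or -r is used
    assert push_refused(['h1', 'h2'], ['h1-old'])
    assert not push_refused(['h1', 'h2'], ['h1-old'], force=True)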
1241 1245 def push_addchangegroup(self, remote, force, revs):
1242 1246 lock = remote.lock()
1243 1247
1244 1248 ret = self.prepush(remote, force, revs)
1245 1249 if ret[0] is not None:
1246 1250 cg, remote_heads = ret
1247 1251 return remote.addchangegroup(cg, 'push', self.url())
1248 1252 return ret[1]
1249 1253
1250 1254 def push_unbundle(self, remote, force, revs):
1251 1255 # local repo finds heads on server, finds out what revs it
1252 1256 # must push. once revs transferred, if server finds it has
1253 1257 # different heads (someone else won commit/push race), server
1254 1258 # aborts.
1255 1259
1256 1260 ret = self.prepush(remote, force, revs)
1257 1261 if ret[0] is not None:
1258 1262 cg, remote_heads = ret
1259 1263 if force: remote_heads = ['force']
1260 1264 return remote.unbundle(cg, remote_heads, 'push')
1261 1265 return ret[1]
1262 1266
1263 1267 def changegroupsubset(self, bases, heads, source):
1264 1268 """This function generates a changegroup consisting of all the nodes
1265 1269 that are descendants of any of the bases, and ancestors of any of
1266 1270 the heads.
1267 1271
1268 1272 It is fairly complex as determining which filenodes and which
1269 1273 manifest nodes need to be included for the changeset to be complete
1270 1274 is non-trivial.
1271 1275
1272 1276 Another wrinkle is doing the reverse, figuring out which changeset in
1273 1277 the changegroup a particular filenode or manifestnode belongs to."""
1274 1278
1275 1279 self.hook('preoutgoing', throw=True, source=source)
1276 1280
1277 1281 # Set up some initial variables
1278 1282 # Make it easy to refer to self.changelog
1279 1283 cl = self.changelog
1280 1284 # msng is short for missing - compute the list of changesets in this
1281 1285 # changegroup.
1282 1286 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1283 1287 # Some bases may turn out to be superfluous, and some heads may be
1284 1288 # too. nodesbetween will return the minimal set of bases and heads
1285 1289 # necessary to re-create the changegroup.
1286 1290
1287 1291 # Known heads are the list of heads that it is assumed the recipient
1288 1292 # of this changegroup will know about.
1289 1293 knownheads = {}
1290 1294 # We assume that all parents of bases are known heads.
1291 1295 for n in bases:
1292 1296 for p in cl.parents(n):
1293 1297 if p != nullid:
1294 1298 knownheads[p] = 1
1295 1299 knownheads = knownheads.keys()
1296 1300 if knownheads:
1297 1301 # Now that we know what heads are known, we can compute which
1298 1302 # changesets are known. The recipient must know about all
1299 1303 # changesets required to reach the known heads from the null
1300 1304 # changeset.
1301 1305 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1302 1306 junk = None
1303 1307 # Transform the list into an ersatz set.
1304 1308 has_cl_set = dict.fromkeys(has_cl_set)
1305 1309 else:
1306 1310 # If there were no known heads, the recipient cannot be assumed to
1307 1311 # know about any changesets.
1308 1312 has_cl_set = {}
1309 1313
1310 1314 # Make it easy to refer to self.manifest
1311 1315 mnfst = self.manifest
1312 1316 # We don't know which manifests are missing yet
1313 1317 msng_mnfst_set = {}
1314 1318 # Nor do we know which filenodes are missing.
1315 1319 msng_filenode_set = {}
1316 1320
1317 1321 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1318 1322 junk = None
1319 1323
1320 1324 # A changeset always belongs to itself, so the changenode lookup
1321 1325 # function for a changenode is identity.
1322 1326 def identity(x):
1323 1327 return x
1324 1328
1325 1329 # A function generating function. Sets up an environment for the
1326 1330 # inner function.
1327 1331 def cmp_by_rev_func(revlog):
1328 1332 # Compare two nodes by their revision number in the environment's
1329 1333 # revision history. Since the revision number both represents the
1330 1334 # most efficient order to read the nodes in, and represents a
1331 1335 # topological sorting of the nodes, this function is often useful.
1332 1336 def cmp_by_rev(a, b):
1333 1337 return cmp(revlog.rev(a), revlog.rev(b))
1334 1338 return cmp_by_rev
1335 1339
1336 1340 # If we determine that a particular file or manifest node must be a
1337 1341 # node that the recipient of the changegroup will already have, we can
1338 1342 # also assume the recipient will have all the parents. This function
1339 1343 # prunes them from the set of missing nodes.
1340 1344 def prune_parents(revlog, hasset, msngset):
1341 1345 haslst = hasset.keys()
1342 1346 haslst.sort(cmp_by_rev_func(revlog))
1343 1347 for node in haslst:
1344 1348 parentlst = [p for p in revlog.parents(node) if p != nullid]
1345 1349 while parentlst:
1346 1350 n = parentlst.pop()
1347 1351 if n not in hasset:
1348 1352 hasset[n] = 1
1349 1353 p = [p for p in revlog.parents(n) if p != nullid]
1350 1354 parentlst.extend(p)
1351 1355 for n in hasset:
1352 1356 msngset.pop(n, None)
1353 1357
1354 1358 # This is a function generating function used to set up an environment
1355 1359 # for the inner function to execute in.
1356 1360 def manifest_and_file_collector(changedfileset):
1357 1361 # This is an information gathering function that gathers
1358 1362 # information from each changeset node that goes out as part of
1359 1363 # the changegroup. The information gathered is a list of which
1360 1364 # manifest nodes are potentially required (the recipient may
1361 1365 # already have them) and total list of all files which were
1362 1366 # changed in any changeset in the changegroup.
1363 1367 #
1364 1368 # We also remember the first changenode we saw any manifest
1365 1369 # referenced by so we can later determine which changenode 'owns'
1366 1370 # the manifest.
1367 1371 def collect_manifests_and_files(clnode):
1368 1372 c = cl.read(clnode)
1369 1373 for f in c[3]:
1370 1374 # This is to make sure we only have one instance of each
1371 1375 # filename string for each filename.
1372 1376 changedfileset.setdefault(f, f)
1373 1377 msng_mnfst_set.setdefault(c[0], clnode)
1374 1378 return collect_manifests_and_files
1375 1379
1376 1380 # Figure out which manifest nodes (of the ones we think might be part
1377 1381 # of the changegroup) the recipient must know about and remove them
1378 1382 # from the changegroup.
1379 1383 def prune_manifests():
1380 1384 has_mnfst_set = {}
1381 1385 for n in msng_mnfst_set:
1382 1386 # If a 'missing' manifest thinks it belongs to a changenode
1383 1387 # the recipient is assumed to have, obviously the recipient
1384 1388 # must have that manifest.
1385 1389 linknode = cl.node(mnfst.linkrev(n))
1386 1390 if linknode in has_cl_set:
1387 1391 has_mnfst_set[n] = 1
1388 1392 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1389 1393
1390 1394 # Use the information collected in collect_manifests_and_files to say
1391 1395 # which changenode any manifestnode belongs to.
1392 1396 def lookup_manifest_link(mnfstnode):
1393 1397 return msng_mnfst_set[mnfstnode]
1394 1398
1395 1399 # A function generating function that sets up the initial environment
1396 1400 # the inner function.
1397 1401 def filenode_collector(changedfiles):
1398 1402 next_rev = [0]
1399 1403 # This gathers information from each manifestnode included in the
1400 1404 # changegroup about which filenodes the manifest node references
1401 1405 # so we can include those in the changegroup too.
1402 1406 #
1403 1407 # It also remembers which changenode each filenode belongs to. It
1404 1408 # does this by assuming that a filenode belongs to the changenode
1405 1409 # that the first manifest referencing it belongs to.
1406 1410 def collect_msng_filenodes(mnfstnode):
1407 1411 r = mnfst.rev(mnfstnode)
1408 1412 if r == next_rev[0]:
1409 1413 # If the last rev we looked at was the one just previous,
1410 1414 # we only need to see a diff.
1411 1415 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1412 1416 # For each line in the delta
1413 1417 for dline in delta.splitlines():
1414 1418 # get the filename and filenode for that line
1415 1419 f, fnode = dline.split('\0')
1416 1420 fnode = bin(fnode[:40])
1417 1421 f = changedfiles.get(f, None)
1418 1422 # And if the file is in the list of files we care
1419 1423 # about.
1420 1424 if f is not None:
1421 1425 # Get the changenode this manifest belongs to
1422 1426 clnode = msng_mnfst_set[mnfstnode]
1423 1427 # Create the set of filenodes for the file if
1424 1428 # there isn't one already.
1425 1429 ndset = msng_filenode_set.setdefault(f, {})
1426 1430 # And set the filenode's changelog node to the
1427 1431 # manifest's if it hasn't been set already.
1428 1432 ndset.setdefault(fnode, clnode)
1429 1433 else:
1430 1434 # Otherwise we need a full manifest.
1431 1435 m = mnfst.read(mnfstnode)
1432 1436 # For every file we care about.
1433 1437 for f in changedfiles:
1434 1438 fnode = m.get(f, None)
1435 1439 # If it's in the manifest
1436 1440 if fnode is not None:
1437 1441 # See comments above.
1438 1442 clnode = msng_mnfst_set[mnfstnode]
1439 1443 ndset = msng_filenode_set.setdefault(f, {})
1440 1444 ndset.setdefault(fnode, clnode)
1441 1445 # Remember the revision we hope to see next.
1442 1446 next_rev[0] = r + 1
1443 1447 return collect_msng_filenodes
1444 1448
1445 1449 # We have a list of filenodes we think we need for a file; let's remove
1446 1450 # all those we know the recipient must have.
1447 1451 def prune_filenodes(f, filerevlog):
1448 1452 msngset = msng_filenode_set[f]
1449 1453 hasset = {}
1450 1454 # If a 'missing' filenode thinks it belongs to a changenode we
1451 1455 # assume the recipient must have, then the recipient must have
1452 1456 # that filenode.
1453 1457 for n in msngset:
1454 1458 clnode = cl.node(filerevlog.linkrev(n))
1455 1459 if clnode in has_cl_set:
1456 1460 hasset[n] = 1
1457 1461 prune_parents(filerevlog, hasset, msngset)
1458 1462
1459 1463 # A function generating function that sets up a context for the
1460 1464 # inner function.
1461 1465 def lookup_filenode_link_func(fname):
1462 1466 msngset = msng_filenode_set[fname]
1463 1467 # Lookup the changenode the filenode belongs to.
1464 1468 def lookup_filenode_link(fnode):
1465 1469 return msngset[fnode]
1466 1470 return lookup_filenode_link
1467 1471
1468 1472 # Now that we have all these utility functions to help out and
1469 1473 # logically divide up the task, generate the group.
1470 1474 def gengroup():
1471 1475 # The set of changed files starts empty.
1472 1476 changedfiles = {}
1473 1477 # Create a changenode group generator that will call our functions
1474 1478 # back to lookup the owning changenode and collect information.
1475 1479 group = cl.group(msng_cl_lst, identity,
1476 1480 manifest_and_file_collector(changedfiles))
1477 1481 for chnk in group:
1478 1482 yield chnk
1479 1483
1480 1484 # The list of manifests has been collected by the generator
1481 1485 # calling our functions back.
1482 1486 prune_manifests()
1483 1487 msng_mnfst_lst = msng_mnfst_set.keys()
1484 1488 # Sort the manifestnodes by revision number.
1485 1489 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1486 1490 # Create a generator for the manifestnodes that calls our lookup
1487 1491 # and data collection functions back.
1488 1492 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1489 1493 filenode_collector(changedfiles))
1490 1494 for chnk in group:
1491 1495 yield chnk
1492 1496
1493 1497 # These are no longer needed, dereference and toss the memory for
1494 1498 # them.
1495 1499 msng_mnfst_lst = None
1496 1500 msng_mnfst_set.clear()
1497 1501
1498 1502 changedfiles = changedfiles.keys()
1499 1503 changedfiles.sort()
1500 1504 # Go through all our files in order sorted by name.
1501 1505 for fname in changedfiles:
1502 1506 filerevlog = self.file(fname)
1503 1507 # Toss out the filenodes that the recipient isn't really
1504 1508 # missing.
1505 1509 if msng_filenode_set.has_key(fname):
1506 1510 prune_filenodes(fname, filerevlog)
1507 1511 msng_filenode_lst = msng_filenode_set[fname].keys()
1508 1512 else:
1509 1513 msng_filenode_lst = []
1510 1514 # If any filenodes are left, generate the group for them,
1511 1515 # otherwise don't bother.
1512 1516 if len(msng_filenode_lst) > 0:
1513 1517 yield changegroup.genchunk(fname)
1514 1518 # Sort the filenodes by their revision #
1515 1519 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1516 1520 # Create a group generator and only pass in a changenode
1517 1521 # lookup function as we need to collect no information
1518 1522 # from filenodes.
1519 1523 group = filerevlog.group(msng_filenode_lst,
1520 1524 lookup_filenode_link_func(fname))
1521 1525 for chnk in group:
1522 1526 yield chnk
1523 1527 if msng_filenode_set.has_key(fname):
1524 1528 # Don't need this anymore, toss it to free memory.
1525 1529 del msng_filenode_set[fname]
1526 1530 # Signal that no more groups are left.
1527 1531 yield changegroup.closechunk()
1528 1532
1529 1533 if msng_cl_lst:
1530 1534 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1531 1535
1532 1536 return util.chunkbuffer(gengroup())
1533 1537
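Editor's note: gengroup() above emits the changegroup as one flat chunk stream: the changelog group, then the manifest group, then, for each changed file that still has filenodes to send, a chunk carrying the file name followed by that file's delta group, and finally a closing chunk. The schematic below shows only that ordering (pure illustration: the real chunks are length-prefixed binary produced by the changegroup module, and files with nothing left to send are skipped).

    def changegroup_layout(changed_files):
        """Yield labels describing the order of sections in the stream
        produced by gengroup() above (a sketch, not real chunk data)."""
        yield 'changelog group'
        yield 'manifest group'
        for fname in sorted(changed_files):
            yield 'filename chunk: %s' % fname
            yield 'file delta group: %s' % fname
        yield 'close chunk'

    assert list(changegroup_layout(['b.txt', 'a.txt'])) == [
        'changelog group',
        'manifest group',
        'filename chunk: a.txt',
        'file delta group: a.txt',
        'filename chunk: b.txt',
        'file delta group: b.txt',
        'close chunk',
    ]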
1534 1538 def changegroup(self, basenodes, source):
1535 1539 """Generate a changegroup of all nodes that we have that a recipient
1536 1540 doesn't.
1537 1541
1538 1542 This is much easier than the previous function as we can assume that
1539 1543 the recipient has any changenode we aren't sending them."""
1540 1544
1541 1545 self.hook('preoutgoing', throw=True, source=source)
1542 1546
1543 1547 cl = self.changelog
1544 1548 nodes = cl.nodesbetween(basenodes, None)[0]
1545 1549 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1546 1550
1547 1551 def identity(x):
1548 1552 return x
1549 1553
1550 1554 def gennodelst(revlog):
1551 1555 for r in xrange(0, revlog.count()):
1552 1556 n = revlog.node(r)
1553 1557 if revlog.linkrev(n) in revset:
1554 1558 yield n
1555 1559
1556 1560 def changed_file_collector(changedfileset):
1557 1561 def collect_changed_files(clnode):
1558 1562 c = cl.read(clnode)
1559 1563 for fname in c[3]:
1560 1564 changedfileset[fname] = 1
1561 1565 return collect_changed_files
1562 1566
1563 1567 def lookuprevlink_func(revlog):
1564 1568 def lookuprevlink(n):
1565 1569 return cl.node(revlog.linkrev(n))
1566 1570 return lookuprevlink
1567 1571
1568 1572 def gengroup():
1569 1573 # construct a list of all changed files
1570 1574 changedfiles = {}
1571 1575
1572 1576 for chnk in cl.group(nodes, identity,
1573 1577 changed_file_collector(changedfiles)):
1574 1578 yield chnk
1575 1579 changedfiles = changedfiles.keys()
1576 1580 changedfiles.sort()
1577 1581
1578 1582 mnfst = self.manifest
1579 1583 nodeiter = gennodelst(mnfst)
1580 1584 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1581 1585 yield chnk
1582 1586
1583 1587 for fname in changedfiles:
1584 1588 filerevlog = self.file(fname)
1585 1589 nodeiter = gennodelst(filerevlog)
1586 1590 nodeiter = list(nodeiter)
1587 1591 if nodeiter:
1588 1592 yield changegroup.genchunk(fname)
1589 1593 lookup = lookuprevlink_func(filerevlog)
1590 1594 for chnk in filerevlog.group(nodeiter, lookup):
1591 1595 yield chnk
1592 1596
1593 1597 yield changegroup.closechunk()
1594 1598
1595 1599 if nodes:
1596 1600 self.hook('outgoing', node=hex(nodes[0]), source=source)
1597 1601
1598 1602 return util.chunkbuffer(gengroup())
1599 1603
1600 1604 def addchangegroup(self, source, srctype, url):
1601 1605 """add changegroup to repo.
1602 1606 returns number of heads modified or added + 1."""
1603 1607
1604 1608 def csmap(x):
1605 1609 self.ui.debug(_("add changeset %s\n") % short(x))
1606 1610 return cl.count()
1607 1611
1608 1612 def revmap(x):
1609 1613 return cl.rev(x)
1610 1614
1611 1615 if not source:
1612 1616 return 0
1613 1617
1614 1618 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1615 1619
1616 1620 changesets = files = revisions = 0
1617 1621
1618 1622 tr = self.transaction()
1619 1623
1620 1624 # write changelog data to temp files so concurrent readers will not see
1621 1625 # inconsistent view
1622 1626 cl = None
1623 1627 try:
1624 1628 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1625 1629
1626 1630 oldheads = len(cl.heads())
1627 1631
1628 1632 # pull off the changeset group
1629 1633 self.ui.status(_("adding changesets\n"))
1630 1634 cor = cl.count() - 1
1631 1635 chunkiter = changegroup.chunkiter(source)
1632 1636 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1633 1637 raise util.Abort(_("received changelog group is empty"))
1634 1638 cnr = cl.count() - 1
1635 1639 changesets = cnr - cor
1636 1640
1637 1641 # pull off the manifest group
1638 1642 self.ui.status(_("adding manifests\n"))
1639 1643 chunkiter = changegroup.chunkiter(source)
1640 1644 # no need to check for empty manifest group here:
1641 1645 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1642 1646 # no new manifest will be created and the manifest group will
1643 1647 # be empty during the pull
1644 1648 self.manifest.addgroup(chunkiter, revmap, tr)
1645 1649
1646 1650 # process the files
1647 1651 self.ui.status(_("adding file changes\n"))
1648 1652 while 1:
1649 1653 f = changegroup.getchunk(source)
1650 1654 if not f:
1651 1655 break
1652 1656 self.ui.debug(_("adding %s revisions\n") % f)
1653 1657 fl = self.file(f)
1654 1658 o = fl.count()
1655 1659 chunkiter = changegroup.chunkiter(source)
1656 1660 if fl.addgroup(chunkiter, revmap, tr) is None:
1657 1661 raise util.Abort(_("received file revlog group is empty"))
1658 1662 revisions += fl.count() - o
1659 1663 files += 1
1660 1664
1661 1665 cl.writedata()
1662 1666 finally:
1663 1667 if cl:
1664 1668 cl.cleanup()
1665 1669
1666 1670 # make changelog see real files again
1667 1671 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1668 1672 self.changelog.checkinlinesize(tr)
1669 1673
1670 1674 newheads = len(self.changelog.heads())
1671 1675 heads = ""
1672 1676 if oldheads and newheads != oldheads:
1673 1677 heads = _(" (%+d heads)") % (newheads - oldheads)
1674 1678
1675 1679 self.ui.status(_("added %d changesets"
1676 1680 " with %d changes to %d files%s\n")
1677 1681 % (changesets, revisions, files, heads))
1678 1682
1679 1683 if changesets > 0:
1680 1684 self.hook('pretxnchangegroup', throw=True,
1681 1685 node=hex(self.changelog.node(cor+1)), source=srctype,
1682 1686 url=url)
1683 1687
1684 1688 tr.close()
1685 1689
1686 1690 if changesets > 0:
1687 1691 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1688 1692 source=srctype, url=url)
1689 1693
1690 1694 for i in range(cor + 1, cnr + 1):
1691 1695 self.hook("incoming", node=hex(self.changelog.node(i)),
1692 1696 source=srctype, url=url)
1693 1697
1694 1698 return newheads - oldheads + 1
1695 1699
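Editor's note: addchangegroup() above reports "added N changesets with M changes to K files (+d heads)" and returns the head delta plus one, so callers can tell "no incoming data" (0) apart from a successful pull that added no new heads (1). A tiny sketch of that convention (the helper name is mine):

    def summarize(oldheads, newheads, changesets, revisions, files):
        heads = ""
        if oldheads and newheads != oldheads:
            heads = " (%+d heads)" % (newheads - oldheads)
        msg = ("added %d changesets with %d changes to %d files%s"
               % (changesets, revisions, files, heads))
        return msg, newheads - oldheads + 1

    msg, ret = summarize(oldheads=1, newheads=2,
                         changesets=3, revisions=5, files=4)
    assert msg == "added 3 changesets with 5 changes to 4 files (+1 heads)"
    assert ret == 2       # a return of 1 would mean "pulled, but no new heads"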
1696 1700 def update(self, node, allow=False, force=False, choose=None,
1697 1701 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1698 1702 pl = self.dirstate.parents()
1699 1703 if not force and pl[1] != nullid:
1700 1704 raise util.Abort(_("outstanding uncommitted merges"))
1701 1705
1702 1706 err = False
1703 1707
1704 1708 p1, p2 = pl[0], node
1705 1709 pa = self.changelog.ancestor(p1, p2)
1706 1710 m1n = self.changelog.read(p1)[0]
1707 1711 m2n = self.changelog.read(p2)[0]
1708 1712 man = self.manifest.ancestor(m1n, m2n)
1709 1713 m1 = self.manifest.read(m1n)
1710 1714 mf1 = self.manifest.readflags(m1n)
1711 1715 m2 = self.manifest.read(m2n).copy()
1712 1716 mf2 = self.manifest.readflags(m2n)
1713 1717 ma = self.manifest.read(man)
1714 1718 mfa = self.manifest.readflags(man)
1715 1719
1716 1720 modified, added, removed, deleted, unknown = self.changes()
1717 1721
1718 1722 # is this a jump, or a merge? i.e. is there a linear path
1719 1723 # from p1 to p2?
1720 1724 linear_path = (pa == p1 or pa == p2)
1721 1725
1722 1726 if allow and linear_path:
1723 1727 raise util.Abort(_("there is nothing to merge, just use "
1724 1728 "'hg update' or look at 'hg heads'"))
1725 1729 if allow and not forcemerge:
1726 1730 if modified or added or removed:
1727 1731 raise util.Abort(_("outstanding uncommitted changes"))
1728 1732
1729 1733 if not forcemerge and not force:
1730 1734 for f in unknown:
1731 1735 if f in m2:
1732 1736 t1 = self.wread(f)
1733 1737 t2 = self.file(f).read(m2[f])
1734 1738 if cmp(t1, t2) != 0:
1735 1739 raise util.Abort(_("'%s' already exists in the working"
1736 1740 " dir and differs from remote") % f)
1737 1741
1738 1742 # resolve the manifest to determine which files
1739 1743 # we care about merging
1740 1744 self.ui.note(_("resolving manifests\n"))
1741 1745 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1742 1746 (force, allow, moddirstate, linear_path))
1743 1747 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1744 1748 (short(man), short(m1n), short(m2n)))
1745 1749
1746 1750 merge = {}
1747 1751 get = {}
1748 1752 remove = []
1749 1753
1750 1754 # construct a working dir manifest
1751 1755 mw = m1.copy()
1752 1756 mfw = mf1.copy()
1753 1757 umap = dict.fromkeys(unknown)
1754 1758
1755 1759 for f in added + modified + unknown:
1756 1760 mw[f] = ""
1757 1761 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1758 1762
1759 1763 if moddirstate and not wlock:
1760 1764 wlock = self.wlock()
1761 1765
1762 1766 for f in deleted + removed:
1763 1767 if f in mw:
1764 1768 del mw[f]
1765 1769
1766 1770 # If we're jumping between revisions (as opposed to merging),
1767 1771 # and if neither the working directory nor the target rev has
1768 1772 # the file, then we need to remove it from the dirstate, to
1769 1773 # prevent the dirstate from listing the file when it is no
1770 1774 # longer in the manifest.
1771 1775 if moddirstate and linear_path and f not in m2:
1772 1776 self.dirstate.forget((f,))
1773 1777
1774 1778 # Compare manifests
1775 1779 for f, n in mw.iteritems():
1776 1780 if choose and not choose(f):
1777 1781 continue
1778 1782 if f in m2:
1779 1783 s = 0
1780 1784
1781 1785 # is the wfile new since m1, and match m2?
1782 1786 if f not in m1:
1783 1787 t1 = self.wread(f)
1784 1788 t2 = self.file(f).read(m2[f])
1785 1789 if cmp(t1, t2) == 0:
1786 1790 n = m2[f]
1787 1791 del t1, t2
1788 1792
1789 1793 # are files different?
1790 1794 if n != m2[f]:
1791 1795 a = ma.get(f, nullid)
1792 1796 # are both different from the ancestor?
1793 1797 if n != a and m2[f] != a:
1794 1798 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1795 1799 # merge executable bits
1796 1800 # "if we changed or they changed, change in merge"
1797 1801 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1798 1802 mode = ((a^b) | (a^c)) ^ a
1799 1803 merge[f] = (m1.get(f, nullid), m2[f], mode)
1800 1804 s = 1
1801 1805 # are we clobbering?
1802 1806 # is remote's version newer?
1803 1807 # or are we going back in time?
1804 1808 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1805 1809 self.ui.debug(_(" remote %s is newer, get\n") % f)
1806 1810 get[f] = m2[f]
1807 1811 s = 1
1808 1812 elif f in umap or f in added:
1809 1813 # this unknown file is the same as the checkout
1810 1814 # we need to reset the dirstate if the file was added
1811 1815 get[f] = m2[f]
1812 1816
1813 1817 if not s and mfw[f] != mf2[f]:
1814 1818 if force:
1815 1819 self.ui.debug(_(" updating permissions for %s\n") % f)
1816 1820 util.set_exec(self.wjoin(f), mf2[f])
1817 1821 else:
1818 1822 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1819 1823 mode = ((a^b) | (a^c)) ^ a
1820 1824 if mode != b:
1821 1825 self.ui.debug(_(" updating permissions for %s\n")
1822 1826 % f)
1823 1827 util.set_exec(self.wjoin(f), mode)
1824 1828 del m2[f]
1825 1829 elif f in ma:
1826 1830 if n != ma[f]:
1827 1831 r = _("d")
1828 1832 if not force and (linear_path or allow):
1829 1833 r = self.ui.prompt(
1830 1834 (_(" local changed %s which remote deleted\n") % f) +
1831 1835 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1832 1836 if r == _("d"):
1833 1837 remove.append(f)
1834 1838 else:
1835 1839 self.ui.debug(_("other deleted %s\n") % f)
1836 1840 remove.append(f) # other deleted it
1837 1841 else:
1838 1842 # file is created on branch or in working directory
1839 1843 if force and f not in umap:
1840 1844 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1841 1845 remove.append(f)
1842 1846 elif n == m1.get(f, nullid): # same as parent
1843 1847 if p2 == pa: # going backwards?
1844 1848 self.ui.debug(_("remote deleted %s\n") % f)
1845 1849 remove.append(f)
1846 1850 else:
1847 1851 self.ui.debug(_("local modified %s, keeping\n") % f)
1848 1852 else:
1849 1853 self.ui.debug(_("working dir created %s, keeping\n") % f)
1850 1854
1851 1855 for f, n in m2.iteritems():
1852 1856 if choose and not choose(f):
1853 1857 continue
1854 1858 if f[0] == "/":
1855 1859 continue
1856 1860 if f in ma and n != ma[f]:
1857 1861 r = _("k")
1858 1862 if not force and (linear_path or allow):
1859 1863 r = self.ui.prompt(
1860 1864 (_("remote changed %s which local deleted\n") % f) +
1861 1865 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1862 1866 if r == _("k"):
1863 1867 get[f] = n
1864 1868 elif f not in ma:
1865 1869 self.ui.debug(_("remote created %s\n") % f)
1866 1870 get[f] = n
1867 1871 else:
1868 1872 if force or p2 == pa: # going backwards?
1869 1873 self.ui.debug(_("local deleted %s, recreating\n") % f)
1870 1874 get[f] = n
1871 1875 else:
1872 1876 self.ui.debug(_("local deleted %s\n") % f)
1873 1877
1874 1878 del mw, m1, m2, ma
1875 1879
1876 1880 if force:
1877 1881 for f in merge:
1878 1882 get[f] = merge[f][1]
1879 1883 merge = {}
1880 1884
1881 1885 if linear_path or force:
1882 1886 # we don't need to do any magic, just jump to the new rev
1883 1887 branch_merge = False
1884 1888 p1, p2 = p2, nullid
1885 1889 else:
1886 1890 if not allow:
1887 1891 self.ui.status(_("this update spans a branch"
1888 1892 " affecting the following files:\n"))
1889 1893 fl = merge.keys() + get.keys()
1890 1894 fl.sort()
1891 1895 for f in fl:
1892 1896 cf = ""
1893 1897 if f in merge:
1894 1898 cf = _(" (resolve)")
1895 1899 self.ui.status(" %s%s\n" % (f, cf))
1896 1900 self.ui.warn(_("aborting update spanning branches!\n"))
1897 1901 self.ui.status(_("(use 'hg merge' to merge across branches"
1898 1902 " or 'hg update -C' to lose changes)\n"))
1899 1903 return 1
1900 1904 branch_merge = True
1901 1905
1902 1906 xp1 = hex(p1)
1903 1907 xp2 = hex(p2)
1904 1908 if p2 == nullid: xxp2 = ''
1905 1909 else: xxp2 = xp2
1906 1910
1907 1911 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1908 1912
1909 1913 # get the files we don't need to change
1910 1914 files = get.keys()
1911 1915 files.sort()
1912 1916 for f in files:
1913 1917 if f[0] == "/":
1914 1918 continue
1915 1919 self.ui.note(_("getting %s\n") % f)
1916 1920 t = self.file(f).read(get[f])
1917 1921 self.wwrite(f, t)
1918 1922 util.set_exec(self.wjoin(f), mf2[f])
1919 1923 if moddirstate:
1920 1924 if branch_merge:
1921 1925 self.dirstate.update([f], 'n', st_mtime=-1)
1922 1926 else:
1923 1927 self.dirstate.update([f], 'n')
1924 1928
1925 1929 # merge the tricky bits
1926 1930 failedmerge = []
1927 1931 files = merge.keys()
1928 1932 files.sort()
1929 1933 for f in files:
1930 1934 self.ui.status(_("merging %s\n") % f)
1931 1935 my, other, flag = merge[f]
1932 1936 ret = self.merge3(f, my, other, xp1, xp2)
1933 1937 if ret:
1934 1938 err = True
1935 1939 failedmerge.append(f)
1936 1940 util.set_exec(self.wjoin(f), flag)
1937 1941 if moddirstate:
1938 1942 if branch_merge:
1939 1943 # We've done a branch merge, mark this file as merged
1940 1944 # so that we properly record the merger later
1941 1945 self.dirstate.update([f], 'm')
1942 1946 else:
1943 1947 # We've update-merged a locally modified file, so
1944 1948 # we set the dirstate to emulate a normal checkout
1945 1949 # of that file some time in the past. Thus our
1946 1950 # merge will appear as a normal local file
1947 1951 # modification.
1948 1952 f_len = len(self.file(f).read(other))
1949 1953 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1950 1954
1951 1955 remove.sort()
1952 1956 for f in remove:
1953 1957 self.ui.note(_("removing %s\n") % f)
1954 1958 util.audit_path(f)
1955 1959 try:
1956 1960 util.unlink(self.wjoin(f))
1957 1961 except OSError, inst:
1958 1962 if inst.errno != errno.ENOENT:
1959 1963 self.ui.warn(_("update failed to remove %s: %s!\n") %
1960 1964 (f, inst.strerror))
1961 1965 if moddirstate:
1962 1966 if branch_merge:
1963 1967 self.dirstate.update(remove, 'r')
1964 1968 else:
1965 1969 self.dirstate.forget(remove)
1966 1970
1967 1971 if moddirstate:
1968 1972 self.dirstate.setparents(p1, p2)
1969 1973
1970 1974 if show_stats:
1971 1975 stats = ((len(get), _("updated")),
1972 1976 (len(merge) - len(failedmerge), _("merged")),
1973 1977 (len(remove), _("removed")),
1974 1978 (len(failedmerge), _("unresolved")))
1975 1979 note = ", ".join([_("%d files %s") % s for s in stats])
1976 1980 self.ui.status("%s\n" % note)
1977 1981 if moddirstate:
1978 1982 if branch_merge:
1979 1983 if failedmerge:
1980 1984 self.ui.status(_("There are unresolved merges,"
1981 1985 " you can redo the full merge using:\n"
1982 1986 " hg update -C %s\n"
1983 1987 " hg merge %s\n"
1984 1988 % (self.changelog.rev(p1),
1985 1989 self.changelog.rev(p2))))
1986 1990 else:
1987 1991 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1988 1992 elif failedmerge:
1989 1993 self.ui.status(_("There are unresolved merges with"
1990 1994 " locally modified files.\n"))
1991 1995
1992 1996 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1993 1997 return err
1994 1998
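Editor's note: both permission-handling branches in update() above merge the executable bit with ((a ^ b) | (a ^ c)) ^ a, where a is the ancestor's bit, b the working copy's and c the other side's: whoever changed the bit wins, and if both changed it the result moves away from the ancestor. A worked truth table as a standalone check (not part of the original code):

    def merge_exec_bit(a, b, c):
        # a: ancestor, b: local/working, c: remote/other (0 or 1 each)
        return ((a ^ b) | (a ^ c)) ^ a

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                result = merge_exec_bit(a, b, c)
                if b == a and c == a:
                    assert result == a     # nobody changed it
                elif b != a and c == a:
                    assert result == b     # only we changed it: keep ours
                elif c != a and b == a:
                    assert result == c     # only they changed it: take theirs
                else:
                    assert result != a     # both changed it: move off ancestor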
1995 1999 def merge3(self, fn, my, other, p1, p2):
1996 2000 """perform a 3-way merge in the working directory"""
1997 2001
1998 2002 def temp(prefix, node):
1999 2003 pre = "%s~%s." % (os.path.basename(fn), prefix)
2000 2004 (fd, name) = tempfile.mkstemp(prefix=pre)
2001 2005 f = os.fdopen(fd, "wb")
2002 2006 self.wwrite(fn, fl.read(node), f)
2003 2007 f.close()
2004 2008 return name
2005 2009
2006 2010 fl = self.file(fn)
2007 2011 base = fl.ancestor(my, other)
2008 2012 a = self.wjoin(fn)
2009 2013 b = temp("base", base)
2010 2014 c = temp("other", other)
2011 2015
2012 2016 self.ui.note(_("resolving %s\n") % fn)
2013 2017 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
2014 2018 (fn, short(my), short(other), short(base)))
2015 2019
2016 2020 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
2017 2021 or "hgmerge")
2018 2022 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2019 2023 environ={'HG_FILE': fn,
2020 2024 'HG_MY_NODE': p1,
2021 2025 'HG_OTHER_NODE': p2,
2022 2026 'HG_FILE_MY_NODE': hex(my),
2023 2027 'HG_FILE_OTHER_NODE': hex(other),
2024 2028 'HG_FILE_BASE_NODE': hex(base)})
2025 2029 if r:
2026 2030 self.ui.warn(_("merging %s failed!\n") % fn)
2027 2031
2028 2032 os.unlink(b)
2029 2033 os.unlink(c)
2030 2034 return r
2031 2035
2032 2036 def verify(self):
2033 2037 filelinkrevs = {}
2034 2038 filenodes = {}
2035 2039 changesets = revisions = files = 0
2036 2040 errors = [0]
2037 2041 warnings = [0]
2038 2042 neededmanifests = {}
2039 2043
2040 2044 def err(msg):
2041 2045 self.ui.warn(msg + "\n")
2042 2046 errors[0] += 1
2043 2047
2044 2048 def warn(msg):
2045 2049 self.ui.warn(msg + "\n")
2046 2050 warnings[0] += 1
2047 2051
2048 2052 def checksize(obj, name):
2049 2053 d = obj.checksize()
2050 2054 if d[0]:
2051 2055 err(_("%s data length off by %d bytes") % (name, d[0]))
2052 2056 if d[1]:
2053 2057 err(_("%s index contains %d extra bytes") % (name, d[1]))
2054 2058
2055 2059 def checkversion(obj, name):
2056 2060 if obj.version != revlog.REVLOGV0:
2057 2061 if not revlogv1:
2058 2062 warn(_("warning: `%s' uses revlog format 1") % name)
2059 2063 elif revlogv1:
2060 2064 warn(_("warning: `%s' uses revlog format 0") % name)
2061 2065
2062 2066 revlogv1 = self.revlogversion != revlog.REVLOGV0
2063 2067 if self.ui.verbose or revlogv1 != self.revlogv1:
2064 2068 self.ui.status(_("repository uses revlog format %d\n") %
2065 2069 (revlogv1 and 1 or 0))
2066 2070
2067 2071 seen = {}
2068 2072 self.ui.status(_("checking changesets\n"))
2069 2073 checksize(self.changelog, "changelog")
2070 2074
2071 2075 for i in range(self.changelog.count()):
2072 2076 changesets += 1
2073 2077 n = self.changelog.node(i)
2074 2078 l = self.changelog.linkrev(n)
2075 2079 if l != i:
2076 2080 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2077 2081 if n in seen:
2078 2082 err(_("duplicate changeset at revision %d") % i)
2079 2083 seen[n] = 1
2080 2084
2081 2085 for p in self.changelog.parents(n):
2082 2086 if p not in self.changelog.nodemap:
2083 2087 err(_("changeset %s has unknown parent %s") %
2084 2088 (short(n), short(p)))
2085 2089 try:
2086 2090 changes = self.changelog.read(n)
2087 2091 except KeyboardInterrupt:
2088 2092 self.ui.warn(_("interrupted"))
2089 2093 raise
2090 2094 except Exception, inst:
2091 2095 err(_("unpacking changeset %s: %s") % (short(n), inst))
2092 2096 continue
2093 2097
2094 2098 neededmanifests[changes[0]] = n
2095 2099
2096 2100 for f in changes[3]:
2097 2101 filelinkrevs.setdefault(f, []).append(i)
2098 2102
2099 2103 seen = {}
2100 2104 self.ui.status(_("checking manifests\n"))
2101 2105 checkversion(self.manifest, "manifest")
2102 2106 checksize(self.manifest, "manifest")
2103 2107
2104 2108 for i in range(self.manifest.count()):
2105 2109 n = self.manifest.node(i)
2106 2110 l = self.manifest.linkrev(n)
2107 2111
2108 2112 if l < 0 or l >= self.changelog.count():
2109 2113 err(_("bad manifest link (%d) at revision %d") % (l, i))
2110 2114
2111 2115 if n in neededmanifests:
2112 2116 del neededmanifests[n]
2113 2117
2114 2118 if n in seen:
2115 2119 err(_("duplicate manifest at revision %d") % i)
2116 2120
2117 2121 seen[n] = 1
2118 2122
2119 2123 for p in self.manifest.parents(n):
2120 2124 if p not in self.manifest.nodemap:
2121 2125 err(_("manifest %s has unknown parent %s") %
2122 2126 (short(n), short(p)))
2123 2127
2124 2128 try:
2125 2129 delta = mdiff.patchtext(self.manifest.delta(n))
2126 2130 except KeyboardInterrupt:
2127 2131 self.ui.warn(_("interrupted"))
2128 2132 raise
2129 2133 except Exception, inst:
2130 2134 err(_("unpacking manifest %s: %s") % (short(n), inst))
2131 2135 continue
2132 2136
2133 2137 try:
2134 2138 ff = [ l.split('\0') for l in delta.splitlines() ]
2135 2139 for f, fn in ff:
2136 2140 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2137 2141 except (ValueError, TypeError), inst:
2138 2142 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2139 2143
2140 2144 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2141 2145
2142 2146 for m, c in neededmanifests.items():
2143 2147 err(_("Changeset %s refers to unknown manifest %s") %
2144 2148 (short(m), short(c)))
2145 2149 del neededmanifests
2146 2150
2147 2151 for f in filenodes:
2148 2152 if f not in filelinkrevs:
2149 2153 err(_("file %s in manifest but not in changesets") % f)
2150 2154
2151 2155 for f in filelinkrevs:
2152 2156 if f not in filenodes:
2153 2157 err(_("file %s in changeset but not in manifest") % f)
2154 2158
2155 2159 self.ui.status(_("checking files\n"))
2156 2160 ff = filenodes.keys()
2157 2161 ff.sort()
2158 2162 for f in ff:
2159 2163 if f == "/dev/null":
2160 2164 continue
2161 2165 files += 1
2162 2166 if not f:
2163 2167 err(_("file without name in manifest %s") % short(n))
2164 2168 continue
2165 2169 fl = self.file(f)
2166 2170 checkversion(fl, f)
2167 2171 checksize(fl, f)
2168 2172
2169 2173 nodes = {nullid: 1}
2170 2174 seen = {}
2171 2175 for i in range(fl.count()):
2172 2176 revisions += 1
2173 2177 n = fl.node(i)
2174 2178
2175 2179 if n in seen:
2176 2180 err(_("%s: duplicate revision %d") % (f, i))
2177 2181 if n not in filenodes[f]:
2178 2182 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2179 2183 else:
2180 2184 del filenodes[f][n]
2181 2185
2182 2186 flr = fl.linkrev(n)
2183 2187 if flr not in filelinkrevs.get(f, []):
2184 2188 err(_("%s:%s points to unexpected changeset %d")
2185 2189 % (f, short(n), flr))
2186 2190 else:
2187 2191 filelinkrevs[f].remove(flr)
2188 2192
2189 2193 # verify contents
2190 2194 try:
2191 2195 t = fl.read(n)
2192 2196 except KeyboardInterrupt:
2193 2197 self.ui.warn(_("interrupted"))
2194 2198 raise
2195 2199 except Exception, inst:
2196 2200 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2197 2201
2198 2202 # verify parents
2199 2203 (p1, p2) = fl.parents(n)
2200 2204 if p1 not in nodes:
2201 2205 err(_("file %s:%s unknown parent 1 %s") %
2202 2206 (f, short(n), short(p1)))
2203 2207 if p2 not in nodes:
2204 2208 err(_("file %s:%s unknown parent 2 %s") %
2205 2209 (f, short(n), short(p2)))
2206 2210 nodes[n] = 1
2207 2211
2208 2212 # cross-check
2209 2213 for node in filenodes[f]:
2210 2214 err(_("node %s in manifests not in %s") % (hex(node), f))
2211 2215
2212 2216 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2213 2217 (files, changesets, revisions))
2214 2218
2215 2219 if warnings[0]:
2216 2220 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2217 2221 if errors[0]:
2218 2222 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2219 2223 return 1
2220 2224
2221 2225 def stream_in(self, remote):
2222 2226 fp = remote.stream_out()
2223 2227 resp = int(fp.readline())
2224 2228 if resp != 0:
2225 2229 raise util.Abort(_('operation forbidden by server'))
2226 2230 self.ui.status(_('streaming all changes\n'))
2227 2231 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
2228 2232 self.ui.status(_('%d files to transfer, %s of data\n') %
2229 2233 (total_files, util.bytecount(total_bytes)))
2230 2234 start = time.time()
2231 2235 for i in xrange(total_files):
2232 2236 name, size = fp.readline().split('\0', 1)
2233 2237 size = int(size)
2234 2238 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2235 2239 ofp = self.opener(name, 'w')
2236 2240 for chunk in util.filechunkiter(fp, limit=size):
2237 2241 ofp.write(chunk)
2238 2242 ofp.close()
2239 2243 elapsed = time.time() - start
2240 2244 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2241 2245 (util.bytecount(total_bytes), elapsed,
2242 2246 util.bytecount(total_bytes / elapsed)))
2243 2247 self.reload()
2244 2248 return len(self.heads()) + 1
2245 2249
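Editor's note: stream_in() above expects a very simple wire format from the server: a status line ("0" means the operation is allowed), a line with "<total_files> <total_bytes>", and then, for each file, a header line "<name>\0<size>" followed by exactly size bytes of raw store data. A minimal sketch of a producer for that format, so the parsing loop above is easier to follow (the file name and payload are invented):

    def encode_stream(files):
        """files: list of (store_path, bytes_payload) pairs.
        Returns the byte stream that stream_in() above knows how to parse."""
        total_bytes = sum(len(data) for name, data in files)
        out = [b"0\n"]                                  # 0 = operation allowed
        out.append(("%d %d\n" % (len(files), total_bytes)).encode())
        for name, data in files:
            out.append(("%s\0%d\n" % (name, len(data))).encode())
            out.append(data)                            # exactly 'size' raw bytes
        return b"".join(out)

    stream = encode_stream([("data/foo.i", b"\x00" * 16)])
    assert stream.startswith(b"0\n1 16\n" + b"data/foo.i\0" + b"16\n")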
2246 2250 def clone(self, remote, heads=[], stream=False):
2247 2251 '''clone remote repository.
2248 2252
2249 2253 keyword arguments:
2250 2254 heads: list of revs to clone (forces use of pull)
2251 2255 stream: use streaming clone if possible'''
2252 2256
2253 2257 # now, all clients that can request uncompressed clones can
2254 2258 # read repo formats supported by all servers that can serve
2255 2259 # them.
2256 2260
2257 2261 # if revlog format changes, client will have to check version
2258 2262 # and format flags on "stream" capability, and use
2259 2263 # uncompressed only if compatible.
2260 2264
2261 2265 if stream and not heads and remote.capable('stream'):
2262 2266 return self.stream_in(remote)
2263 2267 return self.pull(remote, heads)
2264 2268
2265 2269 # used to avoid circular references so destructors work
2266 2270 def aftertrans(base):
2267 2271 p = base
2268 2272 def a():
2269 2273 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2270 2274 util.rename(os.path.join(p, "journal.dirstate"),
2271 2275 os.path.join(p, "undo.dirstate"))
2272 2276 return a
2273 2277
2274 2278 def instance(ui, path, create):
2275 2279 return localrepository(ui, util.drop_scheme('file', path), create)
2276 2280
2277 2281 def islocal(path):
2278 2282 return True