New UnexpectedOutput exception to catch server errors in localrepo.stream_in...
Thomas Arendsen Hein
r3564:eda9e7c9 default
@@ -1,3548 +1,3557 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup")
16 16 demandload(globals(), "cmdutil hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 24 modified, added, removed, deleted = repo.status()[:4]
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def logmessage(opts):
35 35 """ get the log message according to -m and -l option """
36 36 message = opts['message']
37 37 logfile = opts['logfile']
38 38
39 39 if message and logfile:
40 40 raise util.Abort(_('options --message and --logfile are mutually '
41 41 'exclusive'))
42 42 if not message and logfile:
43 43 try:
44 44 if logfile == '-':
45 45 message = sys.stdin.read()
46 46 else:
47 47 message = open(logfile).read()
48 48 except IOError, inst:
49 49 raise util.Abort(_("can't read commit message '%s': %s") %
50 50 (logfile, inst.strerror))
51 51 return message
52 52
53 53 def walkchangerevs(ui, repo, pats, opts):
54 54 '''Iterate over files and the revs they changed in.
55 55
56 56 Callers most commonly need to iterate backwards over the history
57 57 they are interested in. Doing so has awful (quadratic-looking)
58 58 performance, so we use iterators in a "windowed" way.
59 59
60 60 We walk a window of revisions in the desired order. Within the
61 61 window, we first walk forwards to gather data, then in the desired
62 62 order (usually backwards) to display it.
63 63
64 64 This function returns an (iterator, getchange, matchfn) tuple. The
65 65 getchange function returns the changelog entry for a numeric
66 66 revision. The iterator yields 3-tuples. They will be of one of
67 67 the following forms:
68 68
69 69 "window", incrementing, lastrev: stepping through a window,
70 70 positive if walking forwards through revs, last rev in the
71 71 sequence iterated over - use to reset state for the current window
72 72
73 73 "add", rev, fns: out-of-order traversal of the given file names
74 74 fns, which changed during revision rev - use to gather data for
75 75 possible display
76 76
77 77 "iter", rev, None: in-order traversal of the revs earlier iterated
78 78 over with "add" - use to display data'''
79 79
80 80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
81 81 if start < end:
82 82 while start < end:
83 83 yield start, min(windowsize, end-start)
84 84 start += windowsize
85 85 if windowsize < sizelimit:
86 86 windowsize *= 2
87 87 else:
88 88 while start > end:
89 89 yield start, min(windowsize, start-end-1)
90 90 start -= windowsize
91 91 if windowsize < sizelimit:
92 92 windowsize *= 2
93 93
94 94
95 95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
96 96 follow = opts.get('follow') or opts.get('follow_first')
97 97
98 98 if repo.changelog.count() == 0:
99 99 return [], False, matchfn
100 100
101 101 if follow:
102 102 defrange = '%s:0' % repo.changectx().rev()
103 103 else:
104 104 defrange = 'tip:0'
105 105 revs = map(int, cmdutil.revrange(ui, repo, opts['rev'] or [defrange]))
106 106 wanted = {}
107 107 slowpath = anypats
108 108 fncache = {}
109 109
110 110 chcache = {}
111 111 def getchange(rev):
112 112 ch = chcache.get(rev)
113 113 if ch is None:
114 114 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
115 115 return ch
116 116
117 117 if not slowpath and not files:
118 118 # No files, no patterns. Display all revs.
119 119 wanted = dict(zip(revs, revs))
120 120 copies = []
121 121 if not slowpath:
122 122 # Only files, no patterns. Check the history of each file.
123 123 def filerevgen(filelog, node):
124 124 cl_count = repo.changelog.count()
125 125 if node is None:
126 126 last = filelog.count() - 1
127 127 else:
128 128 last = filelog.rev(node)
129 129 for i, window in increasing_windows(last, -1):
130 130 revs = []
131 131 for j in xrange(i - window, i + 1):
132 132 n = filelog.node(j)
133 133 revs.append((filelog.linkrev(n),
134 134 follow and filelog.renamed(n)))
135 135 revs.reverse()
136 136 for rev in revs:
137 137 # only yield rev for which we have the changelog, it can
138 138 # happen while doing "hg log" during a pull or commit
139 139 if rev[0] < cl_count:
140 140 yield rev
141 141 def iterfiles():
142 142 for filename in files:
143 143 yield filename, None
144 144 for filename_node in copies:
145 145 yield filename_node
146 146 minrev, maxrev = min(revs), max(revs)
147 147 for file_, node in iterfiles():
148 148 filelog = repo.file(file_)
149 149 # A zero count may be a directory or deleted file, so
150 150 # try to find matching entries on the slow path.
151 151 if filelog.count() == 0:
152 152 slowpath = True
153 153 break
154 154 for rev, copied in filerevgen(filelog, node):
155 155 if rev <= maxrev:
156 156 if rev < minrev:
157 157 break
158 158 fncache.setdefault(rev, [])
159 159 fncache[rev].append(file_)
160 160 wanted[rev] = 1
161 161 if follow and copied:
162 162 copies.append(copied)
163 163 if slowpath:
164 164 if follow:
165 165 raise util.Abort(_('can only follow copies/renames for explicit '
166 166 'file names'))
167 167
168 168 # The slow path checks files modified in every changeset.
169 169 def changerevgen():
170 170 for i, window in increasing_windows(repo.changelog.count()-1, -1):
171 171 for j in xrange(i - window, i + 1):
172 172 yield j, getchange(j)[3]
173 173
174 174 for rev, changefiles in changerevgen():
175 175 matches = filter(matchfn, changefiles)
176 176 if matches:
177 177 fncache[rev] = matches
178 178 wanted[rev] = 1
179 179
180 180 class followfilter:
181 181 def __init__(self, onlyfirst=False):
182 182 self.startrev = -1
183 183 self.roots = []
184 184 self.onlyfirst = onlyfirst
185 185
186 186 def match(self, rev):
187 187 def realparents(rev):
188 188 if self.onlyfirst:
189 189 return repo.changelog.parentrevs(rev)[0:1]
190 190 else:
191 191 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
192 192
193 193 if self.startrev == -1:
194 194 self.startrev = rev
195 195 return True
196 196
197 197 if rev > self.startrev:
198 198 # forward: all descendants
199 199 if not self.roots:
200 200 self.roots.append(self.startrev)
201 201 for parent in realparents(rev):
202 202 if parent in self.roots:
203 203 self.roots.append(rev)
204 204 return True
205 205 else:
206 206 # backwards: all parents
207 207 if not self.roots:
208 208 self.roots.extend(realparents(self.startrev))
209 209 if rev in self.roots:
210 210 self.roots.remove(rev)
211 211 self.roots.extend(realparents(rev))
212 212 return True
213 213
214 214 return False
215 215
216 216 # it might be worthwhile to do this in the iterator if the rev range
217 217 # is descending and the prune args are all within that range
218 218 for rev in opts.get('prune', ()):
219 219 rev = repo.changelog.rev(repo.lookup(rev))
220 220 ff = followfilter()
221 221 stop = min(revs[0], revs[-1])
222 222 for x in xrange(rev, stop-1, -1):
223 223 if ff.match(x) and wanted.has_key(x):
224 224 del wanted[x]
225 225
226 226 def iterate():
227 227 if follow and not files:
228 228 ff = followfilter(onlyfirst=opts.get('follow_first'))
229 229 def want(rev):
230 230 if ff.match(rev) and rev in wanted:
231 231 return True
232 232 return False
233 233 else:
234 234 def want(rev):
235 235 return rev in wanted
236 236
237 237 for i, window in increasing_windows(0, len(revs)):
238 238 yield 'window', revs[0] < revs[-1], revs[-1]
239 239 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
240 240 srevs = list(nrevs)
241 241 srevs.sort()
242 242 for rev in srevs:
243 243 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
244 244 yield 'add', rev, fns
245 245 for rev in nrevs:
246 246 yield 'iter', rev, None
247 247 return iterate(), getchange, matchfn
248 248
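# Editorial sketch (not part of this changeset): how a caller is expected to
# consume the (iterator, getchange, matchfn) triple described in the docstring
# above; process() and display() are hypothetical helpers standing in for the
# gathering and output code of commands such as log or grep.
#
#   changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   gathered = {}
#   for st, rev, data in changeiter:
#       if st == 'window':
#           gathered.clear()                    # reset state for this window
#       elif st == 'add':
#           gathered[rev] = process(rev, data)  # out-of-order data gathering
#       elif st == 'iter':
#           display(rev, gathered.get(rev))     # output in the requested order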
249 249 def write_bundle(cg, filename=None, compress=True):
250 250 """Write a bundle file and return its filename.
251 251
252 252 Existing files will not be overwritten.
253 253 If no filename is specified, a temporary file is created.
254 254 bz2 compression can be turned off.
255 255 The bundle file will be deleted in case of errors.
256 256 """
257 257 class nocompress(object):
258 258 def compress(self, x):
259 259 return x
260 260 def flush(self):
261 261 return ""
262 262
263 263 fh = None
264 264 cleanup = None
265 265 try:
266 266 if filename:
267 267 if os.path.exists(filename):
268 268 raise util.Abort(_("file '%s' already exists") % filename)
269 269 fh = open(filename, "wb")
270 270 else:
271 271 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
272 272 fh = os.fdopen(fd, "wb")
273 273 cleanup = filename
274 274
275 275 if compress:
276 276 fh.write("HG10")
277 277 z = bz2.BZ2Compressor(9)
278 278 else:
279 279 fh.write("HG10UN")
280 280 z = nocompress()
281 281 # parse the changegroup data, otherwise we will block
282 282 # in case of sshrepo because we don't know the end of the stream
283 283
284 284 # an empty chunkiter is the end of the changegroup
285 285 empty = False
286 286 while not empty:
287 287 empty = True
288 288 for chunk in changegroup.chunkiter(cg):
289 289 empty = False
290 290 fh.write(z.compress(changegroup.genchunk(chunk)))
291 291 fh.write(z.compress(changegroup.closechunk()))
292 292 fh.write(z.flush())
293 293 cleanup = None
294 294 return filename
295 295 finally:
296 296 if fh is not None:
297 297 fh.close()
298 298 if cleanup is not None:
299 299 os.unlink(cleanup)
300 300
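# Editorial note (not part of this changeset): a minimal usage sketch for the
# helper above, assuming a changegroup obtained from the local repository
# ('nodes' is a hypothetical list of outgoing base nodes):
#
#   cg = repo.changegroup(nodes, 'bundle')
#   fname = write_bundle(cg, filename='changes.hg', compress=False)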
301 301 def trimuser(ui, name, rev, revcache):
302 302 """trim the name of the user who committed a change"""
303 303 user = revcache.get(rev)
304 304 if user is None:
305 305 user = revcache[rev] = ui.shortuser(name)
306 306 return user
307 307
308 308 class changeset_printer(object):
309 309 '''show changeset information when templating not requested.'''
310 310
311 311 def __init__(self, ui, repo):
312 312 self.ui = ui
313 313 self.repo = repo
314 314
315 315 def show(self, rev=0, changenode=None, brinfo=None, copies=None):
316 316 '''show a single changeset or file revision'''
317 317 log = self.repo.changelog
318 318 if changenode is None:
319 319 changenode = log.node(rev)
320 320 elif not rev:
321 321 rev = log.rev(changenode)
322 322
323 323 if self.ui.quiet:
324 324 self.ui.write("%d:%s\n" % (rev, short(changenode)))
325 325 return
326 326
327 327 changes = log.read(changenode)
328 328 date = util.datestr(changes[2])
329 329 extra = changes[5]
330 330 branch = extra.get("branch")
331 331
332 332 hexfunc = self.ui.debugflag and hex or short
333 333
334 334 parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
335 335 if self.ui.debugflag or p != nullid]
336 336 if (not self.ui.debugflag and len(parents) == 1 and
337 337 parents[0][0] == rev-1):
338 338 parents = []
339 339
340 340 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
341 341
342 342 if branch:
343 343 self.ui.status(_("branch: %s\n") % branch)
344 344 for tag in self.repo.nodetags(changenode):
345 345 self.ui.status(_("tag: %s\n") % tag)
346 346 for parent in parents:
347 347 self.ui.write(_("parent: %d:%s\n") % parent)
348 348
349 349 if brinfo and changenode in brinfo:
350 350 br = brinfo[changenode]
351 351 self.ui.write(_("branch: %s\n") % " ".join(br))
352 352
353 353 self.ui.debug(_("manifest: %d:%s\n") %
354 354 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
355 355 self.ui.status(_("user: %s\n") % changes[1])
356 356 self.ui.status(_("date: %s\n") % date)
357 357
358 358 if self.ui.debugflag:
359 359 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
360 360 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
361 361 files):
362 362 if value:
363 363 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
364 364 elif changes[3]:
365 365 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
366 366 if copies:
367 367 copies = ['%s (%s)' % c for c in copies]
368 368 self.ui.note(_("copies: %s\n") % ' '.join(copies))
369 369
370 370 if extra and self.ui.debugflag:
371 371 extraitems = extra.items()
372 372 extraitems.sort()
373 373 for key, value in extraitems:
374 374 self.ui.debug(_("extra: %s=%s\n")
375 375 % (key, value.encode('string_escape')))
376 376
377 377 description = changes[4].strip()
378 378 if description:
379 379 if self.ui.verbose:
380 380 self.ui.status(_("description:\n"))
381 381 self.ui.status(description)
382 382 self.ui.status("\n\n")
383 383 else:
384 384 self.ui.status(_("summary: %s\n") %
385 385 description.splitlines()[0])
386 386 self.ui.status("\n")
387 387
388 388 def show_changeset(ui, repo, opts):
389 389 """show one changeset using template or regular display.
390 390
391 391 Display format will be the first non-empty hit of:
392 392 1. option 'template'
393 393 2. option 'style'
394 394 3. [ui] setting 'logtemplate'
395 395 4. [ui] setting 'style'
396 396 If all of these values are either unset or the empty string,
397 397 regular display via changeset_printer() is done.
398 398 """
399 399 # options
400 400 tmpl = opts.get('template')
401 401 mapfile = None
402 402 if tmpl:
403 403 tmpl = templater.parsestring(tmpl, quoted=False)
404 404 else:
405 405 mapfile = opts.get('style')
406 406 # ui settings
407 407 if not mapfile:
408 408 tmpl = ui.config('ui', 'logtemplate')
409 409 if tmpl:
410 410 tmpl = templater.parsestring(tmpl)
411 411 else:
412 412 mapfile = ui.config('ui', 'style')
413 413
414 414 if tmpl or mapfile:
415 415 if mapfile:
416 416 if not os.path.split(mapfile)[0]:
417 417 mapname = (templater.templatepath('map-cmdline.' + mapfile)
418 418 or templater.templatepath(mapfile))
419 419 if mapname: mapfile = mapname
420 420 try:
421 421 t = templater.changeset_templater(ui, repo, mapfile)
422 422 except SyntaxError, inst:
423 423 raise util.Abort(inst.args[0])
424 424 if tmpl: t.use_template(tmpl)
425 425 return t
426 426 return changeset_printer(ui, repo)
427 427
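# Editorial note (not part of this changeset): the precedence described in the
# show_changeset docstring means that, with an hgrc like the following
# (template and style names are illustrative),
#
#   [ui]
#   logtemplate = {rev}:{node|short} {desc|firstline}\n
#   style = compact
#
# 'logtemplate' wins over 'style', and an explicit --template or --style on
# the command line overrides both.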
428 428 def setremoteconfig(ui, opts):
429 429 "copy remote options to ui tree"
430 430 if opts.get('ssh'):
431 431 ui.setconfig("ui", "ssh", opts['ssh'])
432 432 if opts.get('remotecmd'):
433 433 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
434 434
435 435 def show_version(ui):
436 436 """output version and copyright information"""
437 437 ui.write(_("Mercurial Distributed SCM (version %s)\n")
438 438 % version.get_version())
439 439 ui.status(_(
440 440 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
441 441 "This is free software; see the source for copying conditions. "
442 442 "There is NO\nwarranty; "
443 443 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
444 444 ))
445 445
446 446 def help_(ui, name=None, with_version=False):
447 447 """show help for a command, extension, or list of commands
448 448
449 449 With no arguments, print a list of commands and short help.
450 450
451 451 Given a command name, print help for that command.
452 452
453 453 Given an extension name, print help for that extension, and the
454 454 commands it provides."""
455 455 option_lists = []
456 456
457 457 def helpcmd(name):
458 458 if with_version:
459 459 show_version(ui)
460 460 ui.write('\n')
461 461 aliases, i = findcmd(ui, name)
462 462 # synopsis
463 463 ui.write("%s\n\n" % i[2])
464 464
465 465 # description
466 466 doc = i[0].__doc__
467 467 if not doc:
468 468 doc = _("(No help text available)")
469 469 if ui.quiet:
470 470 doc = doc.splitlines(0)[0]
471 471 ui.write("%s\n" % doc.rstrip())
472 472
473 473 if not ui.quiet:
474 474 # aliases
475 475 if len(aliases) > 1:
476 476 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
477 477
478 478 # options
479 479 if i[1]:
480 480 option_lists.append(("options", i[1]))
481 481
482 482 def helplist(select=None):
483 483 h = {}
484 484 cmds = {}
485 485 for c, e in table.items():
486 486 f = c.split("|", 1)[0]
487 487 if select and not select(f):
488 488 continue
489 489 if name == "shortlist" and not f.startswith("^"):
490 490 continue
491 491 f = f.lstrip("^")
492 492 if not ui.debugflag and f.startswith("debug"):
493 493 continue
494 494 doc = e[0].__doc__
495 495 if not doc:
496 496 doc = _("(No help text available)")
497 497 h[f] = doc.splitlines(0)[0].rstrip()
498 498 cmds[f] = c.lstrip("^")
499 499
500 500 fns = h.keys()
501 501 fns.sort()
502 502 m = max(map(len, fns))
503 503 for f in fns:
504 504 if ui.verbose:
505 505 commands = cmds[f].replace("|",", ")
506 506 ui.write(" %s:\n %s\n"%(commands, h[f]))
507 507 else:
508 508 ui.write(' %-*s %s\n' % (m, f, h[f]))
509 509
510 510 def helpext(name):
511 511 try:
512 512 mod = findext(name)
513 513 except KeyError:
514 514 raise UnknownCommand(name)
515 515
516 516 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
517 517 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
518 518 for d in doc[1:]:
519 519 ui.write(d, '\n')
520 520
521 521 ui.status('\n')
522 522 if ui.verbose:
523 523 ui.status(_('list of commands:\n\n'))
524 524 else:
525 525 ui.status(_('list of commands (use "hg help -v %s" '
526 526 'to show aliases and global options):\n\n') % name)
527 527
528 528 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
529 529 helplist(modcmds.has_key)
530 530
531 531 if name and name != 'shortlist':
532 532 try:
533 533 helpcmd(name)
534 534 except UnknownCommand:
535 535 helpext(name)
536 536
537 537 else:
538 538 # program name
539 539 if ui.verbose or with_version:
540 540 show_version(ui)
541 541 else:
542 542 ui.status(_("Mercurial Distributed SCM\n"))
543 543 ui.status('\n')
544 544
545 545 # list of commands
546 546 if name == "shortlist":
547 547 ui.status(_('basic commands (use "hg help" '
548 548 'for the full list or option "-v" for details):\n\n'))
549 549 elif ui.verbose:
550 550 ui.status(_('list of commands:\n\n'))
551 551 else:
552 552 ui.status(_('list of commands (use "hg help -v" '
553 553 'to show aliases and global options):\n\n'))
554 554
555 555 helplist()
556 556
557 557 # global options
558 558 if ui.verbose:
559 559 option_lists.append(("global options", globalopts))
560 560
561 561 # list all option lists
562 562 opt_output = []
563 563 for title, options in option_lists:
564 564 opt_output.append(("\n%s:\n" % title, None))
565 565 for shortopt, longopt, default, desc in options:
566 566 if "DEPRECATED" in desc and not ui.verbose: continue
567 567 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
568 568 longopt and " --%s" % longopt),
569 569 "%s%s" % (desc,
570 570 default
571 571 and _(" (default: %s)") % default
572 572 or "")))
573 573
574 574 if opt_output:
575 575 opts_len = max([len(line[0]) for line in opt_output if line[1]])
576 576 for first, second in opt_output:
577 577 if second:
578 578 ui.write(" %-*s %s\n" % (opts_len, first, second))
579 579 else:
580 580 ui.write("%s\n" % first)
581 581
582 582 # Commands start here, listed alphabetically
583 583
584 584 def add(ui, repo, *pats, **opts):
585 585 """add the specified files on the next commit
586 586
587 587 Schedule files to be version controlled and added to the repository.
588 588
589 589 The files will be added to the repository at the next commit.
590 590
591 591 If no names are given, add all files in the repository.
592 592 """
593 593
594 594 names = []
595 595 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
596 596 if exact:
597 597 if ui.verbose:
598 598 ui.status(_('adding %s\n') % rel)
599 599 names.append(abs)
600 600 elif repo.dirstate.state(abs) == '?':
601 601 ui.status(_('adding %s\n') % rel)
602 602 names.append(abs)
603 603 if not opts.get('dry_run'):
604 604 repo.add(names)
605 605
606 606 def addremove(ui, repo, *pats, **opts):
607 607 """add all new files, delete all missing files
608 608
609 609 Add all new files and remove all missing files from the repository.
610 610
611 611 New files are ignored if they match any of the patterns in .hgignore. As
612 612 with add, these changes take effect at the next commit.
613 613
614 614 Use the -s option to detect renamed files. With a parameter > 0,
615 615 this compares every removed file with every added file and records
616 616 those similar enough as renames. This option takes a percentage
617 617 between 0 (disabled) and 100 (files must be identical) as its
618 618 parameter. Detecting renamed files this way can be expensive.
619 619 """
620 620 sim = float(opts.get('similarity') or 0)
621 621 if sim < 0 or sim > 100:
622 622 raise util.Abort(_('similarity must be between 0 and 100'))
623 623 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
624 624
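# Editorial note (not part of this changeset): an illustrative invocation of
# the similarity option described above; with -s 90, a removed/added pair is
# recorded as a rename only if the files are roughly 90% similar:
#
#   $ hg addremove -s 90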
625 625 def annotate(ui, repo, *pats, **opts):
626 626 """show changeset information per file line
627 627
628 628 List changes in files, showing the revision id responsible for each line
629 629
630 630 This command is useful to discover who did a change or when a change took
631 631 place.
632 632
633 633 Without the -a option, annotate will avoid processing files it
634 634 detects as binary. With -a, annotate will generate an annotation
635 635 anyway, probably with undesirable results.
636 636 """
637 637 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
638 638
639 639 if not pats:
640 640 raise util.Abort(_('at least one file name or pattern required'))
641 641
642 642 opmap = [['user', lambda x: ui.shortuser(x.user())],
643 643 ['number', lambda x: str(x.rev())],
644 644 ['changeset', lambda x: short(x.node())],
645 645 ['date', getdate], ['follow', lambda x: x.path()]]
646 646 if (not opts['user'] and not opts['changeset'] and not opts['date']
647 647 and not opts['follow']):
648 648 opts['number'] = 1
649 649
650 650 ctx = repo.changectx(opts['rev'])
651 651
652 652 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
653 653 node=ctx.node()):
654 654 fctx = ctx.filectx(abs)
655 655 if not opts['text'] and util.binary(fctx.data()):
656 656 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
657 657 continue
658 658
659 659 lines = fctx.annotate(follow=opts.get('follow'))
660 660 pieces = []
661 661
662 662 for o, f in opmap:
663 663 if opts[o]:
664 664 l = [f(n) for n, dummy in lines]
665 665 if l:
666 666 m = max(map(len, l))
667 667 pieces.append(["%*s" % (m, x) for x in l])
668 668
669 669 if pieces:
670 670 for p, l in zip(zip(*pieces), lines):
671 671 ui.write("%s: %s" % (" ".join(p), l[1]))
672 672
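# Editorial note (not part of this changeset): an illustrative annotate call
# combining the options handled above ("somefile.py" is hypothetical):
#
#   $ hg annotate --user --number --date somefile.py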
673 673 def archive(ui, repo, dest, **opts):
674 674 '''create unversioned archive of a repository revision
675 675
676 676 By default, the revision used is the parent of the working
677 677 directory; use "-r" to specify a different revision.
678 678
679 679 To specify the type of archive to create, use "-t". Valid
680 680 types are:
681 681
682 682 "files" (default): a directory full of files
683 683 "tar": tar archive, uncompressed
684 684 "tbz2": tar archive, compressed using bzip2
685 685 "tgz": tar archive, compressed using gzip
686 686 "uzip": zip archive, uncompressed
687 687 "zip": zip archive, compressed using deflate
688 688
689 689 The exact name of the destination archive or directory is given
690 690 using a format string; see "hg help export" for details.
691 691
692 692 Each member added to an archive file has a directory prefix
693 693 prepended. Use "-p" to specify a format string for the prefix.
694 694 The default is the basename of the archive, with suffixes removed.
695 695 '''
696 696
697 697 node = repo.changectx(opts['rev']).node()
698 698 dest = cmdutil.make_filename(repo, dest, node)
699 699 if os.path.realpath(dest) == repo.root:
700 700 raise util.Abort(_('repository root cannot be destination'))
701 701 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
702 702 kind = opts.get('type') or 'files'
703 703 prefix = opts['prefix']
704 704 if dest == '-':
705 705 if kind == 'files':
706 706 raise util.Abort(_('cannot archive plain files to stdout'))
707 707 dest = sys.stdout
708 708 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
709 709 prefix = cmdutil.make_filename(repo, prefix, node)
710 710 archival.archive(repo, dest, node, kind, not opts['no_decode'],
711 711 matchfn, prefix)
712 712
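# Editorial note (not part of this changeset): an illustrative archive call
# using the type and format-string rules described above (tag "1.0" is
# hypothetical; %h expands to the short changeset hash):
#
#   $ hg archive -t tgz -r 1.0 ../myproject-%h.tar.gz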
713 713 def backout(ui, repo, rev, **opts):
714 714 '''reverse effect of earlier changeset
715 715
716 716 Commit the backed out changes as a new changeset. The new
717 717 changeset is a child of the backed out changeset.
718 718
719 719 If you back out a changeset other than the tip, a new head is
720 720 created. This head is the parent of the working directory. If
721 721 you back out an old changeset, your working directory will appear
722 722 old after the backout. You should merge the backout changeset
723 723 with another head.
724 724
725 725 The --merge option remembers the parent of the working directory
726 726 before starting the backout, then merges the new head with that
727 727 changeset afterwards. This saves you from doing the merge by
728 728 hand. The result of this merge is not committed, as for a normal
729 729 merge.'''
730 730
731 731 bail_if_changed(repo)
732 732 op1, op2 = repo.dirstate.parents()
733 733 if op2 != nullid:
734 734 raise util.Abort(_('outstanding uncommitted merge'))
735 735 node = repo.lookup(rev)
736 736 p1, p2 = repo.changelog.parents(node)
737 737 if p1 == nullid:
738 738 raise util.Abort(_('cannot back out a change with no parents'))
739 739 if p2 != nullid:
740 740 if not opts['parent']:
741 741 raise util.Abort(_('cannot back out a merge changeset without '
742 742 '--parent'))
743 743 p = repo.lookup(opts['parent'])
744 744 if p not in (p1, p2):
745 745 raise util.Abort(_('%s is not a parent of %s' %
746 746 (short(p), short(node))))
747 747 parent = p
748 748 else:
749 749 if opts['parent']:
750 750 raise util.Abort(_('cannot use --parent on non-merge changeset'))
751 751 parent = p1
752 752 hg.clean(repo, node, show_stats=False)
753 753 revert_opts = opts.copy()
754 754 revert_opts['all'] = True
755 755 revert_opts['rev'] = hex(parent)
756 756 revert(ui, repo, **revert_opts)
757 757 commit_opts = opts.copy()
758 758 commit_opts['addremove'] = False
759 759 if not commit_opts['message'] and not commit_opts['logfile']:
760 760 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
761 761 commit_opts['force_editor'] = True
762 762 commit(ui, repo, **commit_opts)
763 763 def nice(node):
764 764 return '%d:%s' % (repo.changelog.rev(node), short(node))
765 765 ui.status(_('changeset %s backs out changeset %s\n') %
766 766 (nice(repo.changelog.tip()), nice(node)))
767 767 if op1 != node:
768 768 if opts['merge']:
769 769 ui.status(_('merging with changeset %s\n') % nice(op1))
770 770 n = _lookup(repo, hex(op1))
771 771 hg.merge(repo, n)
772 772 else:
773 773 ui.status(_('the backout changeset is a new head - '
774 774 'do not forget to merge\n'))
775 775 ui.status(_('(use "backout --merge" '
776 776 'if you want to auto-merge)\n'))
777 777
778 778 def branch(ui, repo, label=None):
779 779 """set or show the current branch name
780 780
781 781 With <name>, set the current branch name. Otherwise, show the
782 782 current branch name.
783 783 """
784 784
785 785 if label is not None:
786 786 repo.opener("branch", "w").write(label)
787 787 else:
788 788 b = repo.workingctx().branch()
789 789 if b:
790 790 ui.write("%s\n" % b)
791 791
792 792 def branches(ui, repo):
793 793 """list repository named branches
794 794
795 795 List the repository's named branches.
796 796 """
797 797 b = repo.branchtags()
798 798 l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
799 799 l.sort()
800 800 for r, n, t in l:
801 801 hexfunc = ui.debugflag and hex or short
802 802 if ui.quiet:
803 803 ui.write("%s\n" % t)
804 804 else:
805 805 ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
806 806
807 807 def bundle(ui, repo, fname, dest=None, **opts):
808 808 """create a changegroup file
809 809
810 810 Generate a compressed changegroup file collecting changesets not
811 811 found in the other repository.
812 812
813 813 If no destination repository is specified the destination is assumed
814 814 to have all the nodes specified by one or more --base parameters.
815 815
816 816 The bundle file can then be transferred using conventional means and
817 817 applied to another repository with the unbundle or pull command.
818 818 This is useful when direct push and pull are not available or when
819 819 exporting an entire repository is undesirable.
820 820
821 821 Applying bundles preserves all changeset contents including
822 822 permissions, copy/rename information, and revision history.
823 823 """
824 824 revs = opts.get('rev') or None
825 825 if revs:
826 826 revs = [repo.lookup(rev) for rev in revs]
827 827 base = opts.get('base')
828 828 if base:
829 829 if dest:
830 830 raise util.Abort(_("--base is incompatible with specifying "
831 831 "a destination"))
832 832 base = [repo.lookup(rev) for rev in base]
833 833 # create the right base
834 834 # XXX: nodesbetween / changegroup* should be "fixed" instead
835 835 o = []
836 836 has_set = sets.Set(base)
837 837 for n in base:
838 838 has_set.update(repo.changelog.reachable(n))
839 839 if revs:
840 840 visit = list(revs)
841 841 else:
842 842 visit = repo.changelog.heads()
843 843 seen = sets.Set(visit)
844 844 while visit:
845 845 n = visit.pop(0)
846 846 parents = [p for p in repo.changelog.parents(n)
847 847 if p != nullid and p not in has_set]
848 848 if len(parents) == 0:
849 849 o.insert(0, n)
850 850 else:
851 851 for p in parents:
852 852 if p not in seen:
853 853 seen.add(p)
854 854 visit.append(p)
855 855 else:
856 856 setremoteconfig(ui, opts)
857 857 dest = ui.expandpath(dest or 'default-push', dest or 'default')
858 858 other = hg.repository(ui, dest)
859 859 o = repo.findoutgoing(other, force=opts['force'])
860 860
861 861 if revs:
862 862 cg = repo.changegroupsubset(o, revs, 'bundle')
863 863 else:
864 864 cg = repo.changegroup(o, 'bundle')
865 865 write_bundle(cg, fname)
866 866
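# Editorial note (not part of this changeset): two illustrative bundle calls
# matching the docstring above (paths and revisions are hypothetical):
#
#   $ hg bundle changes.hg ../other-repo    # changesets missing from ../other-repo
#   $ hg bundle --base 0 changes.hg         # recipient is assumed to have rev 0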
867 867 def cat(ui, repo, file1, *pats, **opts):
868 868 """output the latest or given revisions of files
869 869
870 870 Print the specified files as they were at the given revision.
871 871 If no revision is given then working dir parent is used, or tip
872 872 if no revision is checked out.
873 873
874 874 Output may be to a file, in which case the name of the file is
875 875 given using a format string. The formatting rules are the same as
876 876 for the export command, with the following additions:
877 877
878 878 %s basename of file being printed
879 879 %d dirname of file being printed, or '.' if in repo root
880 880 %p root-relative path name of file being printed
881 881 """
882 882 ctx = repo.changectx(opts['rev'])
883 883 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
884 884 ctx.node()):
885 885 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
886 886 fp.write(ctx.filectx(abs).data())
887 887
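# Editorial note (not part of this changeset): an illustrative use of the
# output format string described above ("somefile.c" is hypothetical; %p is
# the root-relative path of the file being printed):
#
#   $ hg cat -r 0 -o 'old/%p' somefile.c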
888 888 def clone(ui, source, dest=None, **opts):
889 889 """make a copy of an existing repository
890 890
891 891 Create a copy of an existing repository in a new directory.
892 892
893 893 If no destination directory name is specified, it defaults to the
894 894 basename of the source.
895 895
896 896 The location of the source is added to the new repository's
897 897 .hg/hgrc file, as the default to be used for future pulls.
898 898
899 899 For efficiency, hardlinks are used for cloning whenever the source
900 900 and destination are on the same filesystem (note this applies only
901 901 to the repository data, not to the checked out files). Some
902 902 filesystems, such as AFS, implement hardlinking incorrectly, but
903 903 do not report errors. In these cases, use the --pull option to
904 904 avoid hardlinking.
905 905
906 906 You can safely clone repositories and checked out files using full
907 907 hardlinks with
908 908
909 909 $ cp -al REPO REPOCLONE
910 910
911 911 which is the fastest way to clone. However, the operation is not
912 912 atomic (making sure REPO is not modified during the operation is
913 913 up to you) and you have to make sure your editor breaks hardlinks
914 914 (Emacs and most Linux Kernel tools do so).
915 915
916 916 If you use the -r option to clone up to a specific revision, no
917 917 subsequent revisions will be present in the cloned repository.
918 918 This option implies --pull, even on local repositories.
919 919
920 920 See pull for valid source format details.
921 921
922 922 It is possible to specify an ssh:// URL as the destination, but no
923 923 .hg/hgrc will be created on the remote side. Look at the help text
924 924 for the pull command for important details about ssh:// URLs.
925 925 """
926 926 setremoteconfig(ui, opts)
927 927 hg.clone(ui, ui.expandpath(source), dest,
928 928 pull=opts['pull'],
929 929 stream=opts['uncompressed'],
930 930 rev=opts['rev'],
931 931 update=not opts['noupdate'])
932 932
933 933 def commit(ui, repo, *pats, **opts):
934 934 """commit the specified files or all outstanding changes
935 935
936 936 Commit changes to the given files into the repository.
937 937
938 938 If a list of files is omitted, all changes reported by "hg status"
939 939 will be committed.
940 940
941 941 If no commit message is specified, the editor configured in your hgrc
942 942 or in the EDITOR environment variable is started to enter a message.
943 943 """
944 944 message = logmessage(opts)
945 945
946 946 if opts['addremove']:
947 947 cmdutil.addremove(repo, pats, opts)
948 948 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
949 949 if pats:
950 950 modified, added, removed = repo.status(files=fns, match=match)[:3]
951 951 files = modified + added + removed
952 952 else:
953 953 files = []
954 954 try:
955 955 repo.commit(files, message, opts['user'], opts['date'], match,
956 956 force_editor=opts.get('force_editor'))
957 957 except ValueError, inst:
958 958 raise util.Abort(str(inst))
959 959
960 960 def docopy(ui, repo, pats, opts, wlock):
961 961 # called with the repo lock held
962 962 cwd = repo.getcwd()
963 963 errors = 0
964 964 copied = []
965 965 targets = {}
966 966
967 967 def okaytocopy(abs, rel, exact):
968 968 reasons = {'?': _('is not managed'),
969 969 'a': _('has been marked for add'),
970 970 'r': _('has been marked for remove')}
971 971 state = repo.dirstate.state(abs)
972 972 reason = reasons.get(state)
973 973 if reason:
974 974 if state == 'a':
975 975 origsrc = repo.dirstate.copied(abs)
976 976 if origsrc is not None:
977 977 return origsrc
978 978 if exact:
979 979 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
980 980 else:
981 981 return abs
982 982
983 983 def copy(origsrc, abssrc, relsrc, target, exact):
984 984 abstarget = util.canonpath(repo.root, cwd, target)
985 985 reltarget = util.pathto(cwd, abstarget)
986 986 prevsrc = targets.get(abstarget)
987 987 if prevsrc is not None:
988 988 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
989 989 (reltarget, abssrc, prevsrc))
990 990 return
991 991 if (not opts['after'] and os.path.exists(reltarget) or
992 992 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
993 993 if not opts['force']:
994 994 ui.warn(_('%s: not overwriting - file exists\n') %
995 995 reltarget)
996 996 return
997 997 if not opts['after'] and not opts.get('dry_run'):
998 998 os.unlink(reltarget)
999 999 if opts['after']:
1000 1000 if not os.path.exists(reltarget):
1001 1001 return
1002 1002 else:
1003 1003 targetdir = os.path.dirname(reltarget) or '.'
1004 1004 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1005 1005 os.makedirs(targetdir)
1006 1006 try:
1007 1007 restore = repo.dirstate.state(abstarget) == 'r'
1008 1008 if restore and not opts.get('dry_run'):
1009 1009 repo.undelete([abstarget], wlock)
1010 1010 try:
1011 1011 if not opts.get('dry_run'):
1012 1012 shutil.copyfile(relsrc, reltarget)
1013 1013 shutil.copymode(relsrc, reltarget)
1014 1014 restore = False
1015 1015 finally:
1016 1016 if restore:
1017 1017 repo.remove([abstarget], wlock)
1018 1018 except shutil.Error, inst:
1019 1019 raise util.Abort(str(inst))
1020 1020 except IOError, inst:
1021 1021 if inst.errno == errno.ENOENT:
1022 1022 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1023 1023 else:
1024 1024 ui.warn(_('%s: cannot copy - %s\n') %
1025 1025 (relsrc, inst.strerror))
1026 1026 errors += 1
1027 1027 return
1028 1028 if ui.verbose or not exact:
1029 1029 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1030 1030 targets[abstarget] = abssrc
1031 1031 if abstarget != origsrc and not opts.get('dry_run'):
1032 1032 repo.copy(origsrc, abstarget, wlock)
1033 1033 copied.append((abssrc, relsrc, exact))
1034 1034
1035 1035 def targetpathfn(pat, dest, srcs):
1036 1036 if os.path.isdir(pat):
1037 1037 abspfx = util.canonpath(repo.root, cwd, pat)
1038 1038 if destdirexists:
1039 1039 striplen = len(os.path.split(abspfx)[0])
1040 1040 else:
1041 1041 striplen = len(abspfx)
1042 1042 if striplen:
1043 1043 striplen += len(os.sep)
1044 1044 res = lambda p: os.path.join(dest, p[striplen:])
1045 1045 elif destdirexists:
1046 1046 res = lambda p: os.path.join(dest, os.path.basename(p))
1047 1047 else:
1048 1048 res = lambda p: dest
1049 1049 return res
1050 1050
1051 1051 def targetpathafterfn(pat, dest, srcs):
1052 1052 if util.patkind(pat, None)[0]:
1053 1053 # a mercurial pattern
1054 1054 res = lambda p: os.path.join(dest, os.path.basename(p))
1055 1055 else:
1056 1056 abspfx = util.canonpath(repo.root, cwd, pat)
1057 1057 if len(abspfx) < len(srcs[0][0]):
1058 1058 # A directory. Either the target path contains the last
1059 1059 # component of the source path or it does not.
1060 1060 def evalpath(striplen):
1061 1061 score = 0
1062 1062 for s in srcs:
1063 1063 t = os.path.join(dest, s[0][striplen:])
1064 1064 if os.path.exists(t):
1065 1065 score += 1
1066 1066 return score
1067 1067
1068 1068 striplen = len(abspfx)
1069 1069 if striplen:
1070 1070 striplen += len(os.sep)
1071 1071 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1072 1072 score = evalpath(striplen)
1073 1073 striplen1 = len(os.path.split(abspfx)[0])
1074 1074 if striplen1:
1075 1075 striplen1 += len(os.sep)
1076 1076 if evalpath(striplen1) > score:
1077 1077 striplen = striplen1
1078 1078 res = lambda p: os.path.join(dest, p[striplen:])
1079 1079 else:
1080 1080 # a file
1081 1081 if destdirexists:
1082 1082 res = lambda p: os.path.join(dest, os.path.basename(p))
1083 1083 else:
1084 1084 res = lambda p: dest
1085 1085 return res
1086 1086
1087 1087
1088 1088 pats = list(pats)
1089 1089 if not pats:
1090 1090 raise util.Abort(_('no source or destination specified'))
1091 1091 if len(pats) == 1:
1092 1092 raise util.Abort(_('no destination specified'))
1093 1093 dest = pats.pop()
1094 1094 destdirexists = os.path.isdir(dest)
1095 1095 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1096 1096 raise util.Abort(_('with multiple sources, destination must be an '
1097 1097 'existing directory'))
1098 1098 if opts['after']:
1099 1099 tfn = targetpathafterfn
1100 1100 else:
1101 1101 tfn = targetpathfn
1102 1102 copylist = []
1103 1103 for pat in pats:
1104 1104 srcs = []
1105 1105 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1106 1106 origsrc = okaytocopy(abssrc, relsrc, exact)
1107 1107 if origsrc:
1108 1108 srcs.append((origsrc, abssrc, relsrc, exact))
1109 1109 if not srcs:
1110 1110 continue
1111 1111 copylist.append((tfn(pat, dest, srcs), srcs))
1112 1112 if not copylist:
1113 1113 raise util.Abort(_('no files to copy'))
1114 1114
1115 1115 for targetpath, srcs in copylist:
1116 1116 for origsrc, abssrc, relsrc, exact in srcs:
1117 1117 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1118 1118
1119 1119 if errors:
1120 1120 ui.warn(_('(consider using --after)\n'))
1121 1121 return errors, copied
1122 1122
1123 1123 def copy(ui, repo, *pats, **opts):
1124 1124 """mark files as copied for the next commit
1125 1125
1126 1126 Mark dest as having copies of source files. If dest is a
1127 1127 directory, copies are put in that directory. If dest is a file,
1128 1128 there can only be one source.
1129 1129
1130 1130 By default, this command copies the contents of files as they
1131 1131 stand in the working directory. If invoked with --after, the
1132 1132 operation is recorded, but no copying is performed.
1133 1133
1134 1134 This command takes effect in the next commit.
1135 1135
1136 1136 NOTE: This command should be treated as experimental. While it
1137 1137 should properly record copied files, this information is not yet
1138 1138 fully used by merge, nor fully reported by log.
1139 1139 """
1140 1140 wlock = repo.wlock(0)
1141 1141 errs, copied = docopy(ui, repo, pats, opts, wlock)
1142 1142 return errs
1143 1143
1144 1144 def debugancestor(ui, index, rev1, rev2):
1145 1145 """find the ancestor revision of two revisions in a given index"""
1146 1146 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1147 1147 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1148 1148 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1149 1149
1150 1150 def debugcomplete(ui, cmd='', **opts):
1151 1151 """returns the completion list associated with the given command"""
1152 1152
1153 1153 if opts['options']:
1154 1154 options = []
1155 1155 otables = [globalopts]
1156 1156 if cmd:
1157 1157 aliases, entry = findcmd(ui, cmd)
1158 1158 otables.append(entry[1])
1159 1159 for t in otables:
1160 1160 for o in t:
1161 1161 if o[0]:
1162 1162 options.append('-%s' % o[0])
1163 1163 options.append('--%s' % o[1])
1164 1164 ui.write("%s\n" % "\n".join(options))
1165 1165 return
1166 1166
1167 1167 clist = findpossible(ui, cmd).keys()
1168 1168 clist.sort()
1169 1169 ui.write("%s\n" % "\n".join(clist))
1170 1170
1171 1171 def debugrebuildstate(ui, repo, rev=None):
1172 1172 """rebuild the dirstate as it would look for the given revision"""
1173 1173 if not rev:
1174 1174 rev = repo.changelog.tip()
1175 1175 else:
1176 1176 rev = repo.lookup(rev)
1177 1177 change = repo.changelog.read(rev)
1178 1178 n = change[0]
1179 1179 files = repo.manifest.read(n)
1180 1180 wlock = repo.wlock()
1181 1181 repo.dirstate.rebuild(rev, files)
1182 1182
1183 1183 def debugcheckstate(ui, repo):
1184 1184 """validate the correctness of the current dirstate"""
1185 1185 parent1, parent2 = repo.dirstate.parents()
1186 1186 repo.dirstate.read()
1187 1187 dc = repo.dirstate.map
1188 1188 keys = dc.keys()
1189 1189 keys.sort()
1190 1190 m1n = repo.changelog.read(parent1)[0]
1191 1191 m2n = repo.changelog.read(parent2)[0]
1192 1192 m1 = repo.manifest.read(m1n)
1193 1193 m2 = repo.manifest.read(m2n)
1194 1194 errors = 0
1195 1195 for f in dc:
1196 1196 state = repo.dirstate.state(f)
1197 1197 if state in "nr" and f not in m1:
1198 1198 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1199 1199 errors += 1
1200 1200 if state in "a" and f in m1:
1201 1201 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1202 1202 errors += 1
1203 1203 if state in "m" and f not in m1 and f not in m2:
1204 1204 ui.warn(_("%s in state %s, but not in either manifest\n") %
1205 1205 (f, state))
1206 1206 errors += 1
1207 1207 for f in m1:
1208 1208 state = repo.dirstate.state(f)
1209 1209 if state not in "nrm":
1210 1210 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1211 1211 errors += 1
1212 1212 if errors:
1213 1213 error = _(".hg/dirstate inconsistent with current parent's manifest")
1214 1214 raise util.Abort(error)
1215 1215
1216 1216 def showconfig(ui, repo, *values, **opts):
1217 1217 """show combined config settings from all hgrc files
1218 1218
1219 1219 With no args, print names and values of all config items.
1220 1220
1221 1221 With one arg of the form section.name, print just the value of
1222 1222 that config item.
1223 1223
1224 1224 With multiple args, print names and values of all config items
1225 1225 with matching section names."""
1226 1226
1227 1227 untrusted = bool(opts.get('untrusted'))
1228 1228 if values:
1229 1229 if len([v for v in values if '.' in v]) > 1:
1230 1230 raise util.Abort(_('only one config item permitted'))
1231 1231 for section, name, value in ui.walkconfig(untrusted=untrusted):
1232 1232 sectname = section + '.' + name
1233 1233 if values:
1234 1234 for v in values:
1235 1235 if v == section:
1236 1236 ui.write('%s=%s\n' % (sectname, value))
1237 1237 elif v == sectname:
1238 1238 ui.write(value, '\n')
1239 1239 else:
1240 1240 ui.write('%s=%s\n' % (sectname, value))
1241 1241
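# Editorial note (not part of this changeset): the three forms described in
# the showconfig docstring, as illustrative invocations:
#
#   $ hg showconfig                  # all section.name=value pairs
#   $ hg showconfig ui.username      # just the value of one item
#   $ hg showconfig paths ui         # all items in the matching sections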
1242 1242 def debugsetparents(ui, repo, rev1, rev2=None):
1243 1243 """manually set the parents of the current working directory
1244 1244
1245 1245 This is useful for writing repository conversion tools, but should
1246 1246 be used with care.
1247 1247 """
1248 1248
1249 1249 if not rev2:
1250 1250 rev2 = hex(nullid)
1251 1251
1252 1252 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1253 1253
1254 1254 def debugstate(ui, repo):
1255 1255 """show the contents of the current dirstate"""
1256 1256 repo.dirstate.read()
1257 1257 dc = repo.dirstate.map
1258 1258 keys = dc.keys()
1259 1259 keys.sort()
1260 1260 for file_ in keys:
1261 1261 ui.write("%c %3o %10d %s %s\n"
1262 1262 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1263 1263 time.strftime("%x %X",
1264 1264 time.localtime(dc[file_][3])), file_))
1265 1265 for f in repo.dirstate.copies():
1266 1266 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1267 1267
1268 1268 def debugdata(ui, file_, rev):
1269 1269 """dump the contents of a data file revision"""
1270 1270 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1271 1271 file_[:-2] + ".i", file_, 0)
1272 1272 try:
1273 1273 ui.write(r.revision(r.lookup(rev)))
1274 1274 except KeyError:
1275 1275 raise util.Abort(_('invalid revision identifier %s') % rev)
1276 1276
1277 1277 def debugindex(ui, file_):
1278 1278 """dump the contents of an index file"""
1279 1279 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1280 1280 ui.write(" rev offset length base linkrev" +
1281 1281 " nodeid p1 p2\n")
1282 1282 for i in xrange(r.count()):
1283 1283 node = r.node(i)
1284 1284 pp = r.parents(node)
1285 1285 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1286 1286 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1287 1287 short(node), short(pp[0]), short(pp[1])))
1288 1288
1289 1289 def debugindexdot(ui, file_):
1290 1290 """dump an index DAG as a .dot file"""
1291 1291 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1292 1292 ui.write("digraph G {\n")
1293 1293 for i in xrange(r.count()):
1294 1294 node = r.node(i)
1295 1295 pp = r.parents(node)
1296 1296 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1297 1297 if pp[1] != nullid:
1298 1298 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1299 1299 ui.write("}\n")
1300 1300
1301 1301 def debugrename(ui, repo, file, rev=None):
1302 1302 """dump rename information"""
1303 1303 r = repo.file(relpath(repo, [file])[0])
1304 1304 if rev:
1305 1305 try:
1306 1306 # assume all revision numbers are for changesets
1307 1307 n = repo.lookup(rev)
1308 1308 change = repo.changelog.read(n)
1309 1309 m = repo.manifest.read(change[0])
1310 1310 n = m[relpath(repo, [file])[0]]
1311 1311 except (hg.RepoError, KeyError):
1312 1312 n = r.lookup(rev)
1313 1313 else:
1314 1314 n = r.tip()
1315 1315 m = r.renamed(n)
1316 1316 if m:
1317 1317 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1318 1318 else:
1319 1319 ui.write(_("not renamed\n"))
1320 1320
1321 1321 def debugwalk(ui, repo, *pats, **opts):
1322 1322 """show how files match on given patterns"""
1323 1323 items = list(cmdutil.walk(repo, pats, opts))
1324 1324 if not items:
1325 1325 return
1326 1326 fmt = '%%s %%-%ds %%-%ds %%s' % (
1327 1327 max([len(abs) for (src, abs, rel, exact) in items]),
1328 1328 max([len(rel) for (src, abs, rel, exact) in items]))
1329 1329 for src, abs, rel, exact in items:
1330 1330 line = fmt % (src, abs, rel, exact and 'exact' or '')
1331 1331 ui.write("%s\n" % line.rstrip())
1332 1332
1333 1333 def diff(ui, repo, *pats, **opts):
1334 1334 """diff repository (or selected files)
1335 1335
1336 1336 Show differences between revisions for the specified files.
1337 1337
1338 1338 Differences between files are shown using the unified diff format.
1339 1339
1340 1340 When two revision arguments are given, then changes are shown
1341 1341 between those revisions. If only one revision is specified then
1342 1342 that revision is compared to the working directory, and, when no
1343 1343 revisions are specified, the working directory files are compared
1344 1344 to its parent.
1345 1345
1346 1346 Without the -a option, diff will avoid generating diffs of files
1347 1347 it detects as binary. With -a, diff will generate a diff anyway,
1348 1348 probably with undesirable results.
1349 1349 """
1350 1350 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1351 1351
1352 1352 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1353 1353
1354 1354 patch.diff(repo, node1, node2, fns, match=matchfn,
1355 1355 opts=patch.diffopts(ui, opts))
1356 1356
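# Editorial note (not part of this changeset): illustrative diff invocations
# for the revision rules described above (revision numbers are hypothetical):
#
#   $ hg diff                     # working directory against its parent
#   $ hg diff -r 10               # revision 10 against the working directory
#   $ hg diff -r 10 -r 20         # revision 10 against revision 20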
1357 1357 def export(ui, repo, *changesets, **opts):
1358 1358 """dump the header and diffs for one or more changesets
1359 1359
1360 1360 Print the changeset header and diffs for one or more revisions.
1361 1361
1362 1362 The information shown in the changeset header is: author,
1363 1363 changeset hash, parent and commit comment.
1364 1364
1365 1365 Output may be to a file, in which case the name of the file is
1366 1366 given using a format string. The formatting rules are as follows:
1367 1367
1368 1368 %% literal "%" character
1369 1369 %H changeset hash (40 bytes of hexadecimal)
1370 1370 %N number of patches being generated
1371 1371 %R changeset revision number
1372 1372 %b basename of the exporting repository
1373 1373 %h short-form changeset hash (12 bytes of hexadecimal)
1374 1374 %n zero-padded sequence number, starting at 1
1375 1375 %r zero-padded changeset revision number
1376 1376
1377 1377 Without the -a option, export will avoid generating diffs of files
1378 1378 it detects as binary. With -a, export will generate a diff anyway,
1379 1379 probably with undesirable results.
1380 1380
1381 1381 With the --switch-parent option, the diff will be against the second
1382 1382 parent. It can be useful to review a merge.
1383 1383 """
1384 1384 if not changesets:
1385 1385 raise util.Abort(_("export requires at least one changeset"))
1386 1386 revs = list(cmdutil.revrange(ui, repo, changesets))
1387 1387 if len(revs) > 1:
1388 1388 ui.note(_('exporting patches:\n'))
1389 1389 else:
1390 1390 ui.note(_('exporting patch:\n'))
1391 1391 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1392 1392 switch_parent=opts['switch_parent'],
1393 1393 opts=patch.diffopts(ui, opts))
1394 1394
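# Editorial note (not part of this changeset): an illustrative export call
# using the format-string rules listed above (revision numbers are
# hypothetical):
#
#   $ hg export -o '%n-of-%N-r%R-%h.patch' 12 13 14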
1395 1395 def grep(ui, repo, pattern, *pats, **opts):
1396 1396 """search for a pattern in specified files and revisions
1397 1397
1398 1398 Search revisions of files for a regular expression.
1399 1399
1400 1400 This command behaves differently than Unix grep. It only accepts
1401 1401 Python/Perl regexps. It searches repository history, not the
1402 1402 working directory. It always prints the revision number in which
1403 1403 a match appears.
1404 1404
1405 1405 By default, grep only prints output for the first revision of a
1406 1406 file in which it finds a match. To get it to print every revision
1407 1407 that contains a change in match status ("-" for a match that
1408 1408 becomes a non-match, or "+" for a non-match that becomes a match),
1409 1409 use the --all flag.
1410 1410 """
1411 1411 reflags = 0
1412 1412 if opts['ignore_case']:
1413 1413 reflags |= re.I
1414 1414 regexp = re.compile(pattern, reflags)
1415 1415 sep, eol = ':', '\n'
1416 1416 if opts['print0']:
1417 1417 sep = eol = '\0'
1418 1418
1419 1419 fcache = {}
1420 1420 def getfile(fn):
1421 1421 if fn not in fcache:
1422 1422 fcache[fn] = repo.file(fn)
1423 1423 return fcache[fn]
1424 1424
1425 1425 def matchlines(body):
1426 1426 begin = 0
1427 1427 linenum = 0
1428 1428 while True:
1429 1429 match = regexp.search(body, begin)
1430 1430 if not match:
1431 1431 break
1432 1432 mstart, mend = match.span()
1433 1433 linenum += body.count('\n', begin, mstart) + 1
1434 1434 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1435 1435 lend = body.find('\n', mend)
1436 1436 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1437 1437 begin = lend + 1
1438 1438
1439 1439 class linestate(object):
1440 1440 def __init__(self, line, linenum, colstart, colend):
1441 1441 self.line = line
1442 1442 self.linenum = linenum
1443 1443 self.colstart = colstart
1444 1444 self.colend = colend
1445 1445
1446 1446 def __eq__(self, other):
1447 1447 return self.line == other.line
1448 1448
1449 1449 matches = {}
1450 1450 copies = {}
1451 1451 def grepbody(fn, rev, body):
1452 1452 matches[rev].setdefault(fn, [])
1453 1453 m = matches[rev][fn]
1454 1454 for lnum, cstart, cend, line in matchlines(body):
1455 1455 s = linestate(line, lnum, cstart, cend)
1456 1456 m.append(s)
1457 1457
1458 1458 def difflinestates(a, b):
1459 1459 sm = difflib.SequenceMatcher(None, a, b)
1460 1460 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1461 1461 if tag == 'insert':
1462 1462 for i in xrange(blo, bhi):
1463 1463 yield ('+', b[i])
1464 1464 elif tag == 'delete':
1465 1465 for i in xrange(alo, ahi):
1466 1466 yield ('-', a[i])
1467 1467 elif tag == 'replace':
1468 1468 for i in xrange(alo, ahi):
1469 1469 yield ('-', a[i])
1470 1470 for i in xrange(blo, bhi):
1471 1471 yield ('+', b[i])
1472 1472
1473 1473 prev = {}
1474 1474 ucache = {}
1475 1475 def display(fn, rev, states, prevstates):
1476 1476 counts = {'-': 0, '+': 0}
1477 1477 filerevmatches = {}
1478 1478 if incrementing or not opts['all']:
1479 1479 a, b = prevstates, states
1480 1480 else:
1481 1481 a, b = states, prevstates
1482 1482 for change, l in difflinestates(a, b):
1483 1483 if incrementing or not opts['all']:
1484 1484 r = rev
1485 1485 else:
1486 1486 r = prev[fn]
1487 1487 cols = [fn, str(r)]
1488 1488 if opts['line_number']:
1489 1489 cols.append(str(l.linenum))
1490 1490 if opts['all']:
1491 1491 cols.append(change)
1492 1492 if opts['user']:
1493 1493 cols.append(trimuser(ui, getchange(r)[1], rev,
1494 1494 ucache))
1495 1495 if opts['files_with_matches']:
1496 1496 c = (fn, rev)
1497 1497 if c in filerevmatches:
1498 1498 continue
1499 1499 filerevmatches[c] = 1
1500 1500 else:
1501 1501 cols.append(l.line)
1502 1502 ui.write(sep.join(cols), eol)
1503 1503 counts[change] += 1
1504 1504 return counts['+'], counts['-']
1505 1505
1506 1506 fstate = {}
1507 1507 skip = {}
1508 1508 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1509 1509 count = 0
1510 1510 incrementing = False
1511 1511 follow = opts.get('follow')
1512 1512 for st, rev, fns in changeiter:
1513 1513 if st == 'window':
1514 1514 incrementing = rev
1515 1515 matches.clear()
1516 1516 elif st == 'add':
1517 1517 change = repo.changelog.read(repo.lookup(str(rev)))
1518 1518 mf = repo.manifest.read(change[0])
1519 1519 matches[rev] = {}
1520 1520 for fn in fns:
1521 1521 if fn in skip:
1522 1522 continue
1523 1523 fstate.setdefault(fn, {})
1524 1524 try:
1525 1525 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1526 1526 if follow:
1527 1527 copied = getfile(fn).renamed(mf[fn])
1528 1528 if copied:
1529 1529 copies.setdefault(rev, {})[fn] = copied[0]
1530 1530 except KeyError:
1531 1531 pass
1532 1532 elif st == 'iter':
1533 1533 states = matches[rev].items()
1534 1534 states.sort()
1535 1535 for fn, m in states:
1536 1536 copy = copies.get(rev, {}).get(fn)
1537 1537 if fn in skip:
1538 1538 if copy:
1539 1539 skip[copy] = True
1540 1540 continue
1541 1541 if incrementing or not opts['all'] or fstate[fn]:
1542 1542 pos, neg = display(fn, rev, m, fstate[fn])
1543 1543 count += pos + neg
1544 1544 if pos and not opts['all']:
1545 1545 skip[fn] = True
1546 1546 if copy:
1547 1547 skip[copy] = True
1548 1548 fstate[fn] = m
1549 1549 if copy:
1550 1550 fstate[copy] = m
1551 1551 prev[fn] = rev
1552 1552
1553 1553 if not incrementing:
1554 1554 fstate = fstate.items()
1555 1555 fstate.sort()
1556 1556 for fn, state in fstate:
1557 1557 if fn in skip:
1558 1558 continue
1559 1559 if fn not in copies.get(prev[fn], {}):
1560 1560 display(fn, rev, {}, state)
1561 1561 return (count == 0 and 1) or 0
1562 1562
1563 1563 def heads(ui, repo, **opts):
1564 1564 """show current repository heads
1565 1565
1566 1566 Show all repository head changesets.
1567 1567
1568 1568 Repository "heads" are changesets that don't have children
1569 1569 changesets. They are where development generally takes place and
1570 1570 are the usual targets for update and merge operations.
1571 1571 """
1572 1572 if opts['rev']:
1573 1573 heads = repo.heads(repo.lookup(opts['rev']))
1574 1574 else:
1575 1575 heads = repo.heads()
1576 1576 br = None
1577 1577 if opts['branches']:
1578 1578 ui.warn(_("the --branches option is deprecated, "
1579 1579 "please use 'hg branches' instead\n"))
1580 1580 br = repo.branchlookup(heads)
1581 1581 displayer = show_changeset(ui, repo, opts)
1582 1582 for n in heads:
1583 1583 displayer.show(changenode=n, brinfo=br)
1584 1584
1585 1585 def identify(ui, repo):
1586 1586 """print information about the working copy
1587 1587
1588 1588 Print a short summary of the current state of the repo.
1589 1589
1590 1590 This summary identifies the repository state using one or two parent
1591 1591 hash identifiers, followed by a "+" if there are uncommitted changes
1592 1592 in the working directory, followed by a list of tags for this revision.
1593 1593 """
1594 1594 parents = [p for p in repo.dirstate.parents() if p != nullid]
1595 1595 if not parents:
1596 1596 ui.write(_("unknown\n"))
1597 1597 return
1598 1598
1599 1599 hexfunc = ui.debugflag and hex or short
1600 1600 modified, added, removed, deleted = repo.status()[:4]
1601 1601 output = ["%s%s" %
1602 1602 ('+'.join([hexfunc(parent) for parent in parents]),
1603 1603 (modified or added or removed or deleted) and "+" or "")]
1604 1604
1605 1605 if not ui.quiet:
1606 1606
1607 1607 branch = repo.workingctx().branch()
1608 1608 if branch:
1609 1609 output.append("(%s)" % branch)
1610 1610
1611 1611 # multiple tags for a single parent separated by '/'
1612 1612 parenttags = ['/'.join(tags)
1613 1613 for tags in map(repo.nodetags, parents) if tags]
1614 1614 # tags for multiple parents separated by ' + '
1615 1615 if parenttags:
1616 1616 output.append(' + '.join(parenttags))
1617 1617
1618 1618 ui.write("%s\n" % ' '.join(output))
1619 1619
1620 1620 def import_(ui, repo, patch1, *patches, **opts):
1621 1621 """import an ordered set of patches
1622 1622
1623 1623 Import a list of patches and commit them individually.
1624 1624
1625 1625 If there are outstanding changes in the working directory, import
1626 1626 will abort unless given the -f flag.
1627 1627
1628 1628 You can import a patch straight from a mail message. Even patches
1629 1629 as attachments work (body part must be type text/plain or
1630 1630 text/x-patch to be used). The From and Subject headers of the email
1631 1631 message are used as the default committer and commit message. All
1632 1632 text/plain body parts before the first diff are added to the commit
1633 1633 message.
1634 1634
1635 1635 If the imported patch was generated by hg export, the user and
1636 1636 description from the patch override values from the message headers
1637 1637 and body. Values given on the command line with -m and -u override these.
1638 1638
1639 1639 To read a patch from standard input, use patch name "-".
1640 1640 """
1641 1641 patches = (patch1,) + patches
1642 1642
1643 1643 if not opts['force']:
1644 1644 bail_if_changed(repo)
1645 1645
1646 1646 d = opts["base"]
1647 1647 strip = opts["strip"]
1648 1648
1649 1649 wlock = repo.wlock()
1650 1650 lock = repo.lock()
1651 1651
1652 1652 for p in patches:
1653 1653 pf = os.path.join(d, p)
1654 1654
1655 1655 if pf == '-':
1656 1656 ui.status(_("applying patch from stdin\n"))
1657 1657 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1658 1658 else:
1659 1659 ui.status(_("applying %s\n") % p)
1660 1660 tmpname, message, user, date = patch.extract(ui, file(pf))
1661 1661
1662 1662 if tmpname is None:
1663 1663 raise util.Abort(_('no diffs found'))
1664 1664
1665 1665 try:
1666 1666 if opts['message']:
1667 1667 # pick up the cmdline msg
1668 1668 message = opts['message']
1669 1669 elif message:
1670 1670 # pick up the patch msg
1671 1671 message = message.strip()
1672 1672 else:
1673 1673 # launch the editor
1674 1674 message = None
1675 1675 ui.debug(_('message:\n%s\n') % message)
1676 1676
1677 1677 files = {}
1678 1678 try:
1679 1679 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1680 1680 files=files)
1681 1681 finally:
1682 1682 files = patch.updatedir(ui, repo, files, wlock=wlock)
1683 1683 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1684 1684 finally:
1685 1685 os.unlink(tmpname)
1686 1686
1687 1687 def incoming(ui, repo, source="default", **opts):
1688 1688 """show new changesets found in source
1689 1689
1690 1690 Show new changesets found in the specified path/URL or the default
1691 1691 pull location. These are the changesets that would be pulled if a pull
1692 1692 was requested.
1693 1693
1694 1694 For remote repositories, using --bundle avoids downloading the changesets
1695 1695 twice if the incoming is followed by a pull.
1696 1696
1697 1697 See pull for valid source format details.
1698 1698 """
1699 1699 source = ui.expandpath(source)
1700 1700 setremoteconfig(ui, opts)
1701 1701
1702 1702 other = hg.repository(ui, source)
1703 1703 incoming = repo.findincoming(other, force=opts["force"])
1704 1704 if not incoming:
1705 1705 ui.status(_("no changes found\n"))
1706 1706 return
1707 1707
1708 1708 cleanup = None
1709 1709 try:
1710 1710 fname = opts["bundle"]
1711 1711 if fname or not other.local():
1712 1712 # create a bundle (uncompressed if other repo is not local)
1713 1713 cg = other.changegroup(incoming, "incoming")
1714 1714 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1715 1715 # keep written bundle?
1716 1716 if opts["bundle"]:
1717 1717 cleanup = None
1718 1718 if not other.local():
1719 1719 # use the created uncompressed bundlerepo
1720 1720 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1721 1721
1722 1722 revs = None
1723 1723 if opts['rev']:
1724 1724 revs = [other.lookup(rev) for rev in opts['rev']]
1725 1725 o = other.changelog.nodesbetween(incoming, revs)[0]
1726 1726 if opts['newest_first']:
1727 1727 o.reverse()
1728 1728 displayer = show_changeset(ui, other, opts)
1729 1729 for n in o:
1730 1730 parents = [p for p in other.changelog.parents(n) if p != nullid]
1731 1731 if opts['no_merges'] and len(parents) == 2:
1732 1732 continue
1733 1733 displayer.show(changenode=n)
1734 1734 if opts['patch']:
1735 1735 prev = (parents and parents[0]) or nullid
1736 1736 patch.diff(other, prev, n, fp=repo.ui)
1737 1737 ui.write("\n")
1738 1738 finally:
1739 1739 if hasattr(other, 'close'):
1740 1740 other.close()
1741 1741 if cleanup:
1742 1742 os.unlink(cleanup)
1743 1743
1744 1744 def init(ui, dest=".", **opts):
1745 1745 """create a new repository in the given directory
1746 1746
1747 1747 Initialize a new repository in the given directory. If the given
1748 1748 directory does not exist, it is created.
1749 1749
1750 1750 If no directory is given, the current directory is used.
1751 1751
1752 1752 It is possible to specify an ssh:// URL as the destination.
1753 1753 Look at the help text for the pull command for important details
1754 1754 about ssh:// URLs.
1755 1755 """
1756 1756 setremoteconfig(ui, opts)
1757 1757 hg.repository(ui, dest, create=1)
1758 1758
1759 1759 def locate(ui, repo, *pats, **opts):
1760 1760 """locate files matching specific patterns
1761 1761
1762 1762 Print all files under Mercurial control whose names match the
1763 1763 given patterns.
1764 1764
1765 1765 This command searches the current directory and its
1766 1766 subdirectories. To search an entire repository, move to the root
1767 1767 of the repository.
1768 1768
1769 1769 If no patterns are given to match, this command prints all file
1770 1770 names.
1771 1771
1772 1772 If you want to feed the output of this command into the "xargs"
1773 1773 command, use the "-0" option to both this command and "xargs".
1774 1774 This will avoid the problem of "xargs" treating single filenames
1775 1775 that contain white space as multiple filenames.
1776 1776 """
1777 1777 end = opts['print0'] and '\0' or '\n'
1778 1778 rev = opts['rev']
1779 1779 if rev:
1780 1780 node = repo.lookup(rev)
1781 1781 else:
1782 1782 node = None
1783 1783
1784 1784 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1785 1785 head='(?:.*/|)'):
1786 1786 if not node and repo.dirstate.state(abs) == '?':
1787 1787 continue
1788 1788 if opts['fullpath']:
1789 1789 ui.write(os.path.join(repo.root, abs), end)
1790 1790 else:
1791 1791 ui.write(((pats and rel) or abs), end)
1792 1792
1793 1793 def log(ui, repo, *pats, **opts):
1794 1794 """show revision history of entire repository or files
1795 1795
1796 1796 Print the revision history of the specified files or the entire
1797 1797 project.
1798 1798
1799 1799 File history is shown without following rename or copy history of
1800 1800 files. Use -f/--follow with a file name to follow history across
1801 1801 renames and copies. --follow without a file name will only show
1802 1802 ancestors or descendants of the starting revision. --follow-first
1803 1803 only follows the first parent of merge revisions.
1804 1804
1805 1805 If no revision range is specified, the default is tip:0 unless
1806 1806 --follow is set, in which case the working directory parent is
1807 1807 used as the starting revision.
1808 1808
1809 1809 By default this command outputs: changeset id and hash, tags,
1810 1810 non-trivial parents, user, date and time, and a summary for each
1811 1811 commit. When the -v/--verbose switch is used, the list of changed
1812 1812 files and full commit message is shown.
1813 1813 """
1814 1814 class dui(object):
1815 1815 # Implement and delegate some ui protocol. Save hunks of
1816 1816 # output for later display in the desired order.
1817 1817 def __init__(self, ui):
1818 1818 self.ui = ui
1819 1819 self.hunk = {}
1820 1820 self.header = {}
1821 1821 def bump(self, rev):
1822 1822 self.rev = rev
1823 1823 self.hunk[rev] = []
1824 1824 self.header[rev] = []
1825 1825 def note(self, *args):
1826 1826 if self.verbose:
1827 1827 self.write(*args)
1828 1828 def status(self, *args):
1829 1829 if not self.quiet:
1830 1830 self.write(*args)
1831 1831 def write(self, *args):
1832 1832 self.hunk[self.rev].append(args)
1833 1833 def write_header(self, *args):
1834 1834 self.header[self.rev].append(args)
1835 1835 def debug(self, *args):
1836 1836 if self.debugflag:
1837 1837 self.write(*args)
1838 1838 def __getattr__(self, key):
1839 1839 return getattr(self.ui, key)
1840 1840
1841 1841 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1842 1842
1843 1843 if opts['branches']:
1844 1844 ui.warn(_("the --branches option is deprecated, "
1845 1845 "please use 'hg branches' instead\n"))
1846 1846
1847 1847 if opts['limit']:
1848 1848 try:
1849 1849 limit = int(opts['limit'])
1850 1850 except ValueError:
1851 1851 raise util.Abort(_('limit must be a positive integer'))
1852 1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1853 1853 else:
1854 1854 limit = sys.maxint
1855 1855 count = 0
1856 1856
1857 1857 if opts['copies'] and opts['rev']:
1858 1858 endrev = max([int(i)
1859 1859 for i in cmdutil.revrange(ui, repo, opts['rev'])]) + 1
1860 1860 else:
1861 1861 endrev = repo.changelog.count()
1862 1862 rcache = {}
1863 1863 ncache = {}
1864 1864 dcache = []
1865 1865 def getrenamed(fn, rev, man):
1866 1866 '''looks up all renames for a file (up to endrev) the first
1867 1867 time the file is given. It indexes on the changerev and only
1868 1868 parses the manifest if linkrev != changerev.
1869 1869 Returns rename info for fn at changerev rev.'''
1870 1870 if fn not in rcache:
1871 1871 rcache[fn] = {}
1872 1872 ncache[fn] = {}
1873 1873 fl = repo.file(fn)
1874 1874 for i in xrange(fl.count()):
1875 1875 node = fl.node(i)
1876 1876 lr = fl.linkrev(node)
1877 1877 renamed = fl.renamed(node)
1878 1878 rcache[fn][lr] = renamed
1879 1879 if renamed:
1880 1880 ncache[fn][node] = renamed
1881 1881 if lr >= endrev:
1882 1882 break
1883 1883 if rev in rcache[fn]:
1884 1884 return rcache[fn][rev]
1885 1885 mr = repo.manifest.rev(man)
1886 1886 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1887 1887 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1888 1888 if not dcache or dcache[0] != man:
1889 1889 dcache[:] = [man, repo.manifest.readdelta(man)]
1890 1890 if fn in dcache[1]:
1891 1891 return ncache[fn].get(dcache[1][fn])
1892 1892 return None
1893 1893
1894 1894 displayer = show_changeset(ui, repo, opts)
1895 1895 for st, rev, fns in changeiter:
1896 1896 if st == 'window':
1897 1897 du = dui(ui)
1898 1898 displayer.ui = du
1899 1899 elif st == 'add':
1900 1900 du.bump(rev)
1901 1901 changenode = repo.changelog.node(rev)
1902 1902 parents = [p for p in repo.changelog.parents(changenode)
1903 1903 if p != nullid]
1904 1904 if opts['no_merges'] and len(parents) == 2:
1905 1905 continue
1906 1906 if opts['only_merges'] and len(parents) != 2:
1907 1907 continue
1908 1908
1909 1909 if opts['keyword']:
1910 1910 changes = getchange(rev)
1911 1911 miss = 0
1912 1912 for k in [kw.lower() for kw in opts['keyword']]:
1913 1913 if not (k in changes[1].lower() or
1914 1914 k in changes[4].lower() or
1915 1915 k in " ".join(changes[3][:20]).lower()):
1916 1916 miss = 1
1917 1917 break
1918 1918 if miss:
1919 1919 continue
1920 1920
1921 1921 br = None
1922 1922 if opts['branches']:
1923 1923 br = repo.branchlookup([repo.changelog.node(rev)])
1924 1924
1925 1925 copies = []
1926 1926 if opts.get('copies') and rev:
1927 1927 mf = getchange(rev)[0]
1928 1928 for fn in getchange(rev)[3]:
1929 1929 rename = getrenamed(fn, rev, mf)
1930 1930 if rename:
1931 1931 copies.append((fn, rename[0]))
1932 1932 displayer.show(rev, brinfo=br, copies=copies)
1933 1933 if opts['patch']:
1934 1934 prev = (parents and parents[0]) or nullid
1935 1935 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1936 1936 du.write("\n\n")
1937 1937 elif st == 'iter':
1938 1938 if count == limit: break
1939 1939 if du.header[rev]:
1940 1940 for args in du.header[rev]:
1941 1941 ui.write_header(*args)
1942 1942 if du.hunk[rev]:
1943 1943 count += 1
1944 1944 for args in du.hunk[rev]:
1945 1945 ui.write(*args)
1946 1946
1947 1947 def manifest(ui, repo, rev=None):
1948 1948 """output the latest or given revision of the project manifest
1949 1949
1950 1950 Print a list of version controlled files for the given revision.
1951 1951
1952 1952 The manifest is the list of files being version controlled. If no revision
1953 1953 is given then the tip is used.
1954 1954 """
1955 1955 if rev:
1956 1956 try:
1957 1957 # assume all revision numbers are for changesets
1958 1958 n = repo.lookup(rev)
1959 1959 change = repo.changelog.read(n)
1960 1960 n = change[0]
1961 1961 except hg.RepoError:
1962 1962 n = repo.manifest.lookup(rev)
1963 1963 else:
1964 1964 n = repo.manifest.tip()
1965 1965 m = repo.manifest.read(n)
1966 1966 files = m.keys()
1967 1967 files.sort()
1968 1968
1969 1969 for f in files:
1970 1970 ui.write("%40s %3s %s\n" % (hex(m[f]),
1971 1971 m.execf(f) and "755" or "644", f))
1972 1972
1973 1973 def merge(ui, repo, node=None, force=None, branch=None):
1974 1974 """Merge working directory with another revision
1975 1975
1976 1976 Merge the contents of the current working directory and the
1977 1977 requested revision. Files that changed between either parent are
1978 1978 marked as changed for the next commit and a commit must be
1979 1979 performed before any further updates are allowed.
1980 1980
1981 1981 If no revision is specified, the working directory's parent is a
1982 1982 head revision, and the repository contains exactly one other head,
1983 1983 the other head is merged with by default. Otherwise, an explicit
1984 1984 revision to merge with must be provided.
1985 1985 """
1986 1986
1987 1987 if node or branch:
1988 1988 node = _lookup(repo, node, branch)
1989 1989 else:
1990 1990 heads = repo.heads()
1991 1991 if len(heads) > 2:
1992 1992 raise util.Abort(_('repo has %d heads - '
1993 1993 'please merge with an explicit rev') %
1994 1994 len(heads))
1995 1995 if len(heads) == 1:
1996 1996 raise util.Abort(_('there is nothing to merge - '
1997 1997 'use "hg update" instead'))
1998 1998 parent = repo.dirstate.parents()[0]
1999 1999 if parent not in heads:
2000 2000 raise util.Abort(_('working dir not at a head rev - '
2001 2001 'use "hg update" or merge with an explicit rev'))
2002 2002 node = parent == heads[0] and heads[-1] or heads[0]
2003 2003 return hg.merge(repo, node, force=force)
2004 2004
2005 2005 def outgoing(ui, repo, dest=None, **opts):
2006 2006 """show changesets not found in destination
2007 2007
2008 2008 Show changesets not found in the specified destination repository or
2009 2009 the default push location. These are the changesets that would be pushed
2010 2010 if a push was requested.
2011 2011
2012 2012 See pull for valid destination format details.
2013 2013 """
2014 2014 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2015 2015 setremoteconfig(ui, opts)
2016 2016 revs = None
2017 2017 if opts['rev']:
2018 2018 revs = [repo.lookup(rev) for rev in opts['rev']]
2019 2019
2020 2020 other = hg.repository(ui, dest)
2021 2021 o = repo.findoutgoing(other, force=opts['force'])
2022 2022 if not o:
2023 2023 ui.status(_("no changes found\n"))
2024 2024 return
2025 2025 o = repo.changelog.nodesbetween(o, revs)[0]
2026 2026 if opts['newest_first']:
2027 2027 o.reverse()
2028 2028 displayer = show_changeset(ui, repo, opts)
2029 2029 for n in o:
2030 2030 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2031 2031 if opts['no_merges'] and len(parents) == 2:
2032 2032 continue
2033 2033 displayer.show(changenode=n)
2034 2034 if opts['patch']:
2035 2035 prev = (parents and parents[0]) or nullid
2036 2036 patch.diff(repo, prev, n)
2037 2037 ui.write("\n")
2038 2038
2039 2039 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2040 2040 """show the parents of the working dir or revision
2041 2041
2042 2042 Print the working directory's parent revisions.
2043 2043 """
2044 2044 # legacy
2045 2045 if file_ and not rev:
2046 2046 try:
2047 2047 rev = repo.lookup(file_)
2048 2048 file_ = None
2049 2049 except hg.RepoError:
2050 2050 pass
2051 2051 else:
2052 2052 ui.warn(_("'hg parent REV' is deprecated, "
2053 2053 "please use 'hg parents -r REV instead\n"))
2054 2054
2055 2055 if rev:
2056 2056 if file_:
2057 2057 ctx = repo.filectx(file_, changeid=rev)
2058 2058 else:
2059 2059 ctx = repo.changectx(rev)
2060 2060 p = [cp.node() for cp in ctx.parents()]
2061 2061 else:
2062 2062 p = repo.dirstate.parents()
2063 2063
2064 2064 br = None
2065 2065 if branches is not None:
2066 2066 ui.warn(_("the --branches option is deprecated, "
2067 2067 "please use 'hg branches' instead\n"))
2068 2068 br = repo.branchlookup(p)
2069 2069 displayer = show_changeset(ui, repo, opts)
2070 2070 for n in p:
2071 2071 if n != nullid:
2072 2072 displayer.show(changenode=n, brinfo=br)
2073 2073
2074 2074 def paths(ui, repo, search=None):
2075 2075 """show definition of symbolic path names
2076 2076
2077 2077 Show definition of symbolic path name NAME. If no name is given, show
2078 2078 definition of available names.
2079 2079
2080 2080 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2081 2081 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2082 2082 """
2083 2083 if search:
2084 2084 for name, path in ui.configitems("paths"):
2085 2085 if name == search:
2086 2086 ui.write("%s\n" % path)
2087 2087 return
2088 2088 ui.warn(_("not found!\n"))
2089 2089 return 1
2090 2090 else:
2091 2091 for name, path in ui.configitems("paths"):
2092 2092 ui.write("%s = %s\n" % (name, path))
2093 2093
2094 2094 def postincoming(ui, repo, modheads, optupdate):
2095 2095 if modheads == 0:
2096 2096 return
2097 2097 if optupdate:
2098 2098 if modheads == 1:
2099 2099 return hg.update(repo, repo.changelog.tip()) # update
2100 2100 else:
2101 2101 ui.status(_("not updating, since new heads added\n"))
2102 2102 if modheads > 1:
2103 2103 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2104 2104 else:
2105 2105 ui.status(_("(run 'hg update' to get a working copy)\n"))
2106 2106
2107 2107 def pull(ui, repo, source="default", **opts):
2108 2108 """pull changes from the specified source
2109 2109
2110 2110 Pull changes from a remote repository to a local one.
2111 2111
2112 2112 This finds all changes from the repository at the specified path
2113 2113 or URL and adds them to the local repository. By default, this
2114 2114 does not update the copy of the project in the working directory.
2115 2115
2116 2116 Valid URLs are of the form:
2117 2117
2118 2118 local/filesystem/path (or file://local/filesystem/path)
2119 2119 http://[user@]host[:port]/[path]
2120 2120 https://[user@]host[:port]/[path]
2121 2121 ssh://[user@]host[:port]/[path]
2122 2122 static-http://host[:port]/[path]
2123 2123
2124 2124 Paths in the local filesystem can either point to Mercurial
2125 2125 repositories or to bundle files (as created by 'hg bundle' or
2126 2126 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2127 2127 allows access to a Mercurial repository where you simply use a web
2128 2128 server to publish the .hg directory as static content.
2129 2129
2130 2130 Some notes about using SSH with Mercurial:
2131 2131 - SSH requires an accessible shell account on the destination machine
2132 2132 and a copy of hg in the remote path, or one specified with the --remotecmd option.
2133 2133 - path is relative to the remote user's home directory by default.
2134 2134 Use an extra slash at the start of a path to specify an absolute path:
2135 2135 ssh://example.com//tmp/repository
2136 2136 - Mercurial doesn't use its own compression via SSH; the right thing
2137 2137 to do is to configure it in your ~/.ssh/config, e.g.:
2138 2138 Host *.mylocalnetwork.example.com
2139 2139 Compression no
2140 2140 Host *
2141 2141 Compression yes
2142 2142 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2143 2143 with the --ssh command line option.
2144 2144 """
2145 2145 source = ui.expandpath(source)
2146 2146 setremoteconfig(ui, opts)
2147 2147
2148 2148 other = hg.repository(ui, source)
2149 2149 ui.status(_('pulling from %s\n') % (source))
2150 2150 revs = None
2151 2151 if opts['rev']:
2152 2152 if 'lookup' in other.capabilities:
2153 2153 revs = [other.lookup(rev) for rev in opts['rev']]
2154 2154 else:
2155 2155 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2156 2156 raise util.Abort(error)
2157 2157 modheads = repo.pull(other, heads=revs, force=opts['force'])
2158 2158 return postincoming(ui, repo, modheads, opts['update'])
2159 2159
2160 2160 def push(ui, repo, dest=None, **opts):
2161 2161 """push changes to the specified destination
2162 2162
2163 2163 Push changes from the local repository to the given destination.
2164 2164
2165 2165 This is the symmetrical operation for pull. It helps to move
2166 2166 changes from the current repository to a different one. If the
2167 2167 destination is local, this is identical to a pull in that directory
2168 2168 from the current one.
2169 2169
2170 2170 By default, push will refuse to run if it detects the result would
2171 2171 increase the number of remote heads. This generally indicates that
2172 2172 the client has forgotten to sync and merge before pushing.
2173 2173
2174 2174 Valid URLs are of the form:
2175 2175
2176 2176 local/filesystem/path (or file://local/filesystem/path)
2177 2177 ssh://[user@]host[:port]/[path]
2178 2178 http://[user@]host[:port]/[path]
2179 2179 https://[user@]host[:port]/[path]
2180 2180
2181 2181 Look at the help text for the pull command for important details
2182 2182 about ssh:// URLs.
2183 2183
2184 2184 Pushing to http:// and https:// URLs is only possible if this
2185 2185 feature is explicitly enabled on the remote Mercurial server.
2186 2186 """
2187 2187 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2188 2188 setremoteconfig(ui, opts)
2189 2189
2190 2190 other = hg.repository(ui, dest)
2191 2191 ui.status(_('pushing to %s\n') % dest)
2192 2192 revs = None
2193 2193 if opts['rev']:
2194 2194 revs = [repo.lookup(rev) for rev in opts['rev']]
2195 2195 r = repo.push(other, opts['force'], revs=revs)
2196 2196 return r == 0
2197 2197
2198 2198 def rawcommit(ui, repo, *flist, **rc):
2199 2199 """raw commit interface (DEPRECATED)
2200 2200
2201 2201 (DEPRECATED)
2202 2202 Lowlevel commit, for use in helper scripts.
2203 2203
2204 2204 This command is not intended to be used by normal users, as it is
2205 2205 primarily useful for importing from other SCMs.
2206 2206
2207 2207 This command is now deprecated and will be removed in a future
2208 2208 release, please use debugsetparents and commit instead.
2209 2209 """
2210 2210
2211 2211 ui.warn(_("(the rawcommit command is deprecated)\n"))
2212 2212
2213 2213 message = rc['message']
2214 2214 if not message and rc['logfile']:
2215 2215 try:
2216 2216 message = open(rc['logfile']).read()
2217 2217 except IOError:
2218 2218 pass
2219 2219 if not message and not rc['logfile']:
2220 2220 raise util.Abort(_("missing commit message"))
2221 2221
2222 2222 files = relpath(repo, list(flist))
2223 2223 if rc['files']:
2224 2224 files += open(rc['files']).read().splitlines()
2225 2225
2226 2226 rc['parent'] = map(repo.lookup, rc['parent'])
2227 2227
2228 2228 try:
2229 2229 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2230 2230 except ValueError, inst:
2231 2231 raise util.Abort(str(inst))
2232 2232
2233 2233 def recover(ui, repo):
2234 2234 """roll back an interrupted transaction
2235 2235
2236 2236 Recover from an interrupted commit or pull.
2237 2237
2238 2238 This command tries to fix the repository status after an interrupted
2239 2239 operation. It should only be necessary when Mercurial suggests it.
2240 2240 """
2241 2241 if repo.recover():
2242 2242 return hg.verify(repo)
2243 2243 return 1
2244 2244
2245 2245 def remove(ui, repo, *pats, **opts):
2246 2246 """remove the specified files on the next commit
2247 2247
2248 2248 Schedule the indicated files for removal from the repository.
2249 2249
2250 2250 This command schedules the files to be removed at the next commit.
2251 2251 This only removes files from the current branch, not from the
2252 2252 entire project history. If the files still exist in the working
2253 2253 directory, they will be deleted from it. If invoked with --after,
2254 2254 files that have been manually deleted are marked as removed.
2255 2255
2256 2256 Modified files and added files are not removed by default. To
2257 2257 remove them, use the -f/--force option.
2258 2258 """
2259 2259 names = []
2260 2260 if not opts['after'] and not pats:
2261 2261 raise util.Abort(_('no files specified'))
2262 2262 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2263 2263 exact = dict.fromkeys(files)
2264 2264 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2265 2265 modified, added, removed, deleted, unknown = mardu
2266 2266 remove, forget = [], []
2267 2267 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2268 2268 reason = None
2269 2269 if abs not in deleted and opts['after']:
2270 2270 reason = _('is still present')
2271 2271 elif abs in modified and not opts['force']:
2272 2272 reason = _('is modified (use -f to force removal)')
2273 2273 elif abs in added:
2274 2274 if opts['force']:
2275 2275 forget.append(abs)
2276 2276 continue
2277 2277 reason = _('has been marked for add (use -f to force removal)')
2278 2278 elif abs in unknown:
2279 2279 reason = _('is not managed')
2280 2280 elif abs in removed:
2281 2281 continue
2282 2282 if reason:
2283 2283 if exact:
2284 2284 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2285 2285 else:
2286 2286 if ui.verbose or not exact:
2287 2287 ui.status(_('removing %s\n') % rel)
2288 2288 remove.append(abs)
2289 2289 repo.forget(forget)
2290 2290 repo.remove(remove, unlink=not opts['after'])
2291 2291
2292 2292 def rename(ui, repo, *pats, **opts):
2293 2293 """rename files; equivalent of copy + remove
2294 2294
2295 2295 Mark dest as copies of sources; mark sources for deletion. If
2296 2296 dest is a directory, copies are put in that directory. If dest is
2297 2297 a file, there can only be one source.
2298 2298
2299 2299 By default, this command copies the contents of files as they
2300 2300 stand in the working directory. If invoked with --after, the
2301 2301 operation is recorded, but no copying is performed.
2302 2302
2303 2303 This command takes effect in the next commit.
2304 2304
2305 2305 NOTE: This command should be treated as experimental. While it
2306 2306 should properly record renamed files, this information is not yet
2307 2307 fully used by merge, nor fully reported by log.
2308 2308 """
2309 2309 wlock = repo.wlock(0)
2310 2310 errs, copied = docopy(ui, repo, pats, opts, wlock)
2311 2311 names = []
2312 2312 for abs, rel, exact in copied:
2313 2313 if ui.verbose or not exact:
2314 2314 ui.status(_('removing %s\n') % rel)
2315 2315 names.append(abs)
2316 2316 if not opts.get('dry_run'):
2317 2317 repo.remove(names, True, wlock)
2318 2318 return errs
2319 2319
2320 2320 def revert(ui, repo, *pats, **opts):
2321 2321 """revert files or dirs to their states as of some revision
2322 2322
2323 2323 With no revision specified, revert the named files or directories
2324 2324 to the contents they had in the parent of the working directory.
2325 2325 This restores the contents of the affected files to an unmodified
2326 2326 state. If the working directory has two parents, you must
2327 2327 explicitly specify the revision to revert to.
2328 2328
2329 2329 Modified files are saved with a .orig suffix before reverting.
2330 2330 To disable these backups, use --no-backup.
2331 2331
2332 2332 Using the -r option, revert the given files or directories to their
2333 2333 contents as of a specific revision. This can be helpful to "roll
2334 2334 back" some or all of a change that should not have been committed.
2335 2335
2336 2336 Revert modifies the working directory. It does not commit any
2337 2337 changes, or change the parent of the working directory. If you
2338 2338 revert to a revision other than the parent of the working
2339 2339 directory, the reverted files will thus appear modified
2340 2340 afterwards.
2341 2341
2342 2342 If a file has been deleted, it is recreated. If the executable
2343 2343 mode of a file was changed, it is reset.
2344 2344
2345 2345 If names are given, all files matching the names are reverted.
2346 2346
2347 2347 If no arguments are given, no files are reverted.
2348 2348 """
2349 2349
2350 2350 if not pats and not opts['all']:
2351 2351 raise util.Abort(_('no files or directories specified; '
2352 2352 'use --all to revert the whole repo'))
2353 2353
2354 2354 parent, p2 = repo.dirstate.parents()
2355 2355 if not opts['rev'] and p2 != nullid:
2356 2356 raise util.Abort(_('uncommitted merge - please provide a '
2357 2357 'specific revision'))
2358 2358 node = repo.changectx(opts['rev']).node()
2359 2359 mf = repo.manifest.read(repo.changelog.read(node)[0])
2360 2360 if node == parent:
2361 2361 pmf = mf
2362 2362 else:
2363 2363 pmf = None
2364 2364
2365 2365 wlock = repo.wlock()
2366 2366
2367 2367 # need all matching names in dirstate and manifest of target rev,
2368 2368 # so have to walk both. do not print errors if files exist in one
2369 2369 # but not other.
2370 2370
2371 2371 names = {}
2372 2372 target_only = {}
2373 2373
2374 2374 # walk dirstate.
2375 2375
2376 2376 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2377 2377 badmatch=mf.has_key):
2378 2378 names[abs] = (rel, exact)
2379 2379 if src == 'b':
2380 2380 target_only[abs] = True
2381 2381
2382 2382 # walk target manifest.
2383 2383
2384 2384 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2385 2385 badmatch=names.has_key):
2386 2386 if abs in names: continue
2387 2387 names[abs] = (rel, exact)
2388 2388 target_only[abs] = True
2389 2389
2390 2390 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2391 2391 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2392 2392
2393 2393 revert = ([], _('reverting %s\n'))
2394 2394 add = ([], _('adding %s\n'))
2395 2395 remove = ([], _('removing %s\n'))
2396 2396 forget = ([], _('forgetting %s\n'))
2397 2397 undelete = ([], _('undeleting %s\n'))
2398 2398 update = {}
2399 2399
2400 2400 disptable = (
2401 2401 # dispatch table:
2402 2402 # file state
2403 2403 # action if in target manifest
2404 2404 # action if not in target manifest
2405 2405 # make backup if in target manifest
2406 2406 # make backup if not in target manifest
2407 2407 (modified, revert, remove, True, True),
2408 2408 (added, revert, forget, True, False),
2409 2409 (removed, undelete, None, False, False),
2410 2410 (deleted, revert, remove, False, False),
2411 2411 (unknown, add, None, True, False),
2412 2412 (target_only, add, None, False, False),
2413 2413 )
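# examples of how the table reads: a file in 'modified' that still exists in
# the target manifest is reverted (and its working copy saved as .orig); the
# same file absent from the target manifest is removed instead (also backed
# up). A file present only in the target manifest is simply added back.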
2414 2414
2415 2415 entries = names.items()
2416 2416 entries.sort()
2417 2417
2418 2418 for abs, (rel, exact) in entries:
2419 2419 mfentry = mf.get(abs)
2420 2420 def handle(xlist, dobackup):
2421 2421 xlist[0].append(abs)
2422 2422 update[abs] = 1
2423 2423 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2424 2424 bakname = "%s.orig" % rel
2425 2425 ui.note(_('saving current version of %s as %s\n') %
2426 2426 (rel, bakname))
2427 2427 if not opts.get('dry_run'):
2428 2428 shutil.copyfile(rel, bakname)
2429 2429 shutil.copymode(rel, bakname)
2430 2430 if ui.verbose or not exact:
2431 2431 ui.status(xlist[1] % rel)
2432 2432 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2433 2433 if abs not in table: continue
2434 2434 # file has changed in dirstate
2435 2435 if mfentry:
2436 2436 handle(hitlist, backuphit)
2437 2437 elif misslist is not None:
2438 2438 handle(misslist, backupmiss)
2439 2439 else:
2440 2440 if exact: ui.warn(_('file not managed: %s\n') % rel)
2441 2441 break
2442 2442 else:
2443 2443 # file has not changed in dirstate
2444 2444 if node == parent:
2445 2445 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2446 2446 continue
2447 2447 if pmf is None:
2448 2448 # only need parent manifest in this unlikely case,
2449 2449 # so do not read by default
2450 2450 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2451 2451 if abs in pmf:
2452 2452 if mfentry:
2453 2453 # if version of file is same in parent and target
2454 2454 # manifests, do nothing
2455 2455 if pmf[abs] != mfentry:
2456 2456 handle(revert, False)
2457 2457 else:
2458 2458 handle(remove, False)
2459 2459
2460 2460 if not opts.get('dry_run'):
2461 2461 repo.dirstate.forget(forget[0])
2462 2462 r = hg.revert(repo, node, update.has_key, wlock)
2463 2463 repo.dirstate.update(add[0], 'a')
2464 2464 repo.dirstate.update(undelete[0], 'n')
2465 2465 repo.dirstate.update(remove[0], 'r')
2466 2466 return r
2467 2467
2468 2468 def rollback(ui, repo):
2469 2469 """roll back the last transaction in this repository
2470 2470
2471 2471 Roll back the last transaction in this repository, restoring the
2472 2472 project to its state prior to the transaction.
2473 2473
2474 2474 Transactions are used to encapsulate the effects of all commands
2475 2475 that create new changesets or propagate existing changesets into a
2476 2476 repository. For example, the following commands are transactional,
2477 2477 and their effects can be rolled back:
2478 2478
2479 2479 commit
2480 2480 import
2481 2481 pull
2482 2482 push (with this repository as destination)
2483 2483 unbundle
2484 2484
2485 2485 This command should be used with care. There is only one level of
2486 2486 rollback, and there is no way to undo a rollback.
2487 2487
2488 2488 This command is not intended for use on public repositories. Once
2489 2489 changes are visible for pull by other users, rolling a transaction
2490 2490 back locally is ineffective (someone else may already have pulled
2491 2491 the changes). Furthermore, a race is possible with readers of the
2492 2492 repository; for example an in-progress pull from the repository
2493 2493 may fail if a rollback is performed.
2494 2494 """
2495 2495 repo.rollback()
2496 2496
2497 2497 def root(ui, repo):
2498 2498 """print the root (top) of the current working dir
2499 2499
2500 2500 Print the root directory of the current repository.
2501 2501 """
2502 2502 ui.write(repo.root + "\n")
2503 2503
2504 2504 def serve(ui, repo, **opts):
2505 2505 """export the repository via HTTP
2506 2506
2507 2507 Start a local HTTP repository browser and pull server.
2508 2508
2509 2509 By default, the server logs accesses to stdout and errors to
2510 2510 stderr. Use the "-A" and "-E" options to log to files.
2511 2511 """
2512 2512
2513 2513 if opts["stdio"]:
2514 2514 if repo is None:
2515 2515 raise hg.RepoError(_("There is no Mercurial repository here"
2516 2516 " (.hg not found)"))
2517 2517 s = sshserver.sshserver(ui, repo)
2518 2518 s.serve_forever()
2519 2519
2520 2520 optlist = ("name templates style address port ipv6"
2521 2521 " accesslog errorlog webdir_conf")
2522 2522 for o in optlist.split():
2523 2523 if opts[o]:
2524 2524 ui.setconfig("web", o, str(opts[o]))
2525 2525
2526 2526 if repo is None and not ui.config("web", "webdir_conf"):
2527 2527 raise hg.RepoError(_("There is no Mercurial repository here"
2528 2528 " (.hg not found)"))
2529 2529
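# daemon mode: the parent re-runs hg with --daemon-pipefds and blocks on the
# read end of a pipe until the child writes a byte (the 'y' below) once the
# server socket is ready; the child then redirects its stdio to the null
# device and keeps serving.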
2530 2530 if opts['daemon'] and not opts['daemon_pipefds']:
2531 2531 rfd, wfd = os.pipe()
2532 2532 args = sys.argv[:]
2533 2533 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2534 2534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2535 2535 args[0], args)
2536 2536 os.close(wfd)
2537 2537 os.read(rfd, 1)
2538 2538 os._exit(0)
2539 2539
2540 2540 try:
2541 2541 httpd = hgweb.server.create_server(ui, repo)
2542 2542 except socket.error, inst:
2543 2543 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2544 2544
2545 2545 if ui.verbose:
2546 2546 addr, port = httpd.socket.getsockname()
2547 2547 if addr == '0.0.0.0':
2548 2548 addr = socket.gethostname()
2549 2549 else:
2550 2550 try:
2551 2551 addr = socket.gethostbyaddr(addr)[0]
2552 2552 except socket.error:
2553 2553 pass
2554 2554 if port != 80:
2555 2555 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2556 2556 else:
2557 2557 ui.status(_('listening at http://%s/\n') % addr)
2558 2558
2559 2559 if opts['pid_file']:
2560 2560 fp = open(opts['pid_file'], 'w')
2561 2561 fp.write(str(os.getpid()) + '\n')
2562 2562 fp.close()
2563 2563
2564 2564 if opts['daemon_pipefds']:
2565 2565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2566 2566 os.close(rfd)
2567 2567 os.write(wfd, 'y')
2568 2568 os.close(wfd)
2569 2569 sys.stdout.flush()
2570 2570 sys.stderr.flush()
2571 2571 fd = os.open(util.nulldev, os.O_RDWR)
2572 2572 if fd != 0: os.dup2(fd, 0)
2573 2573 if fd != 1: os.dup2(fd, 1)
2574 2574 if fd != 2: os.dup2(fd, 2)
2575 2575 if fd not in (0, 1, 2): os.close(fd)
2576 2576
2577 2577 httpd.serve_forever()
2578 2578
2579 2579 def status(ui, repo, *pats, **opts):
2580 2580 """show changed files in the working directory
2581 2581
2582 2582 Show status of files in the repository. If names are given, only
2583 2583 files that match are shown. Files that are clean or ignored are
2584 2584 not listed unless -c (clean), -i (ignored) or -A is given.
2585 2585
2586 2586 If one revision is given, it is used as the base revision.
2587 2587 If two revisions are given, the difference between them is shown.
2588 2588
2589 2589 The codes used to show the status of files are:
2590 2590 M = modified
2591 2591 A = added
2592 2592 R = removed
2593 2593 C = clean
2594 2594 ! = deleted, but still tracked
2595 2595 ? = not tracked
2596 2596 I = ignored (not shown by default)
2597 2597 = the previously added file was copied from here
2598 2598 """
2599 2599
2600 2600 all = opts['all']
2601 2601 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2602 2602
2603 2603 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2604 2604 cwd = (pats and repo.getcwd()) or ''
2605 2605 modified, added, removed, deleted, unknown, ignored, clean = [
2606 2606 [util.pathto(cwd, x) for x in n]
2607 2607 for n in repo.status(node1=node1, node2=node2, files=files,
2608 2608 match=matchfn,
2609 2609 list_ignored=all or opts['ignored'],
2610 2610 list_clean=all or opts['clean'])]
2611 2611
2612 2612 changetypes = (('modified', 'M', modified),
2613 2613 ('added', 'A', added),
2614 2614 ('removed', 'R', removed),
2615 2615 ('deleted', '!', deleted),
2616 2616 ('unknown', '?', unknown),
2617 2617 ('ignored', 'I', ignored))
2618 2618
2619 2619 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2620 2620
2621 2621 end = opts['print0'] and '\0' or '\n'
2622 2622
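# if neither --all nor any per-type option was given, the comprehension is
# empty and we fall back to 'changetypes', i.e. everything except clean files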
2623 2623 for opt, char, changes in ([ct for ct in explicit_changetypes
2624 2624 if all or opts[ct[0]]]
2625 2625 or changetypes):
2626 2626 if opts['no_status']:
2627 2627 format = "%%s%s" % end
2628 2628 else:
2629 2629 format = "%s %%s%s" % (char, end)
2630 2630
2631 2631 for f in changes:
2632 2632 ui.write(format % f)
2633 2633 if ((all or opts.get('copies')) and not opts.get('no_status')):
2634 2634 copied = repo.dirstate.copied(f)
2635 2635 if copied:
2636 2636 ui.write(' %s%s' % (copied, end))
2637 2637
2638 2638 def tag(ui, repo, name, rev_=None, **opts):
2639 2639 """add a tag for the current tip or a given revision
2640 2640
2641 2641 Name a particular revision using <name>.
2642 2642
2643 2643 Tags are used to name particular revisions of the repository and are
2644 2644 very useful to compare different revisions, to go back to significant
2645 2645 earlier versions or to mark branch points as releases, etc.
2646 2646
2647 2647 If no revision is given, the parent of the working directory is used.
2648 2648
2649 2649 To facilitate version control, distribution, and merging of tags,
2650 2650 they are stored as a file named ".hgtags" which is managed
2651 2651 similarly to other project files and can be hand-edited if
2652 2652 necessary. The file '.hg/localtags' is used for local tags (not
2653 2653 shared among repositories).
2654 2654 """
2655 2655 if name in ['tip', '.']:
2656 2656 raise util.Abort(_("the name '%s' is reserved") % name)
2657 2657 if rev_ is not None:
2658 2658 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2659 2659 "please use 'hg tag [-r REV] NAME' instead\n"))
2660 2660 if opts['rev']:
2661 2661 raise util.Abort(_("use only one form to specify the revision"))
2662 2662 if opts['rev']:
2663 2663 rev_ = opts['rev']
2664 2664 if not rev_ and repo.dirstate.parents()[1] != nullid:
2665 2665 raise util.Abort(_('uncommitted merge - please provide a '
2666 2666 'specific revision'))
2667 2667 r = repo.changectx(rev_).node()
2668 2668
2669 2669 message = opts['message']
2670 2670 if not message:
2671 2671 message = _('Added tag %s for changeset %s') % (name, short(r))
2672 2672
2673 2673 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2674 2674
2675 2675 def tags(ui, repo):
2676 2676 """list repository tags
2677 2677
2678 2678 List the repository tags.
2679 2679
2680 2680 This lists both regular and local tags.
2681 2681 """
2682 2682
2683 2683 l = repo.tagslist()
2684 2684 l.reverse()
2685 2685 hexfunc = ui.debugflag and hex or short
2686 2686 for t, n in l:
2687 2687 try:
2688 2688 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2689 2689 except KeyError:
2690 2690 r = " ?:?"
2691 2691 if ui.quiet:
2692 2692 ui.write("%s\n" % t)
2693 2693 else:
2694 2694 ui.write("%-30s %s\n" % (t, r))
2695 2695
2696 2696 def tip(ui, repo, **opts):
2697 2697 """show the tip revision
2698 2698
2699 2699 Show the tip revision.
2700 2700 """
2701 2701 n = repo.changelog.tip()
2702 2702 br = None
2703 2703 if opts['branches']:
2704 2704 ui.warn(_("the --branches option is deprecated, "
2705 2705 "please use 'hg branches' instead\n"))
2706 2706 br = repo.branchlookup([n])
2707 2707 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2708 2708 if opts['patch']:
2709 2709 patch.diff(repo, repo.changelog.parents(n)[0], n)
2710 2710
2711 2711 def unbundle(ui, repo, fname, **opts):
2712 2712 """apply a changegroup file
2713 2713
2714 2714 Apply a compressed changegroup file generated by the bundle
2715 2715 command.
2716 2716 """
2717 2717 f = urllib.urlopen(fname)
2718 2718
2719 2719 header = f.read(6)
2720 2720 if not header.startswith("HG"):
2721 2721 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2722 2722 elif not header.startswith("HG10"):
2723 2723 raise util.Abort(_("%s: unknown bundle version") % fname)
2724 2724 elif header == "HG10BZ":
2725 2725 def generator(f):
2726 2726 zd = bz2.BZ2Decompressor()
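# the 6-byte header read above already consumed the leading 'BZ' of the bz2
# magic ('HG10' + 'BZh...'), so prime the decompressor with those two bytes
# before feeding it the remaining chunks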
2727 2727 zd.decompress("BZ")
2728 2728 for chunk in f:
2729 2729 yield zd.decompress(chunk)
2730 2730 elif header == "HG10UN":
2731 2731 def generator(f):
2732 2732 for chunk in f:
2733 2733 yield chunk
2734 2734 else:
2735 2735 raise util.Abort(_("%s: unknown bundle compression type")
2736 2736 % fname)
2737 2737 gen = generator(util.filechunkiter(f, 4096))
2738 2738 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2739 2739 'bundle:' + fname)
2740 2740 return postincoming(ui, repo, modheads, opts['update'])
2741 2741
2742 2742 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2743 2743 branch=None):
2744 2744 """update or merge working directory
2745 2745
2746 2746 Update the working directory to the specified revision.
2747 2747
2748 2748 If there are no outstanding changes in the working directory and
2749 2749 there is a linear relationship between the current version and the
2750 2750 requested version, the result is the requested version.
2751 2751
2752 2752 To merge the working directory with another revision, use the
2753 2753 merge command.
2754 2754
2755 2755 By default, update will refuse to run if doing so would require
2756 2756 merging or discarding local changes.
2757 2757 """
2758 2758 node = _lookup(repo, node, branch)
2759 2759 if clean:
2760 2760 return hg.clean(repo, node)
2761 2761 else:
2762 2762 return hg.update(repo, node)
2763 2763
2764 2764 def _lookup(repo, node, branch=None):
2765 2765 if branch:
2766 2766 repo.ui.warn(_("the --branch option is deprecated, "
2767 2767 "please use 'hg branch' instead\n"))
2768 2768 br = repo.branchlookup(branch=branch)
2769 2769 found = []
2770 2770 for x in br:
2771 2771 if branch in br[x]:
2772 2772 found.append(x)
2773 2773 if len(found) > 1:
2774 2774 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2775 2775 for x in found:
2776 2776 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2777 2777 raise util.Abort("")
2778 2778 if len(found) == 1:
2779 2779 node = found[0]
2780 2780 repo.ui.warn(_("Using head %s for branch %s\n")
2781 2781 % (short(node), branch))
2782 2782 else:
2783 2783 raise util.Abort(_("branch %s not found") % branch)
2784 2784 else:
2785 2785 node = node and repo.lookup(node) or repo.changelog.tip()
2786 2786 return node
2787 2787
2788 2788 def verify(ui, repo):
2789 2789 """verify the integrity of the repository
2790 2790
2791 2791 Verify the integrity of the current repository.
2792 2792
2793 2793 This will perform an extensive check of the repository's
2794 2794 integrity, validating the hashes and checksums of each entry in
2795 2795 the changelog, manifest, and tracked files, as well as the
2796 2796 integrity of their crosslinks and indices.
2797 2797 """
2798 2798 return hg.verify(repo)
2799 2799
2800 2800 # Command options and aliases are listed here, alphabetically
2801 2801
2802 2802 globalopts = [
2803 2803 ('R', 'repository', '',
2804 2804 _('repository root directory or symbolic path name')),
2805 2805 ('', 'cwd', '', _('change working directory')),
2806 2806 ('y', 'noninteractive', None,
2807 2807 _('do not prompt, assume \'yes\' for any required answers')),
2808 2808 ('q', 'quiet', None, _('suppress output')),
2809 2809 ('v', 'verbose', None, _('enable additional output')),
2810 2810 ('', 'config', [], _('set/override config option')),
2811 2811 ('', 'debug', None, _('enable debugging output')),
2812 2812 ('', 'debugger', None, _('start debugger')),
2813 2813 ('', 'lsprof', None, _('print improved command execution profile')),
2814 2814 ('', 'traceback', None, _('print traceback on exception')),
2815 2815 ('', 'time', None, _('time how long the command takes')),
2816 2816 ('', 'profile', None, _('print command execution profile')),
2817 2817 ('', 'version', None, _('output version information and exit')),
2818 2818 ('h', 'help', None, _('display help and exit')),
2819 2819 ]
2820 2820
2821 2821 dryrunopts = [('n', 'dry-run', None,
2822 2822 _('do not perform actions, just print output'))]
2823 2823
2824 2824 remoteopts = [
2825 2825 ('e', 'ssh', '', _('specify ssh command to use')),
2826 2826 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2827 2827 ]
2828 2828
2829 2829 walkopts = [
2830 2830 ('I', 'include', [], _('include names matching the given patterns')),
2831 2831 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2832 2832 ]
2833 2833
2834 2834 table = {
2835 2835 "^add":
2836 2836 (add,
2837 2837 walkopts + dryrunopts,
2838 2838 _('hg add [OPTION]... [FILE]...')),
2839 2839 "addremove":
2840 2840 (addremove,
2841 2841 [('s', 'similarity', '',
2842 2842 _('guess renamed files by similarity (0<=s<=100)')),
2843 2843 ] + walkopts + dryrunopts,
2844 2844 _('hg addremove [OPTION]... [FILE]...')),
2845 2845 "^annotate":
2846 2846 (annotate,
2847 2847 [('r', 'rev', '', _('annotate the specified revision')),
2848 2848 ('f', 'follow', None, _('follow file copies and renames')),
2849 2849 ('a', 'text', None, _('treat all files as text')),
2850 2850 ('u', 'user', None, _('list the author')),
2851 2851 ('d', 'date', None, _('list the date')),
2852 2852 ('n', 'number', None, _('list the revision number (default)')),
2853 2853 ('c', 'changeset', None, _('list the changeset')),
2854 2854 ] + walkopts,
2855 2855 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2856 2856 "archive":
2857 2857 (archive,
2858 2858 [('', 'no-decode', None, _('do not pass files through decoders')),
2859 2859 ('p', 'prefix', '', _('directory prefix for files in archive')),
2860 2860 ('r', 'rev', '', _('revision to distribute')),
2861 2861 ('t', 'type', '', _('type of distribution to create')),
2862 2862 ] + walkopts,
2863 2863 _('hg archive [OPTION]... DEST')),
2864 2864 "backout":
2865 2865 (backout,
2866 2866 [('', 'merge', None,
2867 2867 _('merge with old dirstate parent after backout')),
2868 2868 ('m', 'message', '', _('use <text> as commit message')),
2869 2869 ('l', 'logfile', '', _('read commit message from <file>')),
2870 2870 ('d', 'date', '', _('record datecode as commit date')),
2871 2871 ('', 'parent', '', _('parent to choose when backing out merge')),
2872 2872 ('u', 'user', '', _('record user as committer')),
2873 2873 ] + walkopts,
2874 2874 _('hg backout [OPTION]... REV')),
2875 2875 "branch": (branch, [], _('hg branch [NAME]')),
2876 2876 "branches": (branches, [], _('hg branches')),
2877 2877 "bundle":
2878 2878 (bundle,
2879 2879 [('f', 'force', None,
2880 2880 _('run even when remote repository is unrelated')),
2881 2881 ('r', 'rev', [],
2882 2882 _('a changeset you would like to bundle')),
2883 2883 ('', 'base', [],
2884 2884 _('a base changeset to specify instead of a destination')),
2885 2885 ] + remoteopts,
2886 2886 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2887 2887 "cat":
2888 2888 (cat,
2889 2889 [('o', 'output', '', _('print output to file with formatted name')),
2890 2890 ('r', 'rev', '', _('print the given revision')),
2891 2891 ] + walkopts,
2892 2892 _('hg cat [OPTION]... FILE...')),
2893 2893 "^clone":
2894 2894 (clone,
2895 2895 [('U', 'noupdate', None, _('do not update the new working directory')),
2896 2896 ('r', 'rev', [],
2897 2897 _('a changeset you would like to have after cloning')),
2898 2898 ('', 'pull', None, _('use pull protocol to copy metadata')),
2899 2899 ('', 'uncompressed', None,
2900 2900 _('use uncompressed transfer (fast over LAN)')),
2901 2901 ] + remoteopts,
2902 2902 _('hg clone [OPTION]... SOURCE [DEST]')),
2903 2903 "^commit|ci":
2904 2904 (commit,
2905 2905 [('A', 'addremove', None,
2906 2906 _('mark new/missing files as added/removed before committing')),
2907 2907 ('m', 'message', '', _('use <text> as commit message')),
2908 2908 ('l', 'logfile', '', _('read the commit message from <file>')),
2909 2909 ('d', 'date', '', _('record datecode as commit date')),
2910 2910 ('u', 'user', '', _('record user as committer')),
2911 2911 ] + walkopts,
2912 2912 _('hg commit [OPTION]... [FILE]...')),
2913 2913 "copy|cp":
2914 2914 (copy,
2915 2915 [('A', 'after', None, _('record a copy that has already occurred')),
2916 2916 ('f', 'force', None,
2917 2917 _('forcibly copy over an existing managed file')),
2918 2918 ] + walkopts + dryrunopts,
2919 2919 _('hg copy [OPTION]... [SOURCE]... DEST')),
2920 2920 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2921 2921 "debugcomplete":
2922 2922 (debugcomplete,
2923 2923 [('o', 'options', None, _('show the command options'))],
2924 2924 _('debugcomplete [-o] CMD')),
2925 2925 "debugrebuildstate":
2926 2926 (debugrebuildstate,
2927 2927 [('r', 'rev', '', _('revision to rebuild to'))],
2928 2928 _('debugrebuildstate [-r REV] [REV]')),
2929 2929 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2930 2930 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2931 2931 "debugstate": (debugstate, [], _('debugstate')),
2932 2932 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2933 2933 "debugindex": (debugindex, [], _('debugindex FILE')),
2934 2934 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2935 2935 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2936 2936 "debugwalk":
2937 2937 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2938 2938 "^diff":
2939 2939 (diff,
2940 2940 [('r', 'rev', [], _('revision')),
2941 2941 ('a', 'text', None, _('treat all files as text')),
2942 2942 ('p', 'show-function', None,
2943 2943 _('show which function each change is in')),
2944 2944 ('g', 'git', None, _('use git extended diff format')),
2945 2945 ('', 'nodates', None, _("don't include dates in diff headers")),
2946 2946 ('w', 'ignore-all-space', None,
2947 2947 _('ignore white space when comparing lines')),
2948 2948 ('b', 'ignore-space-change', None,
2949 2949 _('ignore changes in the amount of white space')),
2950 2950 ('B', 'ignore-blank-lines', None,
2951 2951 _('ignore changes whose lines are all blank')),
2952 2952 ] + walkopts,
2953 2953 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2954 2954 "^export":
2955 2955 (export,
2956 2956 [('o', 'output', '', _('print output to file with formatted name')),
2957 2957 ('a', 'text', None, _('treat all files as text')),
2958 2958 ('g', 'git', None, _('use git extended diff format')),
2959 2959 ('', 'nodates', None, _("don't include dates in diff headers")),
2960 2960 ('', 'switch-parent', None, _('diff against the second parent'))],
2961 2961 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2962 2962 "grep":
2963 2963 (grep,
2964 2964 [('0', 'print0', None, _('end fields with NUL')),
2965 2965 ('', 'all', None, _('print all revisions that match')),
2966 2966 ('f', 'follow', None,
2967 2967 _('follow changeset history, or file history across copies and renames')),
2968 2968 ('i', 'ignore-case', None, _('ignore case when matching')),
2969 2969 ('l', 'files-with-matches', None,
2970 2970 _('print only filenames and revs that match')),
2971 2971 ('n', 'line-number', None, _('print matching line numbers')),
2972 2972 ('r', 'rev', [], _('search in given revision range')),
2973 2973 ('u', 'user', None, _('print user who committed change')),
2974 2974 ] + walkopts,
2975 2975 _('hg grep [OPTION]... PATTERN [FILE]...')),
2976 2976 "heads":
2977 2977 (heads,
2978 2978 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2979 2979 ('', 'style', '', _('display using template map file')),
2980 2980 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2981 2981 ('', 'template', '', _('display with template'))],
2982 2982 _('hg heads [-r REV]')),
2983 2983 "help": (help_, [], _('hg help [COMMAND]')),
2984 2984 "identify|id": (identify, [], _('hg identify')),
2985 2985 "import|patch":
2986 2986 (import_,
2987 2987 [('p', 'strip', 1,
2988 2988 _('directory strip option for patch. This has the same\n'
2989 2989 'meaning as the corresponding patch option')),
2990 2990 ('m', 'message', '', _('use <text> as commit message')),
2991 2991 ('b', 'base', '', _('base path (DEPRECATED)')),
2992 2992 ('f', 'force', None,
2993 2993 _('skip check for outstanding uncommitted changes'))],
2994 2994 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2995 2995 "incoming|in": (incoming,
2996 2996 [('M', 'no-merges', None, _('do not show merges')),
2997 2997 ('f', 'force', None,
2998 2998 _('run even when remote repository is unrelated')),
2999 2999 ('', 'style', '', _('display using template map file')),
3000 3000 ('n', 'newest-first', None, _('show newest record first')),
3001 3001 ('', 'bundle', '', _('file to store the bundles into')),
3002 3002 ('p', 'patch', None, _('show patch')),
3003 3003 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3004 3004 ('', 'template', '', _('display with template')),
3005 3005 ] + remoteopts,
3006 3006 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3007 3007 ' [--bundle FILENAME] [SOURCE]')),
3008 3008 "^init":
3009 3009 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3010 3010 "locate":
3011 3011 (locate,
3012 3012 [('r', 'rev', '', _('search the repository as it stood at rev')),
3013 3013 ('0', 'print0', None,
3014 3014 _('end filenames with NUL, for use with xargs')),
3015 3015 ('f', 'fullpath', None,
3016 3016 _('print complete paths from the filesystem root')),
3017 3017 ] + walkopts,
3018 3018 _('hg locate [OPTION]... [PATTERN]...')),
3019 3019 "^log|history":
3020 3020 (log,
3021 3021 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3022 3022 ('f', 'follow', None,
3023 3023 _('follow changeset history, or file history across copies and renames')),
3024 3024 ('', 'follow-first', None,
3025 3025 _('only follow the first parent of merge changesets')),
3026 3026 ('C', 'copies', None, _('show copied files')),
3027 3027 ('k', 'keyword', [], _('search for a keyword')),
3028 3028 ('l', 'limit', '', _('limit number of changes displayed')),
3029 3029 ('r', 'rev', [], _('show the specified revision or range')),
3030 3030 ('M', 'no-merges', None, _('do not show merges')),
3031 3031 ('', 'style', '', _('display using template map file')),
3032 3032 ('m', 'only-merges', None, _('show only merges')),
3033 3033 ('p', 'patch', None, _('show patch')),
3034 3034 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3035 3035 ('', 'template', '', _('display with template')),
3036 3036 ] + walkopts,
3037 3037 _('hg log [OPTION]... [FILE]')),
3038 3038 "manifest": (manifest, [], _('hg manifest [REV]')),
3039 3039 "merge":
3040 3040 (merge,
3041 3041 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3042 3042 ('f', 'force', None, _('force a merge with outstanding changes'))],
3043 3043 _('hg merge [-f] [REV]')),
3044 3044 "outgoing|out": (outgoing,
3045 3045 [('M', 'no-merges', None, _('do not show merges')),
3046 3046 ('f', 'force', None,
3047 3047 _('run even when remote repository is unrelated')),
3048 3048 ('p', 'patch', None, _('show patch')),
3049 3049 ('', 'style', '', _('display using template map file')),
3050 3050 ('r', 'rev', [], _('a specific revision you would like to push')),
3051 3051 ('n', 'newest-first', None, _('show newest record first')),
3052 3052 ('', 'template', '', _('display with template')),
3053 3053 ] + remoteopts,
3054 3054 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3055 3055 "^parents":
3056 3056 (parents,
3057 3057 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3058 3058 ('r', 'rev', '', _('show parents from the specified rev')),
3059 3059 ('', 'style', '', _('display using template map file')),
3060 3060 ('', 'template', '', _('display with template'))],
3061 3061 _('hg parents [-r REV] [FILE]')),
3062 3062 "paths": (paths, [], _('hg paths [NAME]')),
3063 3063 "^pull":
3064 3064 (pull,
3065 3065 [('u', 'update', None,
3066 3066 _('update to new tip if changesets were pulled')),
3067 3067 ('f', 'force', None,
3068 3068 _('run even when remote repository is unrelated')),
3069 3069 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3070 3070 ] + remoteopts,
3071 3071 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3072 3072 "^push":
3073 3073 (push,
3074 3074 [('f', 'force', None, _('force push')),
3075 3075 ('r', 'rev', [], _('a specific revision you would like to push')),
3076 3076 ] + remoteopts,
3077 3077 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3078 3078 "debugrawcommit|rawcommit":
3079 3079 (rawcommit,
3080 3080 [('p', 'parent', [], _('parent')),
3081 3081 ('d', 'date', '', _('date code')),
3082 3082 ('u', 'user', '', _('user')),
3083 3083 ('F', 'files', '', _('file list')),
3084 3084 ('m', 'message', '', _('commit message')),
3085 3085 ('l', 'logfile', '', _('commit message file'))],
3086 3086 _('hg debugrawcommit [OPTION]... [FILE]...')),
3087 3087 "recover": (recover, [], _('hg recover')),
3088 3088 "^remove|rm":
3089 3089 (remove,
3090 3090 [('A', 'after', None, _('record remove that has already occurred')),
3091 3091 ('f', 'force', None, _('remove file even if modified')),
3092 3092 ] + walkopts,
3093 3093 _('hg remove [OPTION]... FILE...')),
3094 3094 "rename|mv":
3095 3095 (rename,
3096 3096 [('A', 'after', None, _('record a rename that has already occurred')),
3097 3097 ('f', 'force', None,
3098 3098 _('forcibly copy over an existing managed file')),
3099 3099 ] + walkopts + dryrunopts,
3100 3100 _('hg rename [OPTION]... SOURCE... DEST')),
3101 3101 "^revert":
3102 3102 (revert,
3103 3103 [('a', 'all', None, _('revert all changes when no arguments given')),
3104 3104 ('r', 'rev', '', _('revision to revert to')),
3105 3105 ('', 'no-backup', None, _('do not save backup copies of files')),
3106 3106 ] + walkopts + dryrunopts,
3107 3107 _('hg revert [-r REV] [NAME]...')),
3108 3108 "rollback": (rollback, [], _('hg rollback')),
3109 3109 "root": (root, [], _('hg root')),
3110 3110 "showconfig|debugconfig":
3111 3111 (showconfig,
3112 3112 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3113 3113 _('showconfig [-u] [NAME]...')),
3114 3114 "^serve":
3115 3115 (serve,
3116 3116 [('A', 'accesslog', '', _('name of access log file to write to')),
3117 3117 ('d', 'daemon', None, _('run server in background')),
3118 3118 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3119 3119 ('E', 'errorlog', '', _('name of error log file to write to')),
3120 3120 ('p', 'port', 0, _('port to use (default: 8000)')),
3121 3121 ('a', 'address', '', _('address to use')),
3122 3122 ('n', 'name', '',
3123 3123 _('name to show in web pages (default: working dir)')),
3124 3124 ('', 'webdir-conf', '', _('name of the webdir config file'
3125 3125 ' (serve more than one repo)')),
3126 3126 ('', 'pid-file', '', _('name of file to write process ID to')),
3127 3127 ('', 'stdio', None, _('for remote clients')),
3128 3128 ('t', 'templates', '', _('web templates to use')),
3129 3129 ('', 'style', '', _('template style to use')),
3130 3130 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3131 3131 _('hg serve [OPTION]...')),
3132 3132 "^status|st":
3133 3133 (status,
3134 3134 [('A', 'all', None, _('show status of all files')),
3135 3135 ('m', 'modified', None, _('show only modified files')),
3136 3136 ('a', 'added', None, _('show only added files')),
3137 3137 ('r', 'removed', None, _('show only removed files')),
3138 3138 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3139 3139 ('c', 'clean', None, _('show only files without changes')),
3140 3140 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3141 3141 ('i', 'ignored', None, _('show ignored files')),
3142 3142 ('n', 'no-status', None, _('hide status prefix')),
3143 3143 ('C', 'copies', None, _('show source of copied files')),
3144 3144 ('0', 'print0', None,
3145 3145 _('end filenames with NUL, for use with xargs')),
3146 3146 ('', 'rev', [], _('show difference from revision')),
3147 3147 ] + walkopts,
3148 3148 _('hg status [OPTION]... [FILE]...')),
3149 3149 "tag":
3150 3150 (tag,
3151 3151 [('l', 'local', None, _('make the tag local')),
3152 3152 ('m', 'message', '', _('message for tag commit log entry')),
3153 3153 ('d', 'date', '', _('record datecode as commit date')),
3154 3154 ('u', 'user', '', _('record user as committer')),
3155 3155 ('r', 'rev', '', _('revision to tag'))],
3156 3156 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3157 3157 "tags": (tags, [], _('hg tags')),
3158 3158 "tip":
3159 3159 (tip,
3160 3160 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3161 3161 ('', 'style', '', _('display using template map file')),
3162 3162 ('p', 'patch', None, _('show patch')),
3163 3163 ('', 'template', '', _('display with template'))],
3164 3164 _('hg tip [-p]')),
3165 3165 "unbundle":
3166 3166 (unbundle,
3167 3167 [('u', 'update', None,
3168 3168 _('update to new tip if changesets were unbundled'))],
3169 3169 _('hg unbundle [-u] FILE')),
3170 3170 "^update|up|checkout|co":
3171 3171 (update,
3172 3172 [('b', 'branch', '',
3173 3173 _('checkout the head of a specific branch (DEPRECATED)')),
3174 3174 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3175 3175 ('C', 'clean', None, _('overwrite locally modified files')),
3176 3176 ('f', 'force', None, _('force a merge with outstanding changes'))],
3177 3177 _('hg update [-C] [-f] [REV]')),
3178 3178 "verify": (verify, [], _('hg verify')),
3179 3179 "version": (show_version, [], _('hg version')),
3180 3180 }
3181 3181
3182 3182 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3183 3183 " debugindex debugindexdot")
3184 3184 optionalrepo = ("paths serve showconfig")
3185 3185
3186 3186 def findpossible(ui, cmd):
3187 3187 """
3188 3188 Return cmd -> (aliases, command table entry)
3189 3189 for each matching command.
3190 3190 Return debug commands (or their aliases) only if no normal command matches.
3191 3191 """
3192 3192 choice = {}
3193 3193 debugchoice = {}
3194 3194 for e in table.keys():
3195 3195 aliases = e.lstrip("^").split("|")
3196 3196 found = None
3197 3197 if cmd in aliases:
3198 3198 found = cmd
3199 3199 elif not ui.config("ui", "strict"):
3200 3200 for a in aliases:
3201 3201 if a.startswith(cmd):
3202 3202 found = a
3203 3203 break
3204 3204 if found is not None:
3205 3205 if aliases[0].startswith("debug") or found.startswith("debug"):
3206 3206 debugchoice[found] = (aliases, table[e])
3207 3207 else:
3208 3208 choice[found] = (aliases, table[e])
3209 3209
3210 3210 if not choice and debugchoice:
3211 3211 choice = debugchoice
3212 3212
3213 3213 return choice
3214 3214
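The prefix matching that findpossible performs can be seen in isolation with a small, self-contained sketch. The miniature table below is a made-up stand-in for the real command table; the real function additionally honours ui.config("ui", "strict") and keeps debug commands separate, as its docstring says.

    # hypothetical miniature command table, mimicking the "^name|alias" keys
    minitable = {
        "^commit|ci": "commit entry",
        "copy|cp": "copy entry",
        "^clone": "clone entry",
    }

    def possible(cmd):
        choice = {}
        for key, entry in minitable.items():
            aliases = key.lstrip("^").split("|")
            for a in aliases:
                if a.startswith(cmd):   # an exact match is a special case of this
                    choice[a] = (aliases, entry)
                    break
        return choice

    print(possible("c"))    # three matches: findcmd would raise AmbiguousCommand
    print(possible("ci"))   # a single exact alias match for commit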
3215 3215 def findcmd(ui, cmd):
3216 3216 """Return (aliases, command table entry) for command string."""
3217 3217 choice = findpossible(ui, cmd)
3218 3218
3219 3219 if choice.has_key(cmd):
3220 3220 return choice[cmd]
3221 3221
3222 3222 if len(choice) > 1:
3223 3223 clist = choice.keys()
3224 3224 clist.sort()
3225 3225 raise AmbiguousCommand(cmd, clist)
3226 3226
3227 3227 if choice:
3228 3228 return choice.values()[0]
3229 3229
3230 3230 raise UnknownCommand(cmd)
3231 3231
3232 3232 def catchterm(*args):
3233 3233 raise util.SignalInterrupt
3234 3234
3235 3235 def run():
3236 3236 sys.exit(dispatch(sys.argv[1:]))
3237 3237
3238 3238 class ParseError(Exception):
3239 3239 """Exception raised on errors in parsing the command line."""
3240 3240
3241 3241 def parse(ui, args):
3242 3242 options = {}
3243 3243 cmdoptions = {}
3244 3244
3245 3245 try:
3246 3246 args = fancyopts.fancyopts(args, globalopts, options)
3247 3247 except fancyopts.getopt.GetoptError, inst:
3248 3248 raise ParseError(None, inst)
3249 3249
3250 3250 if args:
3251 3251 cmd, args = args[0], args[1:]
3252 3252 aliases, i = findcmd(ui, cmd)
3253 3253 cmd = aliases[0]
3254 3254 defaults = ui.config("defaults", cmd)
3255 3255 if defaults:
3256 3256 args = shlex.split(defaults) + args
3257 3257 c = list(i[1])
3258 3258 else:
3259 3259 cmd = None
3260 3260 c = []
3261 3261
3262 3262 # combine global options into local
3263 3263 for o in globalopts:
3264 3264 c.append((o[0], o[1], options[o[1]], o[3]))
3265 3265
3266 3266 try:
3267 3267 args = fancyopts.fancyopts(args, c, cmdoptions)
3268 3268 except fancyopts.getopt.GetoptError, inst:
3269 3269 raise ParseError(cmd, inst)
3270 3270
3271 3271 # separate global options back out
3272 3272 for o in globalopts:
3273 3273 n = o[1]
3274 3274 options[n] = cmdoptions[n]
3275 3275 del cmdoptions[n]
3276 3276
3277 3277 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3278 3278
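The [defaults] handling above lets an hgrc prepend options to every invocation of a command before the command-specific options are parsed. A minimal illustration with a hypothetical defaults value:

    import shlex

    # assume an hgrc containing:
    #   [defaults]
    #   log = -q --limit 3
    defaults = '-q --limit 3'
    args = ['src/util.py']

    # parse() splits the defaults and prepends them to the user's arguments
    print(shlex.split(defaults) + args)   # ['-q', '--limit', '3', 'src/util.py']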
3279 3279 external = {}
3280 3280
3281 3281 def findext(name):
3282 3282 '''return module with given extension name'''
3283 3283 try:
3284 3284 return sys.modules[external[name]]
3285 3285 except KeyError:
3286 3286 for k, v in external.iteritems():
3287 3287 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3288 3288 return sys.modules[v]
3289 3289 raise KeyError(name)
3290 3290
3291 3291 def load_extensions(ui):
3292 3292 added = []
3293 3293 for ext_name, load_from_name in ui.extensions():
3294 3294 if ext_name in external:
3295 3295 continue
3296 3296 try:
3297 3297 if load_from_name:
3298 3298 # the module will be loaded in sys.modules
3299 3299 # choose a unique name so that it doesn't
3300 3300 # conflict with other modules
3301 3301 module_name = "hgext_%s" % ext_name.replace('.', '_')
3302 3302 mod = imp.load_source(module_name, load_from_name)
3303 3303 else:
3304 3304 def importh(name):
3305 3305 mod = __import__(name)
3306 3306 components = name.split('.')
3307 3307 for comp in components[1:]:
3308 3308 mod = getattr(mod, comp)
3309 3309 return mod
3310 3310 try:
3311 3311 mod = importh("hgext.%s" % ext_name)
3312 3312 except ImportError:
3313 3313 mod = importh(ext_name)
3314 3314 external[ext_name] = mod.__name__
3315 3315 added.append((mod, ext_name))
3316 3316 except (util.SignalInterrupt, KeyboardInterrupt):
3317 3317 raise
3318 3318 except Exception, inst:
3319 3319 ui.warn(_("*** failed to import extension %s: %s\n") %
3320 3320 (ext_name, inst))
3321 3321 if ui.print_exc():
3322 3322 return 1
3323 3323
3324 3324 for mod, name in added:
3325 3325 uisetup = getattr(mod, 'uisetup', None)
3326 3326 if uisetup:
3327 3327 uisetup(ui)
3328 3328 cmdtable = getattr(mod, 'cmdtable', {})
3329 3329 for t in cmdtable:
3330 3330 if t in table:
3331 3331 ui.warn(_("module %s overrides %s\n") % (name, t))
3332 3332 table.update(cmdtable)
3333 3333
3334 3334 def parseconfig(config):
3335 3335 """parse the --config options from the command line"""
3336 3336 parsed = []
3337 3337 for cfg in config:
3338 3338 try:
3339 3339 name, value = cfg.split('=', 1)
3340 3340 section, name = name.split('.', 1)
3341 3341 if not section or not name:
3342 3342 raise IndexError
3343 3343 parsed.append((section, name, value))
3344 3344 except (IndexError, ValueError):
3345 3345 raise util.Abort(_('malformed --config option: %s') % cfg)
3346 3346 return parsed
3347 3347
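parseconfig accepts only values of the form section.name=value; anything else aborts with "malformed --config option". A short sketch of the accepted shape, using illustrative sample values:

    samples = ['ui.username=alice', 'extensions.mq=']
    parsed = []
    for cfg in samples:
        name, value = cfg.split('=', 1)
        section, name = name.split('.', 1)
        parsed.append((section, name, value))
    print(parsed)   # [('ui', 'username', 'alice'), ('extensions', 'mq', '')]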
3348 3348 def dispatch(args):
3349 3349 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3350 3350 num = getattr(signal, name, None)
3351 3351 if num: signal.signal(num, catchterm)
3352 3352
3353 3353 try:
3354 3354 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3355 3355 except util.Abort, inst:
3356 3356 sys.stderr.write(_("abort: %s\n") % inst)
3357 3357 return -1
3358 3358
3359 3359 load_extensions(u)
3360 3360 u.addreadhook(load_extensions)
3361 3361
3362 3362 try:
3363 3363 cmd, func, args, options, cmdoptions = parse(u, args)
3364 3364 if options["time"]:
3365 3365 def get_times():
3366 3366 t = os.times()
3367 3367 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3368 3368 t = (t[0], t[1], t[2], t[3], time.clock())
3369 3369 return t
3370 3370 s = get_times()
3371 3371 def print_time():
3372 3372 t = get_times()
3373 3373 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3374 3374 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3375 3375 atexit.register(print_time)
3376 3376
3377 3377 # enter the debugger before command execution
3378 3378 if options['debugger']:
3379 3379 pdb.set_trace()
3380 3380
3381 3381 try:
3382 3382 if options['cwd']:
3383 3383 try:
3384 3384 os.chdir(options['cwd'])
3385 3385 except OSError, inst:
3386 3386 raise util.Abort('%s: %s' %
3387 3387 (options['cwd'], inst.strerror))
3388 3388
3389 3389 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3390 3390 not options["noninteractive"], options["traceback"],
3391 3391 parseconfig(options["config"]))
3392 3392
3393 3393 path = u.expandpath(options["repository"]) or ""
3394 3394 repo = path and hg.repository(u, path=path) or None
3395 3395 if repo and not repo.local():
3396 3396 raise util.Abort(_("repository '%s' is not local") % path)
3397 3397
3398 3398 if options['help']:
3399 3399 return help_(u, cmd, options['version'])
3400 3400 elif options['version']:
3401 3401 return show_version(u)
3402 3402 elif not cmd:
3403 3403 return help_(u, 'shortlist')
3404 3404
3405 3405 if cmd not in norepo.split():
3406 3406 try:
3407 3407 if not repo:
3408 3408 repo = hg.repository(u, path=path)
3409 3409 u = repo.ui
3410 3410 for name in external.itervalues():
3411 3411 mod = sys.modules[name]
3412 3412 if hasattr(mod, 'reposetup'):
3413 3413 mod.reposetup(u, repo)
3414 3414 hg.repo_setup_hooks.append(mod.reposetup)
3415 3415 except hg.RepoError:
3416 3416 if cmd not in optionalrepo.split():
3417 3417 raise
3418 3418 d = lambda: func(u, repo, *args, **cmdoptions)
3419 3419 else:
3420 3420 d = lambda: func(u, *args, **cmdoptions)
3421 3421
3422 3422 try:
3423 3423 if options['profile']:
3424 3424 import hotshot, hotshot.stats
3425 3425 prof = hotshot.Profile("hg.prof")
3426 3426 try:
3427 3427 try:
3428 3428 return prof.runcall(d)
3429 3429 except:
3430 3430 try:
3431 3431 u.warn(_('exception raised - generating '
3432 3432 'profile anyway\n'))
3433 3433 except:
3434 3434 pass
3435 3435 raise
3436 3436 finally:
3437 3437 prof.close()
3438 3438 stats = hotshot.stats.load("hg.prof")
3439 3439 stats.strip_dirs()
3440 3440 stats.sort_stats('time', 'calls')
3441 3441 stats.print_stats(40)
3442 3442 elif options['lsprof']:
3443 3443 try:
3444 3444 from mercurial import lsprof
3445 3445 except ImportError:
3446 3446 raise util.Abort(_(
3447 3447 'lsprof not available - install from '
3448 3448 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3449 3449 p = lsprof.Profiler()
3450 3450 p.enable(subcalls=True)
3451 3451 try:
3452 3452 return d()
3453 3453 finally:
3454 3454 p.disable()
3455 3455 stats = lsprof.Stats(p.getstats())
3456 3456 stats.sort()
3457 3457 stats.pprint(top=10, file=sys.stderr, climit=5)
3458 3458 else:
3459 3459 return d()
3460 3460 finally:
3461 3461 u.flush()
3462 3462 except:
3463 3463 # enter the debugger when we hit an exception
3464 3464 if options['debugger']:
3465 3465 pdb.post_mortem(sys.exc_info()[2])
3466 3466 u.print_exc()
3467 3467 raise
3468 3468 except ParseError, inst:
3469 3469 if inst.args[0]:
3470 3470 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3471 3471 help_(u, inst.args[0])
3472 3472 else:
3473 3473 u.warn(_("hg: %s\n") % inst.args[1])
3474 3474 help_(u, 'shortlist')
3475 3475 except AmbiguousCommand, inst:
3476 3476 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3477 3477 (inst.args[0], " ".join(inst.args[1])))
3478 3478 except UnknownCommand, inst:
3479 3479 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3480 3480 help_(u, 'shortlist')
3481 3481 except hg.RepoError, inst:
3482 3482 u.warn(_("abort: %s!\n") % inst)
3483 3483 except lock.LockHeld, inst:
3484 3484 if inst.errno == errno.ETIMEDOUT:
3485 3485 reason = _('timed out waiting for lock held by %s') % inst.locker
3486 3486 else:
3487 3487 reason = _('lock held by %s') % inst.locker
3488 3488 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3489 3489 except lock.LockUnavailable, inst:
3490 3490 u.warn(_("abort: could not lock %s: %s\n") %
3491 3491 (inst.desc or inst.filename, inst.strerror))
3492 3492 except revlog.RevlogError, inst:
3493 3493 u.warn(_("abort: %s!\n") % inst)
3494 3494 except util.SignalInterrupt:
3495 3495 u.warn(_("killed!\n"))
3496 3496 except KeyboardInterrupt:
3497 3497 try:
3498 3498 u.warn(_("interrupted!\n"))
3499 3499 except IOError, inst:
3500 3500 if inst.errno == errno.EPIPE:
3501 3501 if u.debugflag:
3502 3502 u.warn(_("\nbroken pipe\n"))
3503 3503 else:
3504 3504 raise
3505 3505 except IOError, inst:
3506 3506 if hasattr(inst, "code"):
3507 3507 u.warn(_("abort: %s\n") % inst)
3508 3508 elif hasattr(inst, "reason"):
3509 3509 u.warn(_("abort: error: %s\n") % inst.reason[1])
3510 3510 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3511 3511 if u.debugflag:
3512 3512 u.warn(_("broken pipe\n"))
3513 3513 elif getattr(inst, "strerror", None):
3514 3514 if getattr(inst, "filename", None):
3515 3515 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3516 3516 else:
3517 3517 u.warn(_("abort: %s\n") % inst.strerror)
3518 3518 else:
3519 3519 raise
3520 3520 except OSError, inst:
3521 3521 if getattr(inst, "filename", None):
3522 3522 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3523 3523 else:
3524 3524 u.warn(_("abort: %s\n") % inst.strerror)
3525 except util.UnexpectedOutput, inst:
3526 u.warn(_("abort: %s") % inst[0])
3527 if not isinstance(inst[1], basestring):
3528 u.warn(" %r\n" % (inst[1],))
3529 elif not inst[1]:
3530 u.warn(_(" empty string\n"))
3531 else:
3532 u.warn("\n%r%s\n" %
3533 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3525 3534 except util.Abort, inst:
3526 3535 u.warn(_("abort: %s\n") % inst)
3527 3536 except TypeError, inst:
3528 3537 # was this an argument error?
3529 3538 tb = traceback.extract_tb(sys.exc_info()[2])
3530 3539 if len(tb) > 2: # no
3531 3540 raise
3532 3541 u.debug(inst, "\n")
3533 3542 u.warn(_("%s: invalid arguments\n") % cmd)
3534 3543 help_(u, cmd)
3535 3544 except SystemExit, inst:
3536 3545 # Commands shouldn't sys.exit directly, but give a return code.
3537 3546 # Just in case, catch this and pass the exit code to the caller.
3538 3547 return inst.code
3539 3548 except:
3540 3549 u.warn(_("** unknown exception encountered, details follow\n"))
3541 3550 u.warn(_("** report bug details to "
3542 3551 "http://www.selenic.com/mercurial/bts\n"))
3543 3552 u.warn(_("** or mercurial@selenic.com\n"))
3544 3553 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3545 3554 % version.get_version())
3546 3555 raise
3547 3556
3548 3557 return -1
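The new util.UnexpectedOutput handler added above only assumes an exception whose first argument is a message and whose second is the unexpected data, which may be empty or not a string at all. A rough sketch of that shape; the class below is a stand-in, the real one lives in Mercurial's util module:

    class UnexpectedOutput(Exception):
        pass   # stand-in for util.UnexpectedOutput

    try:
        raise UnexpectedOutput('operation forbidden by server',
                               '<html>500 internal error</html>')
    except UnexpectedOutput, inst:
        # mirrors the handler above: the message first, then a trimmed
        # repr of whatever unexpected data was actually received
        print 'abort: %s' % inst[0]
        print '%r' % (inst[1][:400],)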
@@ -1,1841 +1,1856 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ('lookup', 'changegroupsubset')
19 19
20 20 def __del__(self):
21 21 self.transhandle = None
22 22 def __init__(self, parentui, path=None, create=0):
23 23 repo.repository.__init__(self)
24 24 if not path:
25 25 p = os.getcwd()
26 26 while not os.path.isdir(os.path.join(p, ".hg")):
27 27 oldp = p
28 28 p = os.path.dirname(p)
29 29 if p == oldp:
30 30 raise repo.RepoError(_("There is no Mercurial repository"
31 31 " here (.hg not found)"))
32 32 path = p
33 33 self.path = os.path.join(path, ".hg")
34 34
35 35 if not os.path.isdir(self.path):
36 36 if create:
37 37 if not os.path.exists(path):
38 38 os.mkdir(path)
39 39 os.mkdir(self.path)
40 40 os.mkdir(self.join("data"))
41 41 else:
42 42 raise repo.RepoError(_("repository %s not found") % path)
43 43 elif create:
44 44 raise repo.RepoError(_("repository %s already exists") % path)
45 45
46 46 self.root = os.path.abspath(path)
47 47 self.origroot = path
48 48 self.ui = ui.ui(parentui=parentui)
49 49 self.opener = util.opener(self.path)
50 50 self.sopener = util.opener(self.path)
51 51 self.wopener = util.opener(self.root)
52 52
53 53 try:
54 54 self.ui.readconfig(self.join("hgrc"), self.root)
55 55 except IOError:
56 56 pass
57 57
58 58 v = self.ui.configrevlog()
59 59 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
60 60 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
61 61 fl = v.get('flags', None)
62 62 flags = 0
63 63 if fl != None:
64 64 for x in fl.split():
65 65 flags |= revlog.flagstr(x)
66 66 elif self.revlogv1:
67 67 flags = revlog.REVLOG_DEFAULT_FLAGS
68 68
69 69 v = self.revlogversion | flags
70 70 self.manifest = manifest.manifest(self.sopener, v)
71 71 self.changelog = changelog.changelog(self.sopener, v)
72 72
73 73 # the changelog might not have the inline index flag
74 74 # on. If the format of the changelog is the same as found in
75 75 # .hgrc, apply any flags found in the .hgrc as well.
76 76 # Otherwise, just version from the changelog
77 77 v = self.changelog.version
78 78 if v == self.revlogversion:
79 79 v |= flags
80 80 self.revlogversion = v
81 81
82 82 self.tagscache = None
83 83 self.branchcache = None
84 84 self.nodetagscache = None
85 85 self.encodepats = None
86 86 self.decodepats = None
87 87 self.transhandle = None
88 88
89 89 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
90 90
91 91 def url(self):
92 92 return 'file:' + self.root
93 93
94 94 def hook(self, name, throw=False, **args):
95 95 def callhook(hname, funcname):
96 96 '''call python hook. hook is callable object, looked up as
97 97 name in python module. if callable returns "true", hook
98 98 fails, else passes. if hook raises exception, treated as
99 99 hook failure. exception propagates if throw is "true".
100 100
101 101 reason for "true" meaning "hook failed" is so that
102 102 unmodified commands (e.g. mercurial.commands.update) can
103 103 be run as hooks without wrappers to convert return values.'''
104 104
105 105 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
106 106 d = funcname.rfind('.')
107 107 if d == -1:
108 108 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
109 109 % (hname, funcname))
110 110 modname = funcname[:d]
111 111 try:
112 112 obj = __import__(modname)
113 113 except ImportError:
114 114 try:
115 115 # extensions are loaded with hgext_ prefix
116 116 obj = __import__("hgext_%s" % modname)
117 117 except ImportError:
118 118 raise util.Abort(_('%s hook is invalid '
119 119 '(import of "%s" failed)') %
120 120 (hname, modname))
121 121 try:
122 122 for p in funcname.split('.')[1:]:
123 123 obj = getattr(obj, p)
124 124 except AttributeError, err:
125 125 raise util.Abort(_('%s hook is invalid '
126 126 '("%s" is not defined)') %
127 127 (hname, funcname))
128 128 if not callable(obj):
129 129 raise util.Abort(_('%s hook is invalid '
130 130 '("%s" is not callable)') %
131 131 (hname, funcname))
132 132 try:
133 133 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
134 134 except (KeyboardInterrupt, util.SignalInterrupt):
135 135 raise
136 136 except Exception, exc:
137 137 if isinstance(exc, util.Abort):
138 138 self.ui.warn(_('error: %s hook failed: %s\n') %
139 139 (hname, exc.args[0]))
140 140 else:
141 141 self.ui.warn(_('error: %s hook raised an exception: '
142 142 '%s\n') % (hname, exc))
143 143 if throw:
144 144 raise
145 145 self.ui.print_exc()
146 146 return True
147 147 if r:
148 148 if throw:
149 149 raise util.Abort(_('%s hook failed') % hname)
150 150 self.ui.warn(_('warning: %s hook failed\n') % hname)
151 151 return r
152 152
153 153 def runhook(name, cmd):
154 154 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
155 155 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
156 156 r = util.system(cmd, environ=env, cwd=self.root)
157 157 if r:
158 158 desc, r = util.explain_exit(r)
159 159 if throw:
160 160 raise util.Abort(_('%s hook %s') % (name, desc))
161 161 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
162 162 return r
163 163
164 164 r = False
165 165 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
166 166 if hname.split(".", 1)[0] == name and cmd]
167 167 hooks.sort()
168 168 for hname, cmd in hooks:
169 169 if cmd.startswith('python:'):
170 170 r = callhook(hname, cmd[7:].strip()) or r
171 171 else:
172 172 r = runhook(hname, cmd) or r
173 173 return r
174 174
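As the callhook docstring notes, an in-process hook is just a callable whose truthy return value means failure, while runhook exports the same keyword arguments to a shell command as HG_* environment variables. A hypothetical Python hook wired up through an assumed hgrc entry might look like this; the module and function names are invented, and the keyword arguments shown are the ones commit() passes below:

    # in an hgrc:
    #   [hooks]
    #   commit = python:myhooks.log_commit
    #
    # myhooks.py (hypothetical module):
    def log_commit(ui, repo, hooktype, node=None, parent1=None,
                   parent2=None, **kwargs):
        ui.status('commit %s recorded (first parent %s)\n' % (node, parent1))
        return False   # a true return value would mean the hook failed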
175 175 tag_disallowed = ':\r\n'
176 176
177 177 def tag(self, name, node, message, local, user, date):
178 178 '''tag a revision with a symbolic name.
179 179
180 180 if local is True, the tag is stored in a per-repository file.
181 181 otherwise, it is stored in the .hgtags file, and a new
182 182 changeset is committed with the change.
183 183
184 184 keyword arguments:
185 185
186 186 local: whether to store tag in non-version-controlled file
187 187 (default False)
188 188
189 189 message: commit message to use if committing
190 190
191 191 user: name of user to use if committing
192 192
193 193 date: date tuple to use if committing'''
194 194
195 195 for c in self.tag_disallowed:
196 196 if c in name:
197 197 raise util.Abort(_('%r cannot be used in a tag name') % c)
198 198
199 199 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
200 200
201 201 if local:
202 202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
203 203 self.hook('tag', node=hex(node), tag=name, local=local)
204 204 return
205 205
206 206 for x in self.status()[:5]:
207 207 if '.hgtags' in x:
208 208 raise util.Abort(_('working copy of .hgtags is changed '
209 209 '(please commit .hgtags manually)'))
210 210
211 211 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
212 212 if self.dirstate.state('.hgtags') == '?':
213 213 self.add(['.hgtags'])
214 214
215 215 self.commit(['.hgtags'], message, user, date)
216 216 self.hook('tag', node=hex(node), tag=name, local=local)
217 217
218 218 def tags(self):
219 219 '''return a mapping of tag to node'''
220 220 if not self.tagscache:
221 221 self.tagscache = {}
222 222
223 223 def parsetag(line, context):
224 224 if not line:
225 225 return
226 226 s = l.split(" ", 1)
227 227 if len(s) != 2:
228 228 self.ui.warn(_("%s: cannot parse entry\n") % context)
229 229 return
230 230 node, key = s
231 231 key = key.strip()
232 232 try:
233 233 bin_n = bin(node)
234 234 except TypeError:
235 235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
236 236 (context, node))
237 237 return
238 238 if bin_n not in self.changelog.nodemap:
239 239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
240 240 (context, key))
241 241 return
242 242 self.tagscache[key] = bin_n
243 243
244 244 # read the tags file from each head, ending with the tip,
245 245 # and add each tag found to the map, with "newer" ones
246 246 # taking precedence
247 247 heads = self.heads()
248 248 heads.reverse()
249 249 seen = {}
250 250 for node in heads:
251 251 f = self.filectx('.hgtags', node)
252 252 if not f or f.filerev() in seen: continue
253 253 seen[f.filerev()] = 1
254 254 count = 0
255 255 for l in f.data().splitlines():
256 256 count += 1
257 257 parsetag(l, _("%s, line %d") % (str(f), count))
258 258
259 259 try:
260 260 f = self.opener("localtags")
261 261 count = 0
262 262 for l in f:
263 263 count += 1
264 264 parsetag(l, _("localtags, line %d") % count)
265 265 except IOError:
266 266 pass
267 267
268 268 self.tagscache['tip'] = self.changelog.tip()
269 269
270 270 return self.tagscache
271 271
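Both the tag() writer and the parsetag() reader above agree on a one-line-per-tag format: a 40-character hex changeset id, one space, then the tag name. A small parsing sketch; the hash is a placeholder, not a real changeset:

    line = '0123456789abcdef0123456789abcdef01234567 v1.0\n'
    node, key = line.split(' ', 1)
    key = key.strip()
    print(node)   # the 40-character hex node
    print(key)    # 'v1.0'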
272 272 def tagslist(self):
273 273 '''return a list of tags ordered by revision'''
274 274 l = []
275 275 for t, n in self.tags().items():
276 276 try:
277 277 r = self.changelog.rev(n)
278 278 except:
279 279 r = -2 # sort to the beginning of the list if unknown
280 280 l.append((r, t, n))
281 281 l.sort()
282 282 return [(t, n) for r, t, n in l]
283 283
284 284 def nodetags(self, node):
285 285 '''return the tags associated with a node'''
286 286 if not self.nodetagscache:
287 287 self.nodetagscache = {}
288 288 for t, n in self.tags().items():
289 289 self.nodetagscache.setdefault(n, []).append(t)
290 290 return self.nodetagscache.get(node, [])
291 291
292 292 def branchtags(self):
293 293 if self.branchcache != None:
294 294 return self.branchcache
295 295
296 296 self.branchcache = {} # avoid recursion in changectx
297 297
298 298 partial, last, lrev = self._readbranchcache()
299 299
300 300 tiprev = self.changelog.count() - 1
301 301 if lrev != tiprev:
302 302 self._updatebranchcache(partial, lrev+1, tiprev+1)
303 303 self._writebranchcache(partial, self.changelog.tip(), tiprev)
304 304
305 305 self.branchcache = partial
306 306 return self.branchcache
307 307
308 308 def _readbranchcache(self):
309 309 partial = {}
310 310 try:
311 311 f = self.opener("branches.cache")
312 312 last, lrev = f.readline().rstrip().split(" ", 1)
313 313 last, lrev = bin(last), int(lrev)
314 314 if (lrev < self.changelog.count() and
315 315 self.changelog.node(lrev) == last): # sanity check
316 316 for l in f:
317 317 node, label = l.rstrip().split(" ", 1)
318 318 partial[label] = bin(node)
319 319 else: # invalidate the cache
320 320 last, lrev = nullid, -1
321 321 f.close()
322 322 except IOError:
323 323 last, lrev = nullid, -1
324 324 return partial, last, lrev
325 325
326 326 def _writebranchcache(self, branches, tip, tiprev):
327 327 try:
328 328 f = self.opener("branches.cache", "w")
329 329 f.write("%s %s\n" % (hex(tip), tiprev))
330 330 for label, node in branches.iteritems():
331 331 f.write("%s %s\n" % (hex(node), label))
332 332 except IOError:
333 333 pass
334 334
335 335 def _updatebranchcache(self, partial, start, end):
336 336 for r in xrange(start, end):
337 337 c = self.changectx(r)
338 338 b = c.branch()
339 339 if b:
340 340 partial[b] = c.node()
341 341
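The branch cache handled by the three methods above is a small text file: a header line of '<hex tip node> <tip rev>' followed by one '<hex node> <branch label>' line per branch. A sketch of parsing such content, with placeholder node values:

    tipnode = 'aa' * 20          # placeholder 40-character hex id
    branchnode = 'bb' * 20
    sample = '%s 7\n%s stable\n' % (tipnode, branchnode)

    lines = sample.splitlines()
    last, lrev = lines[0].split(' ', 1)
    partial = {}
    for l in lines[1:]:
        node, label = l.rstrip().split(' ', 1)
        partial[label] = node
    print(int(lrev), partial)    # rev 7 and {'stable': 'bbbb...bb'}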
342 342 def lookup(self, key):
343 343 if key == '.':
344 344 key = self.dirstate.parents()[0]
345 345 if key == nullid:
346 346 raise repo.RepoError(_("no revision checked out"))
347 347 n = self.changelog._match(key)
348 348 if n:
349 349 return n
350 350 if key in self.tags():
351 351 return self.tags()[key]
352 352 if key in self.branchtags():
353 353 return self.branchtags()[key]
354 354 n = self.changelog._partialmatch(key)
355 355 if n:
356 356 return n
357 357 raise repo.RepoError(_("unknown revision '%s'") % key)
358 358
359 359 def dev(self):
360 360 return os.lstat(self.path).st_dev
361 361
362 362 def local(self):
363 363 return True
364 364
365 365 def join(self, f):
366 366 return os.path.join(self.path, f)
367 367
368 368 def sjoin(self, f):
369 369 return os.path.join(self.path, f)
370 370
371 371 def wjoin(self, f):
372 372 return os.path.join(self.root, f)
373 373
374 374 def file(self, f):
375 375 if f[0] == '/':
376 376 f = f[1:]
377 377 return filelog.filelog(self.sopener, f, self.revlogversion)
378 378
379 379 def changectx(self, changeid=None):
380 380 return context.changectx(self, changeid)
381 381
382 382 def workingctx(self):
383 383 return context.workingctx(self)
384 384
385 385 def parents(self, changeid=None):
386 386 '''
387 387 get list of changectxs for parents of changeid or working directory
388 388 '''
389 389 if changeid is None:
390 390 pl = self.dirstate.parents()
391 391 else:
392 392 n = self.changelog.lookup(changeid)
393 393 pl = self.changelog.parents(n)
394 394 if pl[1] == nullid:
395 395 return [self.changectx(pl[0])]
396 396 return [self.changectx(pl[0]), self.changectx(pl[1])]
397 397
398 398 def filectx(self, path, changeid=None, fileid=None):
399 399 """changeid can be a changeset revision, node, or tag.
400 400 fileid can be a file revision or node."""
401 401 return context.filectx(self, path, changeid, fileid)
402 402
403 403 def getcwd(self):
404 404 return self.dirstate.getcwd()
405 405
406 406 def wfile(self, f, mode='r'):
407 407 return self.wopener(f, mode)
408 408
409 409 def wread(self, filename):
410 410 if self.encodepats == None:
411 411 l = []
412 412 for pat, cmd in self.ui.configitems("encode"):
413 413 mf = util.matcher(self.root, "", [pat], [], [])[1]
414 414 l.append((mf, cmd))
415 415 self.encodepats = l
416 416
417 417 data = self.wopener(filename, 'r').read()
418 418
419 419 for mf, cmd in self.encodepats:
420 420 if mf(filename):
421 421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
422 422 data = util.filter(data, cmd)
423 423 break
424 424
425 425 return data
426 426
427 427 def wwrite(self, filename, data, fd=None):
428 428 if self.decodepats == None:
429 429 l = []
430 430 for pat, cmd in self.ui.configitems("decode"):
431 431 mf = util.matcher(self.root, "", [pat], [], [])[1]
432 432 l.append((mf, cmd))
433 433 self.decodepats = l
434 434
435 435 for mf, cmd in self.decodepats:
436 436 if mf(filename):
437 437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
438 438 data = util.filter(data, cmd)
439 439 break
440 440
441 441 if fd:
442 442 return fd.write(data)
443 443 return self.wopener(filename, 'w').write(data)
444 444
445 445 def transaction(self):
446 446 tr = self.transhandle
447 447 if tr != None and tr.running():
448 448 return tr.nest()
449 449
450 450 # save dirstate for rollback
451 451 try:
452 452 ds = self.opener("dirstate").read()
453 453 except IOError:
454 454 ds = ""
455 455 self.opener("journal.dirstate", "w").write(ds)
456 456
457 457 tr = transaction.transaction(self.ui.warn, self.sopener,
458 458 self.sjoin("journal"),
459 459 aftertrans(self.path))
460 460 self.transhandle = tr
461 461 return tr
462 462
463 463 def recover(self):
464 464 l = self.lock()
465 465 if os.path.exists(self.sjoin("journal")):
466 466 self.ui.status(_("rolling back interrupted transaction\n"))
467 467 transaction.rollback(self.sopener, self.sjoin("journal"))
468 468 self.reload()
469 469 return True
470 470 else:
471 471 self.ui.warn(_("no interrupted transaction available\n"))
472 472 return False
473 473
474 474 def rollback(self, wlock=None):
475 475 if not wlock:
476 476 wlock = self.wlock()
477 477 l = self.lock()
478 478 if os.path.exists(self.sjoin("undo")):
479 479 self.ui.status(_("rolling back last transaction\n"))
480 480 transaction.rollback(self.sopener, self.sjoin("undo"))
481 481 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
482 482 self.reload()
483 483 self.wreload()
484 484 else:
485 485 self.ui.warn(_("no rollback information available\n"))
486 486
487 487 def wreload(self):
488 488 self.dirstate.read()
489 489
490 490 def reload(self):
491 491 self.changelog.load()
492 492 self.manifest.load()
493 493 self.tagscache = None
494 494 self.nodetagscache = None
495 495
496 496 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
497 497 desc=None):
498 498 try:
499 499 l = lock.lock(lockname, 0, releasefn, desc=desc)
500 500 except lock.LockHeld, inst:
501 501 if not wait:
502 502 raise
503 503 self.ui.warn(_("waiting for lock on %s held by %s\n") %
504 504 (desc, inst.args[0]))
505 505 # default to 600 seconds timeout
506 506 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
507 507 releasefn, desc=desc)
508 508 if acquirefn:
509 509 acquirefn()
510 510 return l
511 511
512 512 def lock(self, wait=1):
513 513 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
514 514 desc=_('repository %s') % self.origroot)
515 515
516 516 def wlock(self, wait=1):
517 517 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
518 518 self.wreload,
519 519 desc=_('working directory of %s') % self.origroot)
520 520
521 521 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
522 522 """
523 523 commit an individual file as part of a larger transaction
524 524 """
525 525
526 526 t = self.wread(fn)
527 527 fl = self.file(fn)
528 528 fp1 = manifest1.get(fn, nullid)
529 529 fp2 = manifest2.get(fn, nullid)
530 530
531 531 meta = {}
532 532 cp = self.dirstate.copied(fn)
533 533 if cp:
534 534 meta["copy"] = cp
535 535 if not manifest2: # not a branch merge
536 536 meta["copyrev"] = hex(manifest1.get(cp, nullid))
537 537 fp2 = nullid
538 538 elif fp2 != nullid: # copied on remote side
539 539 meta["copyrev"] = hex(manifest1.get(cp, nullid))
540 540 else: # copied on local side, reversed
541 541 meta["copyrev"] = hex(manifest2.get(cp))
542 542 fp2 = nullid
543 543 self.ui.debug(_(" %s: copy %s:%s\n") %
544 544 (fn, cp, meta["copyrev"]))
545 545 fp1 = nullid
546 546 elif fp2 != nullid:
547 547 # is one parent an ancestor of the other?
548 548 fpa = fl.ancestor(fp1, fp2)
549 549 if fpa == fp1:
550 550 fp1, fp2 = fp2, nullid
551 551 elif fpa == fp2:
552 552 fp2 = nullid
553 553
554 554 # is the file unmodified from the parent? report existing entry
555 555 if fp2 == nullid and not fl.cmp(fp1, t):
556 556 return fp1
557 557
558 558 changelist.append(fn)
559 559 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
560 560
561 561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
562 562 orig_parent = self.dirstate.parents()[0] or nullid
563 563 p1 = p1 or self.dirstate.parents()[0] or nullid
564 564 p2 = p2 or self.dirstate.parents()[1] or nullid
565 565 c1 = self.changelog.read(p1)
566 566 c2 = self.changelog.read(p2)
567 567 m1 = self.manifest.read(c1[0]).copy()
568 568 m2 = self.manifest.read(c2[0])
569 569 changed = []
570 570 removed = []
571 571
572 572 if orig_parent == p1:
573 573 update_dirstate = 1
574 574 else:
575 575 update_dirstate = 0
576 576
577 577 if not wlock:
578 578 wlock = self.wlock()
579 579 l = self.lock()
580 580 tr = self.transaction()
581 581 linkrev = self.changelog.count()
582 582 for f in files:
583 583 try:
584 584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
585 585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
586 586 except IOError:
587 587 try:
588 588 del m1[f]
589 589 if update_dirstate:
590 590 self.dirstate.forget([f])
591 591 removed.append(f)
592 592 except:
593 593 # deleted from p2?
594 594 pass
595 595
596 596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
597 597 user = user or self.ui.username()
598 598 n = self.changelog.add(mnode, changed + removed, text,
599 599 tr, p1, p2, user, date)
600 600 tr.close()
601 601 if update_dirstate:
602 602 self.dirstate.setparents(n, nullid)
603 603
604 604 def commit(self, files=None, text="", user=None, date=None,
605 605 match=util.always, force=False, lock=None, wlock=None,
606 606 force_editor=False):
607 607 commit = []
608 608 remove = []
609 609 changed = []
610 610
611 611 if files:
612 612 for f in files:
613 613 s = self.dirstate.state(f)
614 614 if s in 'nmai':
615 615 commit.append(f)
616 616 elif s == 'r':
617 617 remove.append(f)
618 618 else:
619 619 self.ui.warn(_("%s not tracked!\n") % f)
620 620 else:
621 621 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
622 622 commit = modified + added
623 623 remove = removed
624 624
625 625 p1, p2 = self.dirstate.parents()
626 626 c1 = self.changelog.read(p1)
627 627 c2 = self.changelog.read(p2)
628 628 m1 = self.manifest.read(c1[0]).copy()
629 629 m2 = self.manifest.read(c2[0])
630 630
631 631 branchname = self.workingctx().branch()
632 632 oldname = c1[5].get("branch", "")
633 633
634 634 if not commit and not remove and not force and p2 == nullid and \
635 635 branchname == oldname:
636 636 self.ui.status(_("nothing changed\n"))
637 637 return None
638 638
639 639 xp1 = hex(p1)
640 640 if p2 == nullid: xp2 = ''
641 641 else: xp2 = hex(p2)
642 642
643 643 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
644 644
645 645 if not wlock:
646 646 wlock = self.wlock()
647 647 if not lock:
648 648 lock = self.lock()
649 649 tr = self.transaction()
650 650
651 651 # check in files
652 652 new = {}
653 653 linkrev = self.changelog.count()
654 654 commit.sort()
655 655 for f in commit:
656 656 self.ui.note(f + "\n")
657 657 try:
658 658 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
659 659 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
660 660 except IOError:
661 661 self.ui.warn(_("trouble committing %s!\n") % f)
662 662 raise
663 663
664 664 # update manifest
665 665 m1.update(new)
666 666 for f in remove:
667 667 if f in m1:
668 668 del m1[f]
669 669 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
670 670
671 671 # add changeset
672 672 new = new.keys()
673 673 new.sort()
674 674
675 675 user = user or self.ui.username()
676 676 if not text or force_editor:
677 677 edittext = []
678 678 if text:
679 679 edittext.append(text)
680 680 edittext.append("")
681 681 if p2 != nullid:
682 682 edittext.append("HG: branch merge")
683 683 edittext.extend(["HG: changed %s" % f for f in changed])
684 684 edittext.extend(["HG: removed %s" % f for f in remove])
685 685 if not changed and not remove:
686 686 edittext.append("HG: no files changed")
687 687 edittext.append("")
688 688 # run editor in the repository root
689 689 olddir = os.getcwd()
690 690 os.chdir(self.root)
691 691 text = self.ui.edit("\n".join(edittext), user)
692 692 os.chdir(olddir)
693 693
694 694 lines = [line.rstrip() for line in text.rstrip().splitlines()]
695 695 while lines and not lines[0]:
696 696 del lines[0]
697 697 if not lines:
698 698 return None
699 699 text = '\n'.join(lines)
700 700 extra = {}
701 701 if branchname:
702 702 extra["branch"] = branchname
703 703 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
704 704 user, date, extra)
705 705 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
706 706 parent2=xp2)
707 707 tr.close()
708 708
709 709 self.dirstate.setparents(n)
710 710 self.dirstate.update(new, "n")
711 711 self.dirstate.forget(remove)
712 712
713 713 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
714 714 return n
715 715
716 716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
717 717 if node:
718 718 fdict = dict.fromkeys(files)
719 719 for fn in self.manifest.read(self.changelog.read(node)[0]):
720 720 for ffn in fdict:
721 721 # match if the file is the exact name or a directory
722 722 if ffn == fn or fn.startswith("%s/" % ffn):
723 723 del fdict[ffn]
724 724 break
725 725 if match(fn):
726 726 yield 'm', fn
727 727 for fn in fdict:
728 728 if badmatch and badmatch(fn):
729 729 if match(fn):
730 730 yield 'b', fn
731 731 else:
732 732 self.ui.warn(_('%s: No such file in rev %s\n') % (
733 733 util.pathto(self.getcwd(), fn), short(node)))
734 734 else:
735 735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
736 736 yield src, fn
737 737
738 738 def status(self, node1=None, node2=None, files=[], match=util.always,
739 739 wlock=None, list_ignored=False, list_clean=False):
740 740 """return status of files between two nodes, or between a node and the working directory
741 741
742 742 If node1 is None, use the first dirstate parent instead.
743 743 If node2 is None, compare node1 with working directory.
744 744 """
745 745
746 746 def fcmp(fn, mf):
747 747 t1 = self.wread(fn)
748 748 return self.file(fn).cmp(mf.get(fn, nullid), t1)
749 749
750 750 def mfmatches(node):
751 751 change = self.changelog.read(node)
752 752 mf = self.manifest.read(change[0]).copy()
753 753 for fn in mf.keys():
754 754 if not match(fn):
755 755 del mf[fn]
756 756 return mf
757 757
758 758 modified, added, removed, deleted, unknown = [], [], [], [], []
759 759 ignored, clean = [], []
760 760
761 761 compareworking = False
762 762 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
763 763 compareworking = True
764 764
765 765 if not compareworking:
766 766 # read the manifest from node1 before the manifest from node2,
767 767 # so that we'll hit the manifest cache if we're going through
768 768 # all the revisions in parent->child order.
769 769 mf1 = mfmatches(node1)
770 770
771 771 # are we comparing the working directory?
772 772 if not node2:
773 773 if not wlock:
774 774 try:
775 775 wlock = self.wlock(wait=0)
776 776 except lock.LockException:
777 777 wlock = None
778 778 (lookup, modified, added, removed, deleted, unknown,
779 779 ignored, clean) = self.dirstate.status(files, match,
780 780 list_ignored, list_clean)
781 781
782 782 # are we comparing working dir against its parent?
783 783 if compareworking:
784 784 if lookup:
785 785 # do a full compare of any files that might have changed
786 786 mf2 = mfmatches(self.dirstate.parents()[0])
787 787 for f in lookup:
788 788 if fcmp(f, mf2):
789 789 modified.append(f)
790 790 else:
791 791 clean.append(f)
792 792 if wlock is not None:
793 793 self.dirstate.update([f], "n")
794 794 else:
795 795 # we are comparing working dir against non-parent
796 796 # generate a pseudo-manifest for the working dir
797 797 # XXX: create it in dirstate.py ?
798 798 mf2 = mfmatches(self.dirstate.parents()[0])
799 799 for f in lookup + modified + added:
800 800 mf2[f] = ""
801 801 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
802 802 for f in removed:
803 803 if f in mf2:
804 804 del mf2[f]
805 805 else:
806 806 # we are comparing two revisions
807 807 mf2 = mfmatches(node2)
808 808
809 809 if not compareworking:
810 810 # flush lists from dirstate before comparing manifests
811 811 modified, added, clean = [], [], []
812 812
813 813 # make sure to sort the files so we talk to the disk in a
814 814 # reasonable order
815 815 mf2keys = mf2.keys()
816 816 mf2keys.sort()
817 817 for fn in mf2keys:
818 818 if mf1.has_key(fn):
819 819 if mf1.flags(fn) != mf2.flags(fn) or \
820 820 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
821 821 modified.append(fn)
822 822 elif list_clean:
823 823 clean.append(fn)
824 824 del mf1[fn]
825 825 else:
826 826 added.append(fn)
827 827
828 828 removed = mf1.keys()
829 829
830 830 # sort and return results:
831 831 for l in modified, added, removed, deleted, unknown, ignored, clean:
832 832 l.sort()
833 833 return (modified, added, removed, deleted, unknown, ignored, clean)
834 834
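A brief usage sketch for status(), assuming repo is an existing localrepository instance; it returns the seven sorted lists in the order of the return statement above:

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(list_ignored=True, list_clean=True)
    for f in modified:
        print('M %s' % f)
    for f in unknown:
        print('? %s' % f)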
835 835 def add(self, list, wlock=None):
836 836 if not wlock:
837 837 wlock = self.wlock()
838 838 for f in list:
839 839 p = self.wjoin(f)
840 840 if not os.path.exists(p):
841 841 self.ui.warn(_("%s does not exist!\n") % f)
842 842 elif not os.path.isfile(p):
843 843 self.ui.warn(_("%s not added: only files supported currently\n")
844 844 % f)
845 845 elif self.dirstate.state(f) in 'an':
846 846 self.ui.warn(_("%s already tracked!\n") % f)
847 847 else:
848 848 self.dirstate.update([f], "a")
849 849
850 850 def forget(self, list, wlock=None):
851 851 if not wlock:
852 852 wlock = self.wlock()
853 853 for f in list:
854 854 if self.dirstate.state(f) not in 'ai':
855 855 self.ui.warn(_("%s not added!\n") % f)
856 856 else:
857 857 self.dirstate.forget([f])
858 858
859 859 def remove(self, list, unlink=False, wlock=None):
860 860 if unlink:
861 861 for f in list:
862 862 try:
863 863 util.unlink(self.wjoin(f))
864 864 except OSError, inst:
865 865 if inst.errno != errno.ENOENT:
866 866 raise
867 867 if not wlock:
868 868 wlock = self.wlock()
869 869 for f in list:
870 870 p = self.wjoin(f)
871 871 if os.path.exists(p):
872 872 self.ui.warn(_("%s still exists!\n") % f)
873 873 elif self.dirstate.state(f) == 'a':
874 874 self.dirstate.forget([f])
875 875 elif f not in self.dirstate:
876 876 self.ui.warn(_("%s not tracked!\n") % f)
877 877 else:
878 878 self.dirstate.update([f], "r")
879 879
880 880 def undelete(self, list, wlock=None):
881 881 p = self.dirstate.parents()[0]
882 882 mn = self.changelog.read(p)[0]
883 883 m = self.manifest.read(mn)
884 884 if not wlock:
885 885 wlock = self.wlock()
886 886 for f in list:
887 887 if self.dirstate.state(f) not in "r":
888 888 self.ui.warn("%s not removed!\n" % f)
889 889 else:
890 890 t = self.file(f).read(m[f])
891 891 self.wwrite(f, t)
892 892 util.set_exec(self.wjoin(f), m.execf(f))
893 893 self.dirstate.update([f], "n")
894 894
895 895 def copy(self, source, dest, wlock=None):
896 896 p = self.wjoin(dest)
897 897 if not os.path.exists(p):
898 898 self.ui.warn(_("%s does not exist!\n") % dest)
899 899 elif not os.path.isfile(p):
900 900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
901 901 else:
902 902 if not wlock:
903 903 wlock = self.wlock()
904 904 if self.dirstate.state(dest) == '?':
905 905 self.dirstate.update([dest], "a")
906 906 self.dirstate.copy(source, dest)
907 907
908 908 def heads(self, start=None):
909 909 heads = self.changelog.heads(start)
910 910 # sort the output in rev descending order
911 911 heads = [(-self.changelog.rev(h), h) for h in heads]
912 912 heads.sort()
913 913 return [n for (r, n) in heads]
914 914
915 915 # branchlookup returns a dict giving a list of branches for
916 916 # each head. A branch is defined as the tag of a node or
917 917 # the branch of the node's parents. If a node has multiple
918 918 # branch tags, tags are eliminated if they are visible from other
919 919 # branch tags.
920 920 #
921 921 # So, for this graph: a->b->c->d->e
922 922 # \ /
923 923 # aa -----/
924 924 # a has tag 2.6.12
925 925 # d has tag 2.6.13
926 926 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
927 927 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
928 928 # from the list.
929 929 #
930 930 # It is possible that more than one head will have the same branch tag.
931 931 # callers need to check the result for multiple heads under the same
932 932 # branch tag if that is a problem for them (ie checkout of a specific
933 933 # branch).
934 934 #
935 935 # passing in a specific branch will limit the depth of the search
936 936 # through the parents. It won't limit the branches returned in the
937 937 # result though.
938 938 def branchlookup(self, heads=None, branch=None):
939 939 if not heads:
940 940 heads = self.heads()
941 941 headt = [ h for h in heads ]
942 942 chlog = self.changelog
943 943 branches = {}
944 944 merges = []
945 945 seenmerge = {}
946 946
947 947 # traverse the tree once for each head, recording in the branches
948 948 # dict which tags are visible from this head. The branches
949 949 # dict also records which tags are visible from each tag
950 950 # while we traverse.
951 951 while headt or merges:
952 952 if merges:
953 953 n, found = merges.pop()
954 954 visit = [n]
955 955 else:
956 956 h = headt.pop()
957 957 visit = [h]
958 958 found = [h]
959 959 seen = {}
960 960 while visit:
961 961 n = visit.pop()
962 962 if n in seen:
963 963 continue
964 964 pp = chlog.parents(n)
965 965 tags = self.nodetags(n)
966 966 if tags:
967 967 for x in tags:
968 968 if x == 'tip':
969 969 continue
970 970 for f in found:
971 971 branches.setdefault(f, {})[n] = 1
972 972 branches.setdefault(n, {})[n] = 1
973 973 break
974 974 if n not in found:
975 975 found.append(n)
976 976 if branch in tags:
977 977 continue
978 978 seen[n] = 1
979 979 if pp[1] != nullid and n not in seenmerge:
980 980 merges.append((pp[1], [x for x in found]))
981 981 seenmerge[n] = 1
982 982 if pp[0] != nullid:
983 983 visit.append(pp[0])
984 984 # traverse the branches dict, eliminating branch tags from each
985 985 # head that are visible from another branch tag for that head.
986 986 out = {}
987 987 viscache = {}
988 988 for h in heads:
989 989 def visible(node):
990 990 if node in viscache:
991 991 return viscache[node]
992 992 ret = {}
993 993 visit = [node]
994 994 while visit:
995 995 x = visit.pop()
996 996 if x in viscache:
997 997 ret.update(viscache[x])
998 998 elif x not in ret:
999 999 ret[x] = 1
1000 1000 if x in branches:
1001 1001 visit[len(visit):] = branches[x].keys()
1002 1002 viscache[node] = ret
1003 1003 return ret
1004 1004 if h not in branches:
1005 1005 continue
1006 1006 # O(n^2), but somewhat limited. This only searches the
1007 1007 # tags visible from a specific head, not all the tags in the
1008 1008 # whole repo.
1009 1009 for b in branches[h]:
1010 1010 vis = False
1011 1011 for bb in branches[h].keys():
1012 1012 if b != bb:
1013 1013 if b in visible(bb):
1014 1014 vis = True
1015 1015 break
1016 1016 if not vis:
1017 1017 l = out.setdefault(h, [])
1018 1018 l[len(l):] = self.nodetags(b)
1019 1019 return out
1020 1020
1021 1021 def branches(self, nodes):
1022 1022 if not nodes:
1023 1023 nodes = [self.changelog.tip()]
1024 1024 b = []
1025 1025 for n in nodes:
1026 1026 t = n
1027 1027 while 1:
1028 1028 p = self.changelog.parents(n)
1029 1029 if p[1] != nullid or p[0] == nullid:
1030 1030 b.append((t, n, p[0], p[1]))
1031 1031 break
1032 1032 n = p[0]
1033 1033 return b
1034 1034
1035 1035 def between(self, pairs):
1036 1036 r = []
1037 1037
1038 1038 for top, bottom in pairs:
1039 1039 n, l, i = top, [], 0
1040 1040 f = 1
1041 1041
1042 1042 while n != bottom:
1043 1043 p = self.changelog.parents(n)[0]
1044 1044 if i == f:
1045 1045 l.append(n)
1046 1046 f = f * 2
1047 1047 n = p
1048 1048 i += 1
1049 1049
1050 1050 r.append(l)
1051 1051
1052 1052 return r
1053 1053
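# A minimal standalone sketch (not part of localrepo) of the sampling
# pattern implemented by between() above: walking from 'top' towards
# 'bottom', the ancestor at distance 1, 2, 4, 8, ... from the top is
# recorded, giving a logarithmic sample of the linear segment that the
# discovery code in findincoming() can then binary-search over.
def sample_distances(length):
    # distances from the top that between() would append to l for a
    # linear segment of the given length
    l, f, i = [], 1, 0
    while i < length:
        if i == f:
            l.append(i)
            f = f * 2
        i += 1
    return l

# sample_distances(20) == [1, 2, 4, 8, 16]
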
1054 1054 def findincoming(self, remote, base=None, heads=None, force=False):
1055 1055 """Return list of roots of the subsets of missing nodes from remote
1056 1056
1057 1057 If base dict is specified, assume that these nodes and their parents
1058 1058 exist on the remote side and that no child of a node of base exists
1059 1059 in both remote and self.
1060 1060 Furthermore, base will be updated to include the nodes that exist
1061 1061 in both self and remote but none of whose children exist in both.
1062 1062 If a list of heads is specified, return only nodes which are heads
1063 1063 or ancestors of these heads.
1064 1064
1065 1065 All the ancestors of base are in self and in remote.
1066 1066 All the descendants of the list returned are missing in self.
1067 1067 (and so we know that the rest of the nodes are missing in remote, see
1068 1068 outgoing)
1069 1069 """
1070 1070 m = self.changelog.nodemap
1071 1071 search = []
1072 1072 fetch = {}
1073 1073 seen = {}
1074 1074 seenbranch = {}
1075 1075 if base == None:
1076 1076 base = {}
1077 1077
1078 1078 if not heads:
1079 1079 heads = remote.heads()
1080 1080
1081 1081 if self.changelog.tip() == nullid:
1082 1082 base[nullid] = 1
1083 1083 if heads != [nullid]:
1084 1084 return [nullid]
1085 1085 return []
1086 1086
1087 1087 # assume we're closer to the tip than the root
1088 1088 # and start by examining the heads
1089 1089 self.ui.status(_("searching for changes\n"))
1090 1090
1091 1091 unknown = []
1092 1092 for h in heads:
1093 1093 if h not in m:
1094 1094 unknown.append(h)
1095 1095 else:
1096 1096 base[h] = 1
1097 1097
1098 1098 if not unknown:
1099 1099 return []
1100 1100
1101 1101 req = dict.fromkeys(unknown)
1102 1102 reqcnt = 0
1103 1103
1104 1104 # search through remote branches
1105 1105 # a 'branch' here is a linear segment of history, with four parts:
1106 1106 # head, root, first parent, second parent
1107 1107 # (a branch always has two parents (or none) by definition)
1108 1108 unknown = remote.branches(unknown)
1109 1109 while unknown:
1110 1110 r = []
1111 1111 while unknown:
1112 1112 n = unknown.pop(0)
1113 1113 if n[0] in seen:
1114 1114 continue
1115 1115
1116 1116 self.ui.debug(_("examining %s:%s\n")
1117 1117 % (short(n[0]), short(n[1])))
1118 1118 if n[0] == nullid: # found the end of the branch
1119 1119 pass
1120 1120 elif n in seenbranch:
1121 1121 self.ui.debug(_("branch already found\n"))
1122 1122 continue
1123 1123 elif n[1] and n[1] in m: # do we know the base?
1124 1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1125 1125 % (short(n[0]), short(n[1])))
1126 1126 search.append(n) # schedule branch range for scanning
1127 1127 seenbranch[n] = 1
1128 1128 else:
1129 1129 if n[1] not in seen and n[1] not in fetch:
1130 1130 if n[2] in m and n[3] in m:
1131 1131 self.ui.debug(_("found new changeset %s\n") %
1132 1132 short(n[1]))
1133 1133 fetch[n[1]] = 1 # earliest unknown
1134 1134 for p in n[2:4]:
1135 1135 if p in m:
1136 1136 base[p] = 1 # latest known
1137 1137
1138 1138 for p in n[2:4]:
1139 1139 if p not in req and p not in m:
1140 1140 r.append(p)
1141 1141 req[p] = 1
1142 1142 seen[n[0]] = 1
1143 1143
1144 1144 if r:
1145 1145 reqcnt += 1
1146 1146 self.ui.debug(_("request %d: %s\n") %
1147 1147 (reqcnt, " ".join(map(short, r))))
1148 1148 for p in xrange(0, len(r), 10):
1149 1149 for b in remote.branches(r[p:p+10]):
1150 1150 self.ui.debug(_("received %s:%s\n") %
1151 1151 (short(b[0]), short(b[1])))
1152 1152 unknown.append(b)
1153 1153
1154 1154 # do binary search on the branches we found
1155 1155 while search:
1156 1156 n = search.pop(0)
1157 1157 reqcnt += 1
1158 1158 l = remote.between([(n[0], n[1])])[0]
1159 1159 l.append(n[1])
1160 1160 p = n[0]
1161 1161 f = 1
1162 1162 for i in l:
1163 1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1164 1164 if i in m:
1165 1165 if f <= 2:
1166 1166 self.ui.debug(_("found new branch changeset %s\n") %
1167 1167 short(p))
1168 1168 fetch[p] = 1
1169 1169 base[i] = 1
1170 1170 else:
1171 1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1172 1172 % (short(p), short(i)))
1173 1173 search.append((p, i))
1174 1174 break
1175 1175 p, f = i, f * 2
1176 1176
1177 1177 # sanity check our fetch list
1178 1178 for f in fetch.keys():
1179 1179 if f in m:
1180 1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1181 1181
1182 1182 if base.keys() == [nullid]:
1183 1183 if force:
1184 1184 self.ui.warn(_("warning: repository is unrelated\n"))
1185 1185 else:
1186 1186 raise util.Abort(_("repository is unrelated"))
1187 1187
1188 1188 self.ui.debug(_("found new changesets starting at ") +
1189 1189 " ".join([short(f) for f in fetch]) + "\n")
1190 1190
1191 1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1192 1192
1193 1193 return fetch.keys()
1194 1194
1195 1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1196 1196 """Return list of nodes that are roots of subsets not in remote
1197 1197
1198 1198 If base dict is specified, assume that these nodes and their parents
1199 1199 exist on the remote side.
1200 1200 If a list of heads is specified, return only nodes which are heads
1201 1201 or ancestors of these heads, and return a second element which
1202 1202 contains all remote heads which get new children.
1203 1203 """
1204 1204 if base == None:
1205 1205 base = {}
1206 1206 self.findincoming(remote, base, heads, force=force)
1207 1207
1208 1208 self.ui.debug(_("common changesets up to ")
1209 1209 + " ".join(map(short, base.keys())) + "\n")
1210 1210
1211 1211 remain = dict.fromkeys(self.changelog.nodemap)
1212 1212
1213 1213 # prune everything remote has from the tree
1214 1214 del remain[nullid]
1215 1215 remove = base.keys()
1216 1216 while remove:
1217 1217 n = remove.pop(0)
1218 1218 if n in remain:
1219 1219 del remain[n]
1220 1220 for p in self.changelog.parents(n):
1221 1221 remove.append(p)
1222 1222
1223 1223 # find every node whose parents have been pruned
1224 1224 subset = []
1225 1225 # find every remote head that will get new children
1226 1226 updated_heads = {}
1227 1227 for n in remain:
1228 1228 p1, p2 = self.changelog.parents(n)
1229 1229 if p1 not in remain and p2 not in remain:
1230 1230 subset.append(n)
1231 1231 if heads:
1232 1232 if p1 in heads:
1233 1233 updated_heads[p1] = True
1234 1234 if p2 in heads:
1235 1235 updated_heads[p2] = True
1236 1236
1237 1237 # this is the set of all roots we have to push
1238 1238 if heads:
1239 1239 return subset, updated_heads.keys()
1240 1240 else:
1241 1241 return subset
1242 1242
1243 1243 def pull(self, remote, heads=None, force=False, lock=None):
1244 1244 mylock = False
1245 1245 if not lock:
1246 1246 lock = self.lock()
1247 1247 mylock = True
1248 1248
1249 1249 try:
1250 1250 fetch = self.findincoming(remote, force=force)
1251 1251 if fetch == [nullid]:
1252 1252 self.ui.status(_("requesting all changes\n"))
1253 1253
1254 1254 if not fetch:
1255 1255 self.ui.status(_("no changes found\n"))
1256 1256 return 0
1257 1257
1258 1258 if heads is None:
1259 1259 cg = remote.changegroup(fetch, 'pull')
1260 1260 else:
1261 1261 if 'changegroupsubset' not in remote.capabilities:
1262 1262 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1263 1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1264 1264 return self.addchangegroup(cg, 'pull', remote.url())
1265 1265 finally:
1266 1266 if mylock:
1267 1267 lock.release()
1268 1268
1269 1269 def push(self, remote, force=False, revs=None):
1270 1270 # there are two ways to push to remote repo:
1271 1271 #
1272 1272 # addchangegroup assumes local user can lock remote
1273 1273 # repo (local filesystem, old ssh servers).
1274 1274 #
1275 1275 # unbundle assumes local user cannot lock remote repo (new ssh
1276 1276 # servers, http servers).
1277 1277
1278 1278 if remote.capable('unbundle'):
1279 1279 return self.push_unbundle(remote, force, revs)
1280 1280 return self.push_addchangegroup(remote, force, revs)
1281 1281
1282 1282 def prepush(self, remote, force, revs):
1283 1283 base = {}
1284 1284 remote_heads = remote.heads()
1285 1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1286 1286 if not force and inc:
1287 1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1288 1288 self.ui.status(_("(did you forget to sync?"
1289 1289 " use push -f to force)\n"))
1290 1290 return None, 1
1291 1291
1292 1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1293 1293 if revs is not None:
1294 1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1295 1295 else:
1296 1296 bases, heads = update, self.changelog.heads()
1297 1297
1298 1298 if not bases:
1299 1299 self.ui.status(_("no changes found\n"))
1300 1300 return None, 1
1301 1301 elif not force:
1302 1302 # FIXME we don't properly detect creation of new heads
1303 1303 # in the push -r case, assume the user knows what he's doing
1304 1304 if not revs and len(remote_heads) < len(heads) \
1305 1305 and remote_heads != [nullid]:
1306 1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1307 1307 self.ui.status(_("(did you forget to merge?"
1308 1308 " use push -f to force)\n"))
1309 1309 return None, 1
1310 1310
1311 1311 if revs is None:
1312 1312 cg = self.changegroup(update, 'push')
1313 1313 else:
1314 1314 cg = self.changegroupsubset(update, revs, 'push')
1315 1315 return cg, remote_heads
1316 1316
1317 1317 def push_addchangegroup(self, remote, force, revs):
1318 1318 lock = remote.lock()
1319 1319
1320 1320 ret = self.prepush(remote, force, revs)
1321 1321 if ret[0] is not None:
1322 1322 cg, remote_heads = ret
1323 1323 return remote.addchangegroup(cg, 'push', self.url())
1324 1324 return ret[1]
1325 1325
1326 1326 def push_unbundle(self, remote, force, revs):
1327 1327 # local repo finds heads on server, finds out what revs it
1328 1328 # must push. once revs transferred, if server finds it has
1329 1329 # different heads (someone else won commit/push race), server
1330 1330 # aborts.
1331 1331
1332 1332 ret = self.prepush(remote, force, revs)
1333 1333 if ret[0] is not None:
1334 1334 cg, remote_heads = ret
1335 1335 if force: remote_heads = ['force']
1336 1336 return remote.unbundle(cg, remote_heads, 'push')
1337 1337 return ret[1]
1338 1338
1339 1339 def changegroupinfo(self, nodes):
1340 1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1341 1341 if self.ui.debugflag:
1342 1342 self.ui.debug(_("List of changesets:\n"))
1343 1343 for node in nodes:
1344 1344 self.ui.debug("%s\n" % hex(node))
1345 1345
1346 1346 def changegroupsubset(self, bases, heads, source):
1347 1347 """This function generates a changegroup consisting of all the nodes
1348 1348 that are descendants of any of the bases, and ancestors of any of
1349 1349 the heads.
1350 1350
1351 1351 It is fairly complex as determining which filenodes and which
1352 1352 manifest nodes need to be included for the changeset to be complete
1353 1353 is non-trivial.
1354 1354
1355 1355 Another wrinkle is doing the reverse, figuring out which changeset in
1356 1356 the changegroup a particular filenode or manifestnode belongs to."""
1357 1357
1358 1358 self.hook('preoutgoing', throw=True, source=source)
1359 1359
1360 1360 # Set up some initial variables
1361 1361 # Make it easy to refer to self.changelog
1362 1362 cl = self.changelog
1363 1363 # msng is short for missing - compute the list of changesets in this
1364 1364 # changegroup.
1365 1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1366 1366 self.changegroupinfo(msng_cl_lst)
1367 1367 # Some bases may turn out to be superfluous, and some heads may be
1368 1368 # too. nodesbetween will return the minimal set of bases and heads
1369 1369 # necessary to re-create the changegroup.
1370 1370
1371 1371 # Known heads are the list of heads that it is assumed the recipient
1372 1372 # of this changegroup will know about.
1373 1373 knownheads = {}
1374 1374 # We assume that all parents of bases are known heads.
1375 1375 for n in bases:
1376 1376 for p in cl.parents(n):
1377 1377 if p != nullid:
1378 1378 knownheads[p] = 1
1379 1379 knownheads = knownheads.keys()
1380 1380 if knownheads:
1381 1381 # Now that we know what heads are known, we can compute which
1382 1382 # changesets are known. The recipient must know about all
1383 1383 # changesets required to reach the known heads from the null
1384 1384 # changeset.
1385 1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1386 1386 junk = None
1387 1387 # Transform the list into an ersatz set.
1388 1388 has_cl_set = dict.fromkeys(has_cl_set)
1389 1389 else:
1390 1390 # If there were no known heads, the recipient cannot be assumed to
1391 1391 # know about any changesets.
1392 1392 has_cl_set = {}
1393 1393
1394 1394 # Make it easy to refer to self.manifest
1395 1395 mnfst = self.manifest
1396 1396 # We don't know which manifests are missing yet
1397 1397 msng_mnfst_set = {}
1398 1398 # Nor do we know which filenodes are missing.
1399 1399 msng_filenode_set = {}
1400 1400
1401 1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1402 1402 junk = None
1403 1403
1404 1404 # A changeset always belongs to itself, so the changenode lookup
1405 1405 # function for a changenode is identity.
1406 1406 def identity(x):
1407 1407 return x
1408 1408
1409 1409 # A function generating function. Sets up an environment for the
1410 1410 # inner function.
1411 1411 def cmp_by_rev_func(revlog):
1412 1412 # Compare two nodes by their revision number in the environment's
1413 1413 # revision history. Since the revision number both represents the
1414 1414 # most efficient order to read the nodes in, and represents a
1415 1415 # topological sorting of the nodes, this function is often useful.
1416 1416 def cmp_by_rev(a, b):
1417 1417 return cmp(revlog.rev(a), revlog.rev(b))
1418 1418 return cmp_by_rev
1419 1419
1420 1420 # If we determine that a particular file or manifest node must be a
1421 1421 # node that the recipient of the changegroup will already have, we can
1422 1422 # also assume the recipient will have all the parents. This function
1423 1423 # prunes them from the set of missing nodes.
1424 1424 def prune_parents(revlog, hasset, msngset):
1425 1425 haslst = hasset.keys()
1426 1426 haslst.sort(cmp_by_rev_func(revlog))
1427 1427 for node in haslst:
1428 1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1429 1429 while parentlst:
1430 1430 n = parentlst.pop()
1431 1431 if n not in hasset:
1432 1432 hasset[n] = 1
1433 1433 p = [p for p in revlog.parents(n) if p != nullid]
1434 1434 parentlst.extend(p)
1435 1435 for n in hasset:
1436 1436 msngset.pop(n, None)
1437 1437
1438 1438 # This is a function generating function used to set up an environment
1439 1439 # for the inner function to execute in.
1440 1440 def manifest_and_file_collector(changedfileset):
1441 1441 # This is an information gathering function that gathers
1442 1442 # information from each changeset node that goes out as part of
1443 1443 # the changegroup. The information gathered is a list of which
1444 1444 # manifest nodes are potentially required (the recipient may
1445 1445 # already have them) and total list of all files which were
1446 1446 # changed in any changeset in the changegroup.
1447 1447 #
1448 1448 # We also remember, for each manifest, the first changenode we saw
1449 1449 # reference it, so we can later determine which changenode 'owns'
1450 1450 # the manifest.
1451 1451 def collect_manifests_and_files(clnode):
1452 1452 c = cl.read(clnode)
1453 1453 for f in c[3]:
1454 1454 # This is to make sure we only have one instance of each
1455 1455 # filename string for each filename.
1456 1456 changedfileset.setdefault(f, f)
1457 1457 msng_mnfst_set.setdefault(c[0], clnode)
1458 1458 return collect_manifests_and_files
1459 1459
1460 1460 # Figure out which manifest nodes (of the ones we think might be part
1461 1461 # of the changegroup) the recipient must know about and remove them
1462 1462 # from the changegroup.
1463 1463 def prune_manifests():
1464 1464 has_mnfst_set = {}
1465 1465 for n in msng_mnfst_set:
1466 1466 # If a 'missing' manifest thinks it belongs to a changenode
1467 1467 # the recipient is assumed to have, obviously the recipient
1468 1468 # must have that manifest.
1469 1469 linknode = cl.node(mnfst.linkrev(n))
1470 1470 if linknode in has_cl_set:
1471 1471 has_mnfst_set[n] = 1
1472 1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1473 1473
1474 1474 # Use the information collected in collect_manifests_and_files to say
1475 1475 # which changenode any manifestnode belongs to.
1476 1476 def lookup_manifest_link(mnfstnode):
1477 1477 return msng_mnfst_set[mnfstnode]
1478 1478
1479 1479 # A function generating function that sets up the initial environment
1480 1480 # for the inner function.
1481 1481 def filenode_collector(changedfiles):
1482 1482 next_rev = [0]
1483 1483 # This gathers information from each manifestnode included in the
1484 1484 # changegroup about which filenodes the manifest node references
1485 1485 # so we can include those in the changegroup too.
1486 1486 #
1487 1487 # It also remembers which changenode each filenode belongs to. It
1488 1488 # does this by assuming that a filenode belongs to the changenode
1489 1489 # the first manifest that references it belongs to.
1490 1490 def collect_msng_filenodes(mnfstnode):
1491 1491 r = mnfst.rev(mnfstnode)
1492 1492 if r == next_rev[0]:
1493 1493 # If the last rev we looked at was the one just previous,
1494 1494 # we only need to see a diff.
1495 1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1496 1496 # For each line in the delta
1497 1497 for dline in delta.splitlines():
1498 1498 # get the filename and filenode for that line
1499 1499 f, fnode = dline.split('\0')
1500 1500 fnode = bin(fnode[:40])
1501 1501 f = changedfiles.get(f, None)
1502 1502 # And if the file is in the list of files we care
1503 1503 # about.
1504 1504 if f is not None:
1505 1505 # Get the changenode this manifest belongs to
1506 1506 clnode = msng_mnfst_set[mnfstnode]
1507 1507 # Create the set of filenodes for the file if
1508 1508 # there isn't one already.
1509 1509 ndset = msng_filenode_set.setdefault(f, {})
1510 1510 # And set the filenode's changelog node to the
1511 1511 # manifest's if it hasn't been set already.
1512 1512 ndset.setdefault(fnode, clnode)
1513 1513 else:
1514 1514 # Otherwise we need a full manifest.
1515 1515 m = mnfst.read(mnfstnode)
1516 1516 # For every file we care about.
1517 1517 for f in changedfiles:
1518 1518 fnode = m.get(f, None)
1519 1519 # If it's in the manifest
1520 1520 if fnode is not None:
1521 1521 # See comments above.
1522 1522 clnode = msng_mnfst_set[mnfstnode]
1523 1523 ndset = msng_filenode_set.setdefault(f, {})
1524 1524 ndset.setdefault(fnode, clnode)
1525 1525 # Remember the revision we hope to see next.
1526 1526 next_rev[0] = r + 1
1527 1527 return collect_msng_filenodes
1528 1528
1529 1529 # We have a list of filenodes we think we need for a file, let's remove
1530 1530 # all those we know the recipient must have.
1531 1531 def prune_filenodes(f, filerevlog):
1532 1532 msngset = msng_filenode_set[f]
1533 1533 hasset = {}
1534 1534 # If a 'missing' filenode thinks it belongs to a changenode we
1535 1535 # assume the recipient must have, then the recipient must have
1536 1536 # that filenode.
1537 1537 for n in msngset:
1538 1538 clnode = cl.node(filerevlog.linkrev(n))
1539 1539 if clnode in has_cl_set:
1540 1540 hasset[n] = 1
1541 1541 prune_parents(filerevlog, hasset, msngset)
1542 1542
1543 1543 # A function generating function that sets up a context for the
1544 1544 # inner function.
1545 1545 def lookup_filenode_link_func(fname):
1546 1546 msngset = msng_filenode_set[fname]
1547 1547 # Lookup the changenode the filenode belongs to.
1548 1548 def lookup_filenode_link(fnode):
1549 1549 return msngset[fnode]
1550 1550 return lookup_filenode_link
1551 1551
1552 1552 # Now that we have all these utility functions to help out and
1553 1553 # logically divide up the task, generate the group.
1554 1554 def gengroup():
1555 1555 # The set of changed files starts empty.
1556 1556 changedfiles = {}
1557 1557 # Create a changenode group generator that will call our functions
1558 1558 # back to lookup the owning changenode and collect information.
1559 1559 group = cl.group(msng_cl_lst, identity,
1560 1560 manifest_and_file_collector(changedfiles))
1561 1561 for chnk in group:
1562 1562 yield chnk
1563 1563
1564 1564 # The list of manifests has been collected by the generator
1565 1565 # calling our functions back.
1566 1566 prune_manifests()
1567 1567 msng_mnfst_lst = msng_mnfst_set.keys()
1568 1568 # Sort the manifestnodes by revision number.
1569 1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1570 1570 # Create a generator for the manifestnodes that calls our lookup
1571 1571 # and data collection functions back.
1572 1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1573 1573 filenode_collector(changedfiles))
1574 1574 for chnk in group:
1575 1575 yield chnk
1576 1576
1577 1577 # These are no longer needed, dereference and toss the memory for
1578 1578 # them.
1579 1579 msng_mnfst_lst = None
1580 1580 msng_mnfst_set.clear()
1581 1581
1582 1582 changedfiles = changedfiles.keys()
1583 1583 changedfiles.sort()
1584 1584 # Go through all our files in order sorted by name.
1585 1585 for fname in changedfiles:
1586 1586 filerevlog = self.file(fname)
1587 1587 # Toss out the filenodes that the recipient isn't really
1588 1588 # missing.
1589 1589 if msng_filenode_set.has_key(fname):
1590 1590 prune_filenodes(fname, filerevlog)
1591 1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1592 1592 else:
1593 1593 msng_filenode_lst = []
1594 1594 # If any filenodes are left, generate the group for them,
1595 1595 # otherwise don't bother.
1596 1596 if len(msng_filenode_lst) > 0:
1597 1597 yield changegroup.genchunk(fname)
1598 1598 # Sort the filenodes by their revision #
1599 1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1600 1600 # Create a group generator and only pass in a changenode
1601 1601 # lookup function, as we don't need to collect any information
1602 1602 # from filenodes.
1603 1603 group = filerevlog.group(msng_filenode_lst,
1604 1604 lookup_filenode_link_func(fname))
1605 1605 for chnk in group:
1606 1606 yield chnk
1607 1607 if msng_filenode_set.has_key(fname):
1608 1608 # Don't need this anymore, toss it to free memory.
1609 1609 del msng_filenode_set[fname]
1610 1610 # Signal that no more groups are left.
1611 1611 yield changegroup.closechunk()
1612 1612
1613 1613 if msng_cl_lst:
1614 1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1615 1615
1616 1616 return util.chunkbuffer(gengroup())
1617 1617
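# A hedged standalone illustration of the cmp_by_rev_func idea used in
# changegroupsubset() above: sorting nodes by revision number yields both
# the cheapest order to read them in and a topological order. 'revof'
# stands in for revlog.rev and is an assumption of this sketch.
def sort_by_rev(nodes, revof):
    decorated = [(revof(n), n) for n in nodes]
    decorated.sort()
    return [n for (r, n) in decorated]
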
1618 1618 def changegroup(self, basenodes, source):
1619 1619 """Generate a changegroup of all nodes that we have that a recipient
1620 1620 doesn't.
1621 1621
1622 1622 This is much easier than the previous function as we can assume that
1623 1623 the recipient has any changenode we aren't sending them."""
1624 1624
1625 1625 self.hook('preoutgoing', throw=True, source=source)
1626 1626
1627 1627 cl = self.changelog
1628 1628 nodes = cl.nodesbetween(basenodes, None)[0]
1629 1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1630 1630 self.changegroupinfo(nodes)
1631 1631
1632 1632 def identity(x):
1633 1633 return x
1634 1634
1635 1635 def gennodelst(revlog):
1636 1636 for r in xrange(0, revlog.count()):
1637 1637 n = revlog.node(r)
1638 1638 if revlog.linkrev(n) in revset:
1639 1639 yield n
1640 1640
1641 1641 def changed_file_collector(changedfileset):
1642 1642 def collect_changed_files(clnode):
1643 1643 c = cl.read(clnode)
1644 1644 for fname in c[3]:
1645 1645 changedfileset[fname] = 1
1646 1646 return collect_changed_files
1647 1647
1648 1648 def lookuprevlink_func(revlog):
1649 1649 def lookuprevlink(n):
1650 1650 return cl.node(revlog.linkrev(n))
1651 1651 return lookuprevlink
1652 1652
1653 1653 def gengroup():
1654 1654 # construct a list of all changed files
1655 1655 changedfiles = {}
1656 1656
1657 1657 for chnk in cl.group(nodes, identity,
1658 1658 changed_file_collector(changedfiles)):
1659 1659 yield chnk
1660 1660 changedfiles = changedfiles.keys()
1661 1661 changedfiles.sort()
1662 1662
1663 1663 mnfst = self.manifest
1664 1664 nodeiter = gennodelst(mnfst)
1665 1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1666 1666 yield chnk
1667 1667
1668 1668 for fname in changedfiles:
1669 1669 filerevlog = self.file(fname)
1670 1670 nodeiter = gennodelst(filerevlog)
1671 1671 nodeiter = list(nodeiter)
1672 1672 if nodeiter:
1673 1673 yield changegroup.genchunk(fname)
1674 1674 lookup = lookuprevlink_func(filerevlog)
1675 1675 for chnk in filerevlog.group(nodeiter, lookup):
1676 1676 yield chnk
1677 1677
1678 1678 yield changegroup.closechunk()
1679 1679
1680 1680 if nodes:
1681 1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1682 1682
1683 1683 return util.chunkbuffer(gengroup())
1684 1684
1685 1685 def addchangegroup(self, source, srctype, url):
1686 1686 """add changegroup to repo.
1687 1687 returns number of heads modified or added + 1."""
1688 1688
1689 1689 def csmap(x):
1690 1690 self.ui.debug(_("add changeset %s\n") % short(x))
1691 1691 return cl.count()
1692 1692
1693 1693 def revmap(x):
1694 1694 return cl.rev(x)
1695 1695
1696 1696 if not source:
1697 1697 return 0
1698 1698
1699 1699 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1700 1700
1701 1701 changesets = files = revisions = 0
1702 1702
1703 1703 tr = self.transaction()
1704 1704
1705 1705 # write changelog data to temp files so concurrent readers will not see
1706 1706 # an inconsistent view
1707 1707 cl = None
1708 1708 try:
1709 1709 cl = appendfile.appendchangelog(self.sopener,
1710 1710 self.changelog.version)
1711 1711
1712 1712 oldheads = len(cl.heads())
1713 1713
1714 1714 # pull off the changeset group
1715 1715 self.ui.status(_("adding changesets\n"))
1716 1716 cor = cl.count() - 1
1717 1717 chunkiter = changegroup.chunkiter(source)
1718 1718 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1719 1719 raise util.Abort(_("received changelog group is empty"))
1720 1720 cnr = cl.count() - 1
1721 1721 changesets = cnr - cor
1722 1722
1723 1723 # pull off the manifest group
1724 1724 self.ui.status(_("adding manifests\n"))
1725 1725 chunkiter = changegroup.chunkiter(source)
1726 1726 # no need to check for empty manifest group here:
1727 1727 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1728 1728 # no new manifest will be created and the manifest group will
1729 1729 # be empty during the pull
1730 1730 self.manifest.addgroup(chunkiter, revmap, tr)
1731 1731
1732 1732 # process the files
1733 1733 self.ui.status(_("adding file changes\n"))
1734 1734 while 1:
1735 1735 f = changegroup.getchunk(source)
1736 1736 if not f:
1737 1737 break
1738 1738 self.ui.debug(_("adding %s revisions\n") % f)
1739 1739 fl = self.file(f)
1740 1740 o = fl.count()
1741 1741 chunkiter = changegroup.chunkiter(source)
1742 1742 if fl.addgroup(chunkiter, revmap, tr) is None:
1743 1743 raise util.Abort(_("received file revlog group is empty"))
1744 1744 revisions += fl.count() - o
1745 1745 files += 1
1746 1746
1747 1747 cl.writedata()
1748 1748 finally:
1749 1749 if cl:
1750 1750 cl.cleanup()
1751 1751
1752 1752 # make changelog see real files again
1753 1753 self.changelog = changelog.changelog(self.sopener,
1754 1754 self.changelog.version)
1755 1755 self.changelog.checkinlinesize(tr)
1756 1756
1757 1757 newheads = len(self.changelog.heads())
1758 1758 heads = ""
1759 1759 if oldheads and newheads != oldheads:
1760 1760 heads = _(" (%+d heads)") % (newheads - oldheads)
1761 1761
1762 1762 self.ui.status(_("added %d changesets"
1763 1763 " with %d changes to %d files%s\n")
1764 1764 % (changesets, revisions, files, heads))
1765 1765
1766 1766 if changesets > 0:
1767 1767 self.hook('pretxnchangegroup', throw=True,
1768 1768 node=hex(self.changelog.node(cor+1)), source=srctype,
1769 1769 url=url)
1770 1770
1771 1771 tr.close()
1772 1772
1773 1773 if changesets > 0:
1774 1774 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1775 1775 source=srctype, url=url)
1776 1776
1777 1777 for i in xrange(cor + 1, cnr + 1):
1778 1778 self.hook("incoming", node=hex(self.changelog.node(i)),
1779 1779 source=srctype, url=url)
1780 1780
1781 1781 return newheads - oldheads + 1
1782 1782
1783 1783
1784 1784 def stream_in(self, remote):
1785 1785 fp = remote.stream_out()
1786 resp = int(fp.readline())
1786 l = fp.readline()
1787 try:
1788 resp = int(l)
1789 except ValueError:
1790 raise util.UnexpectedOutput(
1791 _('Unexpected response from remote server:'), l)
1787 1792 if resp != 0:
1788 1793 raise util.Abort(_('operation forbidden by server'))
1789 1794 self.ui.status(_('streaming all changes\n'))
1790 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1795 l = fp.readline()
1796 try:
1797 total_files, total_bytes = map(int, l.split(' ', 1))
1798 except (ValueError, TypeError):
1799 raise util.UnexpectedOutput(
1800 _('Unexpected response from remote server:'), l)
1791 1801 self.ui.status(_('%d files to transfer, %s of data\n') %
1792 1802 (total_files, util.bytecount(total_bytes)))
1793 1803 start = time.time()
1794 1804 for i in xrange(total_files):
1795 name, size = fp.readline().split('\0', 1)
1796 size = int(size)
1805 l = fp.readline()
1806 try:
1807 name, size = l.split('\0', 1)
1808 size = int(size)
1809 except (ValueError, TypeError):
1810 raise util.UnexpectedOutput(
1811 _('Unexpected response from remote server:'), l)
1797 1812 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1798 1813 ofp = self.sopener(name, 'w')
1799 1814 for chunk in util.filechunkiter(fp, limit=size):
1800 1815 ofp.write(chunk)
1801 1816 ofp.close()
1802 1817 elapsed = time.time() - start
1803 1818 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1804 1819 (util.bytecount(total_bytes), elapsed,
1805 1820 util.bytecount(total_bytes / elapsed)))
1806 1821 self.reload()
1807 1822 return len(self.heads()) + 1
1808 1823
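# Hedged summary of the stream_out wire format parsed by stream_in() above
# (the field names are descriptive only, not protocol identifiers):
#   line 1: "<resp>\n"                      -- 0 means OK, anything else aborts
#   line 2: "<total_files> <total_bytes>\n"
#   then, per file: "<name>\0<size>\n" followed by exactly <size> bytes of data
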
1809 1824 def clone(self, remote, heads=[], stream=False):
1810 1825 '''clone remote repository.
1811 1826
1812 1827 keyword arguments:
1813 1828 heads: list of revs to clone (forces use of pull)
1814 1829 stream: use streaming clone if possible'''
1815 1830
1816 1831 # now, all clients that can request uncompressed clones can
1817 1832 # read repo formats supported by all servers that can serve
1818 1833 # them.
1819 1834
1820 1835 # if revlog format changes, client will have to check version
1821 1836 # and format flags on "stream" capability, and use
1822 1837 # uncompressed only if compatible.
1823 1838
1824 1839 if stream and not heads and remote.capable('stream'):
1825 1840 return self.stream_in(remote)
1826 1841 return self.pull(remote, heads)
1827 1842
1828 1843 # used to avoid circular references so destructors work
1829 1844 def aftertrans(base):
1830 1845 p = base
1831 1846 def a():
1832 1847 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1833 1848 util.rename(os.path.join(p, "journal.dirstate"),
1834 1849 os.path.join(p, "undo.dirstate"))
1835 1850 return a
1836 1851
1837 1852 def instance(ui, path, create):
1838 1853 return localrepository(ui, util.drop_scheme('file', path), create)
1839 1854
1840 1855 def islocal(path):
1841 1856 return True
@@ -1,1051 +1,1054 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import gettext as _
16 16 from demandload import *
17 17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
18 18 demandload(globals(), "os threading time calendar ConfigParser")
19 19
20 20 # used by parsedate
21 21 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
22 22 '%a %b %d %H:%M:%S %Y')
23 23
24 24 class SignalInterrupt(Exception):
25 25 """Exception raised on SIGTERM and SIGHUP."""
26 26
27 27 # like SafeConfigParser but with case-sensitive keys
28 28 class configparser(ConfigParser.SafeConfigParser):
29 29 def optionxform(self, optionstr):
30 30 return optionstr
31 31
32 32 def cachefunc(func):
33 33 '''cache the result of function calls'''
34 34 # XXX doesn't handle keywords args
35 35 cache = {}
36 36 if func.func_code.co_argcount == 1:
37 37 # we gain a small amount of time because
38 38 # we don't need to pack/unpack the list
39 39 def f(arg):
40 40 if arg not in cache:
41 41 cache[arg] = func(arg)
42 42 return cache[arg]
43 43 else:
44 44 def f(*args):
45 45 if args not in cache:
46 46 cache[args] = func(*args)
47 47 return cache[args]
48 48
49 49 return f
50 50
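# Hedged usage sketch for cachefunc(): wrap a deterministic function so
# repeated calls with the same argument reuse the first result. Note that
# the cache sits in front of the wrapper only; recursive calls made inside
# the wrapped function still reach the original, uncached function.
def slow_square(x):
    return x * x
square = cachefunc(slow_square)
square(4)   # computed
square(4)   # served from the cache
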
51 51 def pipefilter(s, cmd):
52 52 '''filter string S through command CMD, returning its output'''
53 53 (pout, pin) = popen2.popen2(cmd, -1, 'b')
54 54 def writer():
55 55 try:
56 56 pin.write(s)
57 57 pin.close()
58 58 except IOError, inst:
59 59 if inst.errno != errno.EPIPE:
60 60 raise
61 61
62 62 # we should use select instead on UNIX, but this will work on most
63 63 # systems, including Windows
64 64 w = threading.Thread(target=writer)
65 65 w.start()
66 66 f = pout.read()
67 67 pout.close()
68 68 w.join()
69 69 return f
70 70
71 71 def tempfilter(s, cmd):
72 72 '''filter string S through a pair of temporary files with CMD.
73 73 CMD is used as a template to create the real command to be run,
74 74 with the strings INFILE and OUTFILE replaced by the real names of
75 75 the temporary files generated.'''
76 76 inname, outname = None, None
77 77 try:
78 78 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
79 79 fp = os.fdopen(infd, 'wb')
80 80 fp.write(s)
81 81 fp.close()
82 82 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
83 83 os.close(outfd)
84 84 cmd = cmd.replace('INFILE', inname)
85 85 cmd = cmd.replace('OUTFILE', outname)
86 86 code = os.system(cmd)
87 87 if code: raise Abort(_("command '%s' failed: %s") %
88 88 (cmd, explain_exit(code)))
89 89 return open(outname, 'rb').read()
90 90 finally:
91 91 try:
92 92 if inname: os.unlink(inname)
93 93 except: pass
94 94 try:
95 95 if outname: os.unlink(outname)
96 96 except: pass
97 97
98 98 filtertable = {
99 99 'tempfile:': tempfilter,
100 100 'pipe:': pipefilter,
101 101 }
102 102
103 103 def filter(s, cmd):
104 104 "filter a string through a command that transforms its input to its output"
105 105 for name, fn in filtertable.iteritems():
106 106 if cmd.startswith(name):
107 107 return fn(s, cmd[len(name):].lstrip())
108 108 return pipefilter(s, cmd)
109 109
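# Hedged usage sketch for filter() (assumes a POSIX 'tr' in PATH): run a
# string through an external command via the default pipe filter, or force
# the temporary-file variant with the 'tempfile:' prefix.
filter('mercurial\n', 'tr a-z A-Z')                              # -> 'MERCURIAL\n'
filter('mercurial\n', 'tempfile: tr a-z A-Z < INFILE > OUTFILE') # same result
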
110 110 def find_in_path(name, path, default=None):
111 111 '''find name in search path. path can be string (will be split
112 112 with os.pathsep), or iterable thing that returns strings. if name
113 113 found, return path to name. else return default.'''
114 114 if isinstance(path, str):
115 115 path = path.split(os.pathsep)
116 116 for p in path:
117 117 p_name = os.path.join(p, name)
118 118 if os.path.exists(p_name):
119 119 return p_name
120 120 return default
121 121
122 122 def binary(s):
123 123 """return true if a string is binary data using diff's heuristic"""
124 124 if s and '\0' in s[:4096]:
125 125 return True
126 126 return False
127 127
128 128 def unique(g):
129 129 """return the unique elements of iterable g"""
130 130 seen = {}
131 131 for f in g:
132 132 if f not in seen:
133 133 seen[f] = 1
134 134 yield f
135 135
136 136 class Abort(Exception):
137 137 """Raised if a command needs to print an error and exit."""
138 138
139 class UnexpectedOutput(Abort):
140 """Raised to print an error with part of output and exit."""
141
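# Hedged usage sketch for the new exception (the helper name below is
# illustrative, not part of this change): callers such as
# localrepo.stream_in wrap a server reply they cannot parse, so both the
# error message and the offending line get reported.
def _parse_stream_resp(line):
    try:
        return int(line)
    except ValueError:
        raise UnexpectedOutput(
            _('Unexpected response from remote server:'), line)
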
139 142 def always(fn): return True
140 143 def never(fn): return False
141 144
142 145 def patkind(name, dflt_pat='glob'):
143 146 """Split a string into an optional pattern kind prefix and the
144 147 actual pattern."""
145 148 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
146 149 if name.startswith(prefix + ':'): return name.split(':', 1)
147 150 return dflt_pat, name
148 151
149 152 def globre(pat, head='^', tail='$'):
150 153 "convert a glob pattern into a regexp"
151 154 i, n = 0, len(pat)
152 155 res = ''
153 156 group = False
154 157 def peek(): return i < n and pat[i]
155 158 while i < n:
156 159 c = pat[i]
157 160 i = i+1
158 161 if c == '*':
159 162 if peek() == '*':
160 163 i += 1
161 164 res += '.*'
162 165 else:
163 166 res += '[^/]*'
164 167 elif c == '?':
165 168 res += '.'
166 169 elif c == '[':
167 170 j = i
168 171 if j < n and pat[j] in '!]':
169 172 j += 1
170 173 while j < n and pat[j] != ']':
171 174 j += 1
172 175 if j >= n:
173 176 res += '\\['
174 177 else:
175 178 stuff = pat[i:j].replace('\\','\\\\')
176 179 i = j + 1
177 180 if stuff[0] == '!':
178 181 stuff = '^' + stuff[1:]
179 182 elif stuff[0] == '^':
180 183 stuff = '\\' + stuff
181 184 res = '%s[%s]' % (res, stuff)
182 185 elif c == '{':
183 186 group = True
184 187 res += '(?:'
185 188 elif c == '}' and group:
186 189 res += ')'
187 190 group = False
188 191 elif c == ',' and group:
189 192 res += '|'
190 193 elif c == '\\':
191 194 p = peek()
192 195 if p:
193 196 i += 1
194 197 res += re.escape(p)
195 198 else:
196 199 res += re.escape(c)
197 200 else:
198 201 res += re.escape(c)
199 202 return head + res + tail
200 203
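# Hedged examples of globre() semantics ('re' is demandloaded at the top of
# this module; the generated regex text is an implementation detail, so we
# only check what matches):
assert re.match(globre('*.py'), 'setup.py')
assert not re.match(globre('*.py'), 'dir/setup.py')      # '*' stops at '/'
assert re.match(globre('**/*.py'), 'dir/sub/setup.py')   # '**' crosses '/'
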
201 204 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
202 205
203 206 def pathto(n1, n2):
204 207 '''return the relative path from one place to another.
205 208 this returns a path in the form used by the local filesystem, not hg.'''
206 209 if not n1: return localpath(n2)
207 210 a, b = n1.split('/'), n2.split('/')
208 211 a.reverse()
209 212 b.reverse()
210 213 while a and b and a[-1] == b[-1]:
211 214 a.pop()
212 215 b.pop()
213 216 b.reverse()
214 217 return os.sep.join((['..'] * len(a)) + b)
215 218
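# Hedged worked example for pathto() (POSIX separators assumed): going
# from the location 'a/b/c' to 'a/d/e' climbs out of c and b, then
# descends into d/e.
assert pathto('a/b/c', 'a/d/e') == '../../d/e'
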
216 219 def canonpath(root, cwd, myname):
217 220 """return the canonical path of myname, given cwd and root"""
218 221 if root == os.sep:
219 222 rootsep = os.sep
220 223 elif root.endswith(os.sep):
221 224 rootsep = root
222 225 else:
223 226 rootsep = root + os.sep
224 227 name = myname
225 228 if not os.path.isabs(name):
226 229 name = os.path.join(root, cwd, name)
227 230 name = os.path.normpath(name)
228 231 if name != rootsep and name.startswith(rootsep):
229 232 name = name[len(rootsep):]
230 233 audit_path(name)
231 234 return pconvert(name)
232 235 elif name == root:
233 236 return ''
234 237 else:
235 238 # Determine whether `name' is in the hierarchy at or beneath `root',
236 239 # by iterating name=dirname(name) until that causes no change (can't
237 240 # check name == '/', because that doesn't work on windows). For each
238 241 # `name', compare dev/inode numbers. If they match, the list `rel'
239 242 # holds the reversed list of components making up the relative file
240 243 # name we want.
241 244 root_st = os.stat(root)
242 245 rel = []
243 246 while True:
244 247 try:
245 248 name_st = os.stat(name)
246 249 except OSError:
247 250 break
248 251 if samestat(name_st, root_st):
249 252 rel.reverse()
250 253 name = os.path.join(*rel)
251 254 audit_path(name)
252 255 return pconvert(name)
253 256 dirname, basename = os.path.split(name)
254 257 rel.append(basename)
255 258 if dirname == name:
256 259 break
257 260 name = dirname
258 261
259 262 raise Abort('%s not under root' % myname)
260 263
261 264 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
262 265 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
263 266
264 267 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
265 268 if os.name == 'nt':
266 269 dflt_pat = 'glob'
267 270 else:
268 271 dflt_pat = 'relpath'
269 272 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
270 273
271 274 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
272 275 """build a function to match a set of file patterns
273 276
274 277 arguments:
275 278 canonroot - the canonical root of the tree you're matching against
276 279 cwd - the current working directory, if relevant
277 280 names - patterns to find
278 281 inc - patterns to include
279 282 exc - patterns to exclude
280 283 head - a regex to prepend to patterns to control whether a match is rooted
281 284
282 285 a pattern is one of:
283 286 'glob:<rooted glob>'
284 287 're:<rooted regexp>'
285 288 'path:<rooted path>'
286 289 'relglob:<relative glob>'
287 290 'relpath:<relative path>'
288 291 'relre:<relative regexp>'
289 292 '<rooted path or regexp>'
290 293
291 294 returns:
292 295 a 3-tuple containing
293 296 - list of explicit non-pattern names passed in
294 297 - a bool match(filename) function
295 298 - a bool indicating if any patterns were passed in
296 299
297 300 todo:
298 301 make head regex a rooted bool
299 302 """
300 303
301 304 def contains_glob(name):
302 305 for c in name:
303 306 if c in _globchars: return True
304 307 return False
305 308
306 309 def regex(kind, name, tail):
307 310 '''convert a pattern into a regular expression'''
308 311 if kind == 're':
309 312 return name
310 313 elif kind == 'path':
311 314 return '^' + re.escape(name) + '(?:/|$)'
312 315 elif kind == 'relglob':
313 316 return head + globre(name, '(?:|.*/)', tail)
314 317 elif kind == 'relpath':
315 318 return head + re.escape(name) + tail
316 319 elif kind == 'relre':
317 320 if name.startswith('^'):
318 321 return name
319 322 return '.*' + name
320 323 return head + globre(name, '', tail)
321 324
322 325 def matchfn(pats, tail):
323 326 """build a matching function from a set of patterns"""
324 327 if not pats:
325 328 return
326 329 matches = []
327 330 for k, p in pats:
328 331 try:
329 332 pat = '(?:%s)' % regex(k, p, tail)
330 333 matches.append(re.compile(pat).match)
331 334 except re.error:
332 335 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
333 336 else: raise Abort("invalid pattern (%s): %s" % (k, p))
334 337
335 338 def buildfn(text):
336 339 for m in matches:
337 340 r = m(text)
338 341 if r:
339 342 return r
340 343
341 344 return buildfn
342 345
343 346 def globprefix(pat):
344 347 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
345 348 root = []
346 349 for p in pat.split(os.sep):
347 350 if contains_glob(p): break
348 351 root.append(p)
349 352 return '/'.join(root)
350 353
351 354 pats = []
352 355 files = []
353 356 roots = []
354 357 for kind, name in [patkind(p, dflt_pat) for p in names]:
355 358 if kind in ('glob', 'relpath'):
356 359 name = canonpath(canonroot, cwd, name)
357 360 if name == '':
358 361 kind, name = 'glob', '**'
359 362 if kind in ('glob', 'path', 're'):
360 363 pats.append((kind, name))
361 364 if kind == 'glob':
362 365 root = globprefix(name)
363 366 if root: roots.append(root)
364 367 elif kind == 'relpath':
365 368 files.append((kind, name))
366 369 roots.append(name)
367 370
368 371 patmatch = matchfn(pats, '$') or always
369 372 filematch = matchfn(files, '(?:/|$)') or always
370 373 incmatch = always
371 374 if inc:
372 375 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
373 376 incmatch = matchfn(inckinds, '(?:/|$)')
374 377 excmatch = lambda fn: False
375 378 if exc:
376 379 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
377 380 excmatch = matchfn(exckinds, '(?:/|$)')
378 381
379 382 return (roots,
380 383 lambda fn: (incmatch(fn) and not excmatch(fn) and
381 384 (fn.endswith('/') or
382 385 (not pats and not files) or
383 386 (pats and patmatch(fn)) or
384 387 (files and filematch(fn)))),
385 388 (inc or exc or (pats and pats != [('glob', '**')])) and True)
386 389
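# Hedged usage sketch for matcher()/_matcher(): the root path and pattern
# below are illustrative. match() returns a truthy value for paths selected
# by the patterns and a falsy one otherwise.
roots, match, anypats = matcher('/repo', '', ['glob:src/**.py'])
match('src/hg/util.py')    # truthy: selected by the glob
match('README')            # falsy: not selected
# roots == ['src'], anypats == True
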
387 390 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
388 391 '''enhanced shell command execution.
389 392 run with environment maybe modified, maybe in different dir.
390 393
391 394 if command fails and onerr is None, return status. if ui object,
392 395 print error message and return status, else raise onerr object as
393 396 exception.'''
394 397 def py2shell(val):
395 398 'convert python object into string that is useful to shell'
396 399 if val in (None, False):
397 400 return '0'
398 401 if val == True:
399 402 return '1'
400 403 return str(val)
401 404 oldenv = {}
402 405 for k in environ:
403 406 oldenv[k] = os.environ.get(k)
404 407 if cwd is not None:
405 408 oldcwd = os.getcwd()
406 409 try:
407 410 for k, v in environ.iteritems():
408 411 os.environ[k] = py2shell(v)
409 412 if cwd is not None and oldcwd != cwd:
410 413 os.chdir(cwd)
411 414 rc = os.system(cmd)
412 415 if rc and onerr:
413 416 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
414 417 explain_exit(rc)[0])
415 418 if errprefix:
416 419 errmsg = '%s: %s' % (errprefix, errmsg)
417 420 try:
418 421 onerr.warn(errmsg + '\n')
419 422 except AttributeError:
420 423 raise onerr(errmsg)
421 424 return rc
422 425 finally:
423 426 for k, v in oldenv.iteritems():
424 427 if v is None:
425 428 del os.environ[k]
426 429 else:
427 430 os.environ[k] = v
428 431 if cwd is not None and oldcwd != cwd:
429 432 os.chdir(oldcwd)
430 433
431 434 def rename(src, dst):
432 435 """forcibly rename a file"""
433 436 try:
434 437 os.rename(src, dst)
435 438 except OSError, err:
436 439 # on windows, rename to existing file is not allowed, so we
437 440 # must delete destination first. but if file is open, unlink
438 441 # schedules it for delete but does not delete it. rename
439 442 # happens immediately even for open files, so we create
440 443 # temporary file, delete it, rename destination to that name,
441 444 # then delete that. then rename is safe to do.
442 445 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
443 446 os.close(fd)
444 447 os.unlink(temp)
445 448 os.rename(dst, temp)
446 449 os.unlink(temp)
447 450 os.rename(src, dst)
448 451
449 452 def unlink(f):
450 453 """unlink and remove the directory if it is empty"""
451 454 os.unlink(f)
452 455 # try removing directories that might now be empty
453 456 try:
454 457 os.removedirs(os.path.dirname(f))
455 458 except OSError:
456 459 pass
457 460
458 461 def copyfiles(src, dst, hardlink=None):
459 462 """Copy a directory tree using hardlinks if possible"""
460 463
461 464 if hardlink is None:
462 465 hardlink = (os.stat(src).st_dev ==
463 466 os.stat(os.path.dirname(dst)).st_dev)
464 467
465 468 if os.path.isdir(src):
466 469 os.mkdir(dst)
467 470 for name in os.listdir(src):
468 471 srcname = os.path.join(src, name)
469 472 dstname = os.path.join(dst, name)
470 473 copyfiles(srcname, dstname, hardlink)
471 474 else:
472 475 if hardlink:
473 476 try:
474 477 os_link(src, dst)
475 478 except (IOError, OSError):
476 479 hardlink = False
477 480 shutil.copy(src, dst)
478 481 else:
479 482 shutil.copy(src, dst)
480 483
481 484 def audit_path(path):
482 485 """Abort if path contains dangerous components"""
483 486 parts = os.path.normcase(path).split(os.sep)
484 487 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
485 488 or os.pardir in parts):
486 489 raise Abort(_("path contains illegal component: %s\n") % path)
487 490
488 491 def _makelock_file(info, pathname):
489 492 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
490 493 os.write(ld, info)
491 494 os.close(ld)
492 495
493 496 def _readlock_file(pathname):
494 497 return posixfile(pathname).read()
495 498
496 499 def nlinks(pathname):
497 500 """Return number of hardlinks for the given file."""
498 501 return os.lstat(pathname).st_nlink
499 502
500 503 if hasattr(os, 'link'):
501 504 os_link = os.link
502 505 else:
503 506 def os_link(src, dst):
504 507 raise OSError(0, _("Hardlinks not supported"))
505 508
506 509 def fstat(fp):
507 510 '''stat file object that may not have fileno method.'''
508 511 try:
509 512 return os.fstat(fp.fileno())
510 513 except AttributeError:
511 514 return os.stat(fp.name)
512 515
513 516 posixfile = file
514 517
515 518 def is_win_9x():
516 519 '''return true if run on windows 95, 98 or me.'''
517 520 try:
518 521 return sys.getwindowsversion()[3] == 1
519 522 except AttributeError:
520 523 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
521 524
522 525 def username(uid=None):
523 526 """Return the name of the user with the given uid.
524 527
525 528 If uid is None, return the name of the current user."""
526 529 try:
527 530 import pwd
528 531 if uid is None:
529 532 uid = os.getuid()
530 533 try:
531 534 return pwd.getpwuid(uid)[0]
532 535 except KeyError:
533 536 return str(uid)
534 537 except ImportError:
535 538 return None
536 539
537 540 def groupname(gid=None):
538 541 """Return the name of the group with the given gid.
539 542
540 543 If gid is None, return the name of the current group."""
541 544 try:
542 545 import grp
543 546 if gid is None:
544 547 gid = os.getgid()
545 548 try:
546 549 return grp.getgrgid(gid)[0]
547 550 except KeyError:
548 551 return str(gid)
549 552 except ImportError:
550 553 return None
551 554
552 555 # Platform specific variants
553 556 if os.name == 'nt':
554 557 demandload(globals(), "msvcrt")
555 558 nulldev = 'NUL:'
556 559
557 560 class winstdout:
558 561 '''stdout on windows misbehaves if sent through a pipe'''
559 562
560 563 def __init__(self, fp):
561 564 self.fp = fp
562 565
563 566 def __getattr__(self, key):
564 567 return getattr(self.fp, key)
565 568
566 569 def close(self):
567 570 try:
568 571 self.fp.close()
569 572 except: pass
570 573
571 574 def write(self, s):
572 575 try:
573 576 return self.fp.write(s)
574 577 except IOError, inst:
575 578 if inst.errno != 0: raise
576 579 self.close()
577 580 raise IOError(errno.EPIPE, 'Broken pipe')
578 581
579 582 sys.stdout = winstdout(sys.stdout)
580 583
581 584 def system_rcpath():
582 585 try:
583 586 return system_rcpath_win32()
584 587 except:
585 588 return [r'c:\mercurial\mercurial.ini']
586 589
587 590 def os_rcpath():
588 591 '''return default os-specific hgrc search path'''
589 592 path = system_rcpath()
590 593 path.append(user_rcpath())
591 594 userprofile = os.environ.get('USERPROFILE')
592 595 if userprofile:
593 596 path.append(os.path.join(userprofile, 'mercurial.ini'))
594 597 return path
595 598
596 599 def user_rcpath():
597 600 '''return os-specific hgrc search path to the user dir'''
598 601 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
599 602
600 603 def parse_patch_output(output_line):
601 604 """parses the output produced by patch and returns the file name"""
602 605 pf = output_line[14:]
603 606 if pf[0] == '`':
604 607 pf = pf[1:-1] # Remove the quotes
605 608 return pf
606 609
607 610 def testpid(pid):
608 611 '''return False if pid dead, True if running or not known'''
609 612 return True
610 613
611 614 def is_exec(f, last):
612 615 return last
613 616
614 617 def set_exec(f, mode):
615 618 pass
616 619
617 620 def set_binary(fd):
618 621 msvcrt.setmode(fd.fileno(), os.O_BINARY)
619 622
620 623 def pconvert(path):
621 624 return path.replace("\\", "/")
622 625
623 626 def localpath(path):
624 627 return path.replace('/', '\\')
625 628
626 629 def normpath(path):
627 630 return pconvert(os.path.normpath(path))
628 631
629 632 makelock = _makelock_file
630 633 readlock = _readlock_file
631 634
632 635 def samestat(s1, s2):
633 636 return False
634 637
635 638 def shellquote(s):
636 639 return '"%s"' % s.replace('"', '\\"')
637 640
638 641 def explain_exit(code):
639 642 return _("exited with status %d") % code, code
640 643
641 644 try:
642 645 # override functions with win32 versions if possible
643 646 from util_win32 import *
644 647 if not is_win_9x():
645 648 posixfile = posixfile_nt
646 649 except ImportError:
647 650 pass
648 651
649 652 else:
650 653 nulldev = '/dev/null'
651 654
652 655 def rcfiles(path):
653 656 rcs = [os.path.join(path, 'hgrc')]
654 657 rcdir = os.path.join(path, 'hgrc.d')
655 658 try:
656 659 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
657 660 if f.endswith(".rc")])
658 661 except OSError:
659 662 pass
660 663 return rcs
661 664
662 665 def os_rcpath():
663 666 '''return default os-specific hgrc search path'''
664 667 path = []
665 668 # old mod_python does not set sys.argv
666 669 if len(getattr(sys, 'argv', [])) > 0:
667 670 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
668 671 '/../etc/mercurial'))
669 672 path.extend(rcfiles('/etc/mercurial'))
670 673 path.append(os.path.expanduser('~/.hgrc'))
671 674 path = [os.path.normpath(f) for f in path]
672 675 return path
673 676
674 677 def parse_patch_output(output_line):
675 678 """parses the output produced by patch and returns the file name"""
676 679 pf = output_line[14:]
677 680 if pf.startswith("'") and pf.endswith("'") and " " in pf:
678 681 pf = pf[1:-1] # Remove the quotes
679 682 return pf
680 683
681 684 def is_exec(f, last):
682 685 """check whether a file is executable"""
683 686 return (os.lstat(f).st_mode & 0100 != 0)
684 687
685 688 def set_exec(f, mode):
686 689 s = os.lstat(f).st_mode
687 690 if (s & 0100 != 0) == mode:
688 691 return
689 692 if mode:
690 693 # Turn on +x for every +r bit when making a file executable
691 694 # and obey umask.
692 695 umask = os.umask(0)
693 696 os.umask(umask)
694 697 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
695 698 else:
696 699 os.chmod(f, s & 0666)
697 700
698 701 def set_binary(fd):
699 702 pass
700 703
701 704 def pconvert(path):
702 705 return path
703 706
704 707 def localpath(path):
705 708 return path
706 709
707 710 normpath = os.path.normpath
708 711 samestat = os.path.samestat
709 712
710 713 def makelock(info, pathname):
711 714 try:
712 715 os.symlink(info, pathname)
713 716 except OSError, why:
714 717 if why.errno == errno.EEXIST:
715 718 raise
716 719 else:
717 720 _makelock_file(info, pathname)
718 721
719 722 def readlock(pathname):
720 723 try:
721 724 return os.readlink(pathname)
722 725 except OSError, why:
723 726 if why.errno == errno.EINVAL:
724 727 return _readlock_file(pathname)
725 728 else:
726 729 raise
727 730
728 731 def shellquote(s):
729 732 return "'%s'" % s.replace("'", "'\\''")
730 733
731 734 def testpid(pid):
732 735 '''return False if pid dead, True if running or not sure'''
733 736 try:
734 737 os.kill(pid, 0)
735 738 return True
736 739 except OSError, inst:
737 740 return inst.errno != errno.ESRCH
738 741
739 742 def explain_exit(code):
740 743 """return a 2-tuple (desc, code) describing a process's status"""
741 744 if os.WIFEXITED(code):
742 745 val = os.WEXITSTATUS(code)
743 746 return _("exited with status %d") % val, val
744 747 elif os.WIFSIGNALED(code):
745 748 val = os.WTERMSIG(code)
746 749 return _("killed by signal %d") % val, val
747 750 elif os.WIFSTOPPED(code):
748 751 val = os.WSTOPSIG(code)
749 752 return _("stopped by signal %d") % val, val
750 753 raise ValueError(_("invalid exit code"))
751 754
752 755 def opener(base, audit=True):
753 756 """
754 757 return a function that opens files relative to base
755 758
756 759 this function is used to hide the details of COW semantics and
757 760 remote file access from higher level code.
758 761 """
759 762 p = base
760 763 audit_p = audit
761 764
762 765 def mktempcopy(name):
763 766 d, fn = os.path.split(name)
764 767 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
765 768 os.close(fd)
766 769 ofp = posixfile(temp, "wb")
767 770 try:
768 771 try:
769 772 ifp = posixfile(name, "rb")
770 773 except IOError, inst:
771 774 if not getattr(inst, 'filename', None):
772 775 inst.filename = name
773 776 raise
774 777 for chunk in filechunkiter(ifp):
775 778 ofp.write(chunk)
776 779 ifp.close()
777 780 ofp.close()
778 781 except:
779 782 try: os.unlink(temp)
780 783 except: pass
781 784 raise
782 785 st = os.lstat(name)
783 786 os.chmod(temp, st.st_mode)
784 787 return temp
785 788
786 789 class atomictempfile(posixfile):
787 790 """the file will only be copied when rename is called"""
788 791 def __init__(self, name, mode):
789 792 self.__name = name
790 793 self.temp = mktempcopy(name)
791 794 posixfile.__init__(self, self.temp, mode)
792 795 def rename(self):
793 796 if not self.closed:
794 797 posixfile.close(self)
795 798 rename(self.temp, localpath(self.__name))
796 799 def __del__(self):
797 800 if not self.closed:
798 801 try:
799 802 os.unlink(self.temp)
800 803 except: pass
801 804 posixfile.close(self)
802 805
803 806 class atomicfile(atomictempfile):
804 807 """the file will only be copied on close"""
805 808 def __init__(self, name, mode):
806 809 atomictempfile.__init__(self, name, mode)
807 810 def close(self):
808 811 self.rename()
809 812 def __del__(self):
810 813 self.rename()
811 814
812 815 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
813 816 if audit_p:
814 817 audit_path(path)
815 818 f = os.path.join(p, path)
816 819
817 820 if not text:
818 821 mode += "b" # for that other OS
819 822
820 823 if mode[0] != "r":
821 824 try:
822 825 nlink = nlinks(f)
823 826 except OSError:
824 827 d = os.path.dirname(f)
825 828 if not os.path.isdir(d):
826 829 os.makedirs(d)
827 830 else:
828 831 if atomic:
829 832 return atomicfile(f, mode)
830 833 elif atomictemp:
831 834 return atomictempfile(f, mode)
832 835 if nlink > 1:
833 836 rename(mktempcopy(f), f)
834 837 return posixfile(f, mode)
835 838
836 839 return o
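
A minimal usage sketch (added for clarity; the paths are hypothetical). The returned callable joins its argument onto the base directory, creates missing parent directories on write, and hands back one of the file classes defined above.

    op = opener('/path/to/repo/.hg')
    f = op('store/somefile', 'w')    # opens .hg/store/somefile, creating store/ if needed
    f.write('data')
    f.close()

When the target already exists, passing atomic=True or atomictemp=True returns an atomicfile or atomictempfile instead, so the original is only replaced on close() or rename() respectively.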
837 840
838 841 class chunkbuffer(object):
839 842 """Allow arbitrary sized chunks of data to be efficiently read from an
840 843 iterator over chunks of arbitrary size."""
841 844
842 845 def __init__(self, in_iter, targetsize = 2**16):
843 846 """in_iter is the iterator that's iterating over the input chunks.
844 847 targetsize is how big a buffer to try to maintain."""
845 848 self.in_iter = iter(in_iter)
846 849 self.buf = ''
847 850 self.targetsize = int(targetsize)
848 851 if self.targetsize <= 0:
849 852 raise ValueError(_("targetsize must be greater than 0, was %d") %
850 853 targetsize)
851 854 self.iterempty = False
852 855
853 856 def fillbuf(self):
854 857 """Ignore target size; read every chunk from iterator until empty."""
855 858 if not self.iterempty:
856 859 collector = cStringIO.StringIO()
857 860 collector.write(self.buf)
858 861 for ch in self.in_iter:
859 862 collector.write(ch)
860 863 self.buf = collector.getvalue()
861 864 self.iterempty = True
862 865
863 866 def read(self, l):
864 867 """Read L bytes of data from the iterator of chunks of data.
865 868 Returns less than L bytes if the iterator runs dry."""
866 869 if l > len(self.buf) and not self.iterempty:
867 870 # Clamp to a multiple of self.targetsize
868 871 targetsize = self.targetsize * ((l // self.targetsize) + 1)
869 872 collector = cStringIO.StringIO()
870 873 collector.write(self.buf)
871 874 collected = len(self.buf)
872 875 for chunk in self.in_iter:
873 876 collector.write(chunk)
874 877 collected += len(chunk)
875 878 if collected >= targetsize:
876 879 break
877 880 if collected < targetsize:
878 881 self.iterempty = True
879 882 self.buf = collector.getvalue()
880 883 s, self.buf = self.buf[:l], buffer(self.buf, l)
881 884 return s
882 885
883 886 def filechunkiter(f, size=65536, limit=None):
884 887 """Create a generator that produces the data in the file, size
885 888 (default 65536) bytes at a time, up to optional limit (default is
886 889 to read all data). Chunks may be less than size bytes if the
887 890 chunk is the last chunk in the file, or the file is a socket or
888 891 some other type of file that sometimes reads less data than is
889 892 requested."""
890 893 assert size >= 0
891 894 assert limit is None or limit >= 0
892 895 while True:
893 896 if limit is None: nbytes = size
894 897 else: nbytes = min(limit, size)
895 898 s = nbytes and f.read(nbytes)
896 899 if not s: break
897 900 if limit: limit -= len(s)
898 901 yield s
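
A short sketch of how the two pieces above combine (not part of the original source; the file path and consumer are hypothetical): filechunkiter turns a file into a stream of chunks, and chunkbuffer re-slices that stream into reads of whatever size the caller asks for.

    fp = open('/some/large/file', 'rb')
    buf = chunkbuffer(filechunkiter(fp))
    while True:
        piece = buf.read(4096)    # at most 4096 bytes, '' once the input is exhausted
        if not piece:
            break
        # ... process piece ...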
899 902
900 903 def makedate():
901 904 lt = time.localtime()
902 905 if lt[8] == 1 and time.daylight:
903 906 tz = time.altzone
904 907 else:
905 908 tz = time.timezone
906 909 return time.mktime(lt), tz
907 910
908 911 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
909 912 """represent a (unixtime, offset) tuple as a localized time.
910 913 unixtime is seconds since the epoch, and offset is the time zone's
911 914 number of seconds away from UTC. if timezone is false, do not
912 915 append time zone to string."""
913 916 t, tz = date or makedate()
914 917 s = time.strftime(format, time.gmtime(float(t) - tz))
915 918 if timezone:
916 919 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
917 920 return s
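
For illustration (not from the original source; the output assumes the C locale for weekday and month names):

    datestr((0, 0))       # -> 'Thu Jan 01 00:00:00 1970 +0000'
    datestr((0, -3600))   # -> 'Thu Jan 01 01:00:00 1970 +0100'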
918 921
919 922 def strdate(string, format='%a %b %d %H:%M:%S %Y'):
920 923 """parse a localized time string and return a (unixtime, offset) tuple.
921 924 if the string cannot be parsed, ValueError is raised."""
922 925 def hastimezone(string):
923 926 return (string[-4:].isdigit() and
924 927 (string[-5] == '+' or string[-5] == '-') and
925 928 string[-6].isspace())
926 929
927 930 # NOTE: unixtime = localunixtime + offset
928 931 if hastimezone(string):
929 932 date, tz = string[:-6], string[-5:]
930 933 tz = int(tz)
931 934 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
932 935 else:
933 936 date, offset = string, None
934 937 timetuple = time.strptime(date, format)
935 938 localunixtime = int(calendar.timegm(timetuple))
936 939 if offset is None:
937 940 # local timezone
938 941 unixtime = int(time.mktime(timetuple))
939 942 offset = unixtime - localunixtime
940 943 else:
941 944 unixtime = localunixtime + offset
942 945 return unixtime, offset
943 946
944 947 def parsedate(string, formats=None):
945 948 """parse a localized time string and return a (unixtime, offset) tuple.
946 949 The date may be a "unixtime offset" string or in one of the specified
947 950 formats."""
948 951 if not formats:
949 952 formats = defaultdateformats
950 953 try:
951 954 when, offset = map(int, string.split(' '))
952 955 except ValueError:
953 956 for format in formats:
954 957 try:
955 958 when, offset = strdate(string, format)
956 959 except ValueError:
957 960 pass
958 961 else:
959 962 break
960 963 else:
961 964 raise ValueError(_('invalid date: %r '
962 965 'see hg(1) manual page for details')
963 966 % string)
964 967 # validate explicit (probably user-specified) date and
965 968 # time zone offset. values must fit in signed 32 bits for
966 969 # current 32-bit linux runtimes. timezones go from UTC-12
967 970 # to UTC+14
968 971 if abs(when) > 0x7fffffff:
969 972 raise ValueError(_('date exceeds 32 bits: %d') % when)
970 973 if offset < -50400 or offset > 43200:
971 974 raise ValueError(_('impossible time zone offset: %d') % offset)
972 975 return when, offset
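
Illustrative call (not in the original source): the fast path accepts the internal "unixtime offset" form directly; anything else is retried against each entry of the formats list via strdate.

    parsedate('1000000000 -7200')   # -> (1000000000, -7200)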
973 976
974 977 def shortuser(user):
975 978 """Return a short representation of a user name or email address."""
976 979 f = user.find('@')
977 980 if f >= 0:
978 981 user = user[:f]
979 982 f = user.find('<')
980 983 if f >= 0:
981 984 user = user[f+1:]
982 985 f = user.find(' ')
983 986 if f >= 0:
984 987 user = user[:f]
985 988 return user
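
Examples (added for clarity; the addresses are made up):

    shortuser('Jane Doe <jane.doe@example.com>')   # -> 'jane.doe'
    shortuser('jane@example.com')                  # -> 'jane'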
986 989
987 990 def walkrepos(path):
988 991 '''yield every hg repository under path, recursively.'''
989 992 def errhandler(err):
990 993 if err.filename == path:
991 994 raise err
992 995
993 996 for root, dirs, files in os.walk(path, onerror=errhandler):
994 997 for d in dirs:
995 998 if d == '.hg':
996 999 yield root
997 1000 dirs[:] = []
998 1001 break
999 1002
1000 1003 _rcpath = None
1001 1004
1002 1005 def rcpath():
1003 1006 '''return hgrc search path. if env var HGRCPATH is set, use it.
1004 1007 for each item in path, if directory, use files ending in .rc,
1005 1008 else use item.
1006 1009 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1007 1010 if no HGRCPATH, use default os-specific path.'''
1008 1011 global _rcpath
1009 1012 if _rcpath is None:
1010 1013 if 'HGRCPATH' in os.environ:
1011 1014 _rcpath = []
1012 1015 for p in os.environ['HGRCPATH'].split(os.pathsep):
1013 1016 if not p: continue
1014 1017 if os.path.isdir(p):
1015 1018 for f in os.listdir(p):
1016 1019 if f.endswith('.rc'):
1017 1020 _rcpath.append(os.path.join(p, f))
1018 1021 else:
1019 1022 _rcpath.append(p)
1020 1023 else:
1021 1024 _rcpath = os_rcpath()
1022 1025 return _rcpath
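
A hypothetical illustration (not from the original source). Note that the result is cached in the module-level _rcpath, so HGRCPATH must be set before the first call.

    os.environ['HGRCPATH'] = os.pathsep.join(['/etc/hgrc.d', '/home/user/.hgrc'])
    rcpath()    # -> every '*.rc' file under /etc/hgrc.d, followed by '/home/user/.hgrc'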
1023 1026
1024 1027 def bytecount(nbytes):
1025 1028 '''return byte count formatted as readable string, with units'''
1026 1029
1027 1030 units = (
1028 1031 (100, 1<<30, _('%.0f GB')),
1029 1032 (10, 1<<30, _('%.1f GB')),
1030 1033 (1, 1<<30, _('%.2f GB')),
1031 1034 (100, 1<<20, _('%.0f MB')),
1032 1035 (10, 1<<20, _('%.1f MB')),
1033 1036 (1, 1<<20, _('%.2f MB')),
1034 1037 (100, 1<<10, _('%.0f KB')),
1035 1038 (10, 1<<10, _('%.1f KB')),
1036 1039 (1, 1<<10, _('%.2f KB')),
1037 1040 (1, 1, _('%.0f bytes')),
1038 1041 )
1039 1042
1040 1043 for multiplier, divisor, format in units:
1041 1044 if nbytes >= divisor * multiplier:
1042 1045 return format % (nbytes / float(divisor))
1043 1046 return units[-1][2] % nbytes
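
Sample outputs (added for illustration):

    bytecount(987)         # -> '987 bytes'
    bytecount(1234567)     # -> '1.18 MB'
    bytecount(123456789)   # -> '118 MB'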
1044 1047
1045 1048 def drop_scheme(scheme, path):
1046 1049 sc = scheme + ':'
1047 1050 if path.startswith(sc):
1048 1051 path = path[len(sc):]
1049 1052 if path.startswith('//'):
1050 1053 path = path[2:]
1051 1054 return path
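
Illustrative calls (not part of the original source):

    drop_scheme('file', 'file:///tmp/repo')   # -> '/tmp/repo'
    drop_scheme('file', '/tmp/repo')          # -> '/tmp/repo' (no scheme, returned unchanged)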