Added ability to clone from a local repository to a (new) remote one....
Sean Meiners
r2549:e1831f06 default
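This changeset extends clone so the destination need not be a local directory: if the destination repository does not exist it is created (hg.repository(ui, dest, create=1)) and, when the destination is not local, the changesets are transferred by pushing from the source instead of hardlinking or pulling. A minimal usage sketch of what this enables; the host name and paths below are illustrative placeholders, not taken from the commit:

    hg clone /path/to/local-repo ssh://user@example.com//srv/hg/new-repo

Cloning from one remote repository to another is still rejected ("clone from remote to remote not supported.").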
@@ -1,3554 +1,3570 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 16 demandload(globals(), "hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 24 modified, added, removed, deleted, unknown = repo.changes()
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def filterfiles(filters, files):
29 29 l = [x for x in files if x in filters]
30 30
31 31 for t in filters:
32 32 if t and t[-1] != "/":
33 33 t += "/"
34 34 l += [x for x in files if x.startswith(t)]
35 35 return l
36 36
37 37 def relpath(repo, args):
38 38 cwd = repo.getcwd()
39 39 if cwd:
40 40 return [util.normpath(os.path.join(cwd, x)) for x in args]
41 41 return args
42 42
43 43 def matchpats(repo, pats=[], opts={}, head=''):
44 44 cwd = repo.getcwd()
45 45 if not pats and cwd:
46 46 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
47 47 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
48 48 cwd = ''
49 49 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
50 50 opts.get('exclude'), head)
51 51
52 52 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
53 53 files, matchfn, anypats = matchpats(repo, pats, opts, head)
54 54 exact = dict(zip(files, files))
55 55 def walk():
56 56 for src, fn in repo.walk(node=node, files=files, match=matchfn,
57 57 badmatch=badmatch):
58 58 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
59 59 return files, matchfn, walk()
60 60
61 61 def walk(repo, pats, opts, node=None, head='', badmatch=None):
62 62 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
63 63 for r in results:
64 64 yield r
65 65
66 66 def walkchangerevs(ui, repo, pats, opts):
67 67 '''Iterate over files and the revs they changed in.
68 68
69 69 Callers most commonly need to iterate backwards over the history
70 70 they are interested in. Doing so has awful (quadratic-looking)
71 71 performance, so we use iterators in a "windowed" way.
72 72
73 73 We walk a window of revisions in the desired order. Within the
74 74 window, we first walk forwards to gather data, then in the desired
75 75 order (usually backwards) to display it.
76 76
77 77 This function returns an (iterator, getchange, matchfn) tuple. The
78 78 getchange function returns the changelog entry for a numeric
79 79 revision. The iterator yields 3-tuples. They will be of one of
80 80 the following forms:
81 81
82 82 "window", incrementing, lastrev: stepping through a window,
83 83 positive if walking forwards through revs, last rev in the
84 84 sequence iterated over - use to reset state for the current window
85 85
86 86 "add", rev, fns: out-of-order traversal of the given file names
87 87 fns, which changed during revision rev - use to gather data for
88 88 possible display
89 89
90 90 "iter", rev, None: in-order traversal of the revs earlier iterated
91 91 over with "add" - use to display data'''
92 92
93 93 def increasing_windows(start, end, windowsize=8, sizelimit=512):
94 94 if start < end:
95 95 while start < end:
96 96 yield start, min(windowsize, end-start)
97 97 start += windowsize
98 98 if windowsize < sizelimit:
99 99 windowsize *= 2
100 100 else:
101 101 while start > end:
102 102 yield start, min(windowsize, start-end-1)
103 103 start -= windowsize
104 104 if windowsize < sizelimit:
105 105 windowsize *= 2
106 106
107 107
108 108 files, matchfn, anypats = matchpats(repo, pats, opts)
109 109
110 110 if repo.changelog.count() == 0:
111 111 return [], False, matchfn
112 112
113 113 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
114 114 wanted = {}
115 115 slowpath = anypats
116 116 fncache = {}
117 117
118 118 chcache = {}
119 119 def getchange(rev):
120 120 ch = chcache.get(rev)
121 121 if ch is None:
122 122 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
123 123 return ch
124 124
125 125 if not slowpath and not files:
126 126 # No files, no patterns. Display all revs.
127 127 wanted = dict(zip(revs, revs))
128 128 if not slowpath:
129 129 # Only files, no patterns. Check the history of each file.
130 130 def filerevgen(filelog):
131 131 for i, window in increasing_windows(filelog.count()-1, -1):
132 132 revs = []
133 133 for j in xrange(i - window, i + 1):
134 134 revs.append(filelog.linkrev(filelog.node(j)))
135 135 revs.reverse()
136 136 for rev in revs:
137 137 yield rev
138 138
139 139 minrev, maxrev = min(revs), max(revs)
140 140 for file_ in files:
141 141 filelog = repo.file(file_)
142 142 # A zero count may be a directory or deleted file, so
143 143 # try to find matching entries on the slow path.
144 144 if filelog.count() == 0:
145 145 slowpath = True
146 146 break
147 147 for rev in filerevgen(filelog):
148 148 if rev <= maxrev:
149 149 if rev < minrev:
150 150 break
151 151 fncache.setdefault(rev, [])
152 152 fncache[rev].append(file_)
153 153 wanted[rev] = 1
154 154 if slowpath:
155 155 # The slow path checks files modified in every changeset.
156 156 def changerevgen():
157 157 for i, window in increasing_windows(repo.changelog.count()-1, -1):
158 158 for j in xrange(i - window, i + 1):
159 159 yield j, getchange(j)[3]
160 160
161 161 for rev, changefiles in changerevgen():
162 162 matches = filter(matchfn, changefiles)
163 163 if matches:
164 164 fncache[rev] = matches
165 165 wanted[rev] = 1
166 166
167 167 def iterate():
168 168 for i, window in increasing_windows(0, len(revs)):
169 169 yield 'window', revs[0] < revs[-1], revs[-1]
170 170 nrevs = [rev for rev in revs[i:i+window]
171 171 if rev in wanted]
172 172 srevs = list(nrevs)
173 173 srevs.sort()
174 174 for rev in srevs:
175 175 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
176 176 yield 'add', rev, fns
177 177 for rev in nrevs:
178 178 yield 'iter', rev, None
179 179 return iterate(), getchange, matchfn
180 180
181 181 revrangesep = ':'
182 182
183 183 def revfix(repo, val, defval):
184 184 '''turn user-level id of changeset into rev number.
185 185 user-level id can be tag, changeset, rev number, or negative rev
186 186 number relative to number of revs (-1 is tip, etc).'''
187 187 if not val:
188 188 return defval
189 189 try:
190 190 num = int(val)
191 191 if str(num) != val:
192 192 raise ValueError
193 193 if num < 0:
194 194 num += repo.changelog.count()
195 195 if num < 0:
196 196 num = 0
197 197 elif num >= repo.changelog.count():
198 198 raise ValueError
199 199 except ValueError:
200 200 try:
201 201 num = repo.changelog.rev(repo.lookup(val))
202 202 except KeyError:
203 203 raise util.Abort(_('invalid revision identifier %s'), val)
204 204 return num
205 205
206 206 def revpair(ui, repo, revs):
207 207 '''return pair of nodes, given list of revisions. second item can
208 208 be None, meaning use working dir.'''
209 209 if not revs:
210 210 return repo.dirstate.parents()[0], None
211 211 end = None
212 212 if len(revs) == 1:
213 213 start = revs[0]
214 214 if revrangesep in start:
215 215 start, end = start.split(revrangesep, 1)
216 216 start = revfix(repo, start, 0)
217 217 end = revfix(repo, end, repo.changelog.count() - 1)
218 218 else:
219 219 start = revfix(repo, start, None)
220 220 elif len(revs) == 2:
221 221 if revrangesep in revs[0] or revrangesep in revs[1]:
222 222 raise util.Abort(_('too many revisions specified'))
223 223 start = revfix(repo, revs[0], None)
224 224 end = revfix(repo, revs[1], None)
225 225 else:
226 226 raise util.Abort(_('too many revisions specified'))
227 227 if end is not None: end = repo.lookup(str(end))
228 228 return repo.lookup(str(start)), end
229 229
230 230 def revrange(ui, repo, revs):
231 231 """Yield revision as strings from a list of revision specifications."""
232 232 seen = {}
233 233 for spec in revs:
234 234 if spec.find(revrangesep) >= 0:
235 235 start, end = spec.split(revrangesep, 1)
236 236 start = revfix(repo, start, 0)
237 237 end = revfix(repo, end, repo.changelog.count() - 1)
238 238 step = start > end and -1 or 1
239 239 for rev in xrange(start, end+step, step):
240 240 if rev in seen:
241 241 continue
242 242 seen[rev] = 1
243 243 yield str(rev)
244 244 else:
245 245 rev = revfix(repo, spec, None)
246 246 if rev in seen:
247 247 continue
248 248 seen[rev] = 1
249 249 yield str(rev)
250 250
251 251 def make_filename(repo, r, pat, node=None,
252 252 total=None, seqno=None, revwidth=None, pathname=None):
253 253 node_expander = {
254 254 'H': lambda: hex(node),
255 255 'R': lambda: str(r.rev(node)),
256 256 'h': lambda: short(node),
257 257 }
258 258 expander = {
259 259 '%': lambda: '%',
260 260 'b': lambda: os.path.basename(repo.root),
261 261 }
262 262
263 263 try:
264 264 if node:
265 265 expander.update(node_expander)
266 266 if node and revwidth is not None:
267 267 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
268 268 if total is not None:
269 269 expander['N'] = lambda: str(total)
270 270 if seqno is not None:
271 271 expander['n'] = lambda: str(seqno)
272 272 if total is not None and seqno is not None:
273 273 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
274 274 if pathname is not None:
275 275 expander['s'] = lambda: os.path.basename(pathname)
276 276 expander['d'] = lambda: os.path.dirname(pathname) or '.'
277 277 expander['p'] = lambda: pathname
278 278
279 279 newname = []
280 280 patlen = len(pat)
281 281 i = 0
282 282 while i < patlen:
283 283 c = pat[i]
284 284 if c == '%':
285 285 i += 1
286 286 c = pat[i]
287 287 c = expander[c]()
288 288 newname.append(c)
289 289 i += 1
290 290 return ''.join(newname)
291 291 except KeyError, inst:
292 292 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
293 293 inst.args[0])
294 294
295 295 def make_file(repo, r, pat, node=None,
296 296 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
297 297 if not pat or pat == '-':
298 298 return 'w' in mode and sys.stdout or sys.stdin
299 299 if hasattr(pat, 'write') and 'w' in mode:
300 300 return pat
301 301 if hasattr(pat, 'read') and 'r' in mode:
302 302 return pat
303 303 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
304 304 pathname),
305 305 mode)
306 306
307 307 def write_bundle(cg, filename=None, compress=True):
308 308 """Write a bundle file and return its filename.
309 309
310 310 Existing files will not be overwritten.
311 311 If no filename is specified, a temporary file is created.
312 312 bz2 compression can be turned off.
313 313 The bundle file will be deleted in case of errors.
314 314 """
315 315 class nocompress(object):
316 316 def compress(self, x):
317 317 return x
318 318 def flush(self):
319 319 return ""
320 320
321 321 fh = None
322 322 cleanup = None
323 323 try:
324 324 if filename:
325 325 if os.path.exists(filename):
326 326 raise util.Abort(_("file '%s' already exists"), filename)
327 327 fh = open(filename, "wb")
328 328 else:
329 329 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
330 330 fh = os.fdopen(fd, "wb")
331 331 cleanup = filename
332 332
333 333 if compress:
334 334 fh.write("HG10")
335 335 z = bz2.BZ2Compressor(9)
336 336 else:
337 337 fh.write("HG10UN")
338 338 z = nocompress()
339 339 # parse the changegroup data, otherwise we will block
340 340 # in case of sshrepo because we don't know the end of the stream
341 341
342 342 # an empty chunkiter is the end of the changegroup
343 343 empty = False
344 344 while not empty:
345 345 empty = True
346 346 for chunk in changegroup.chunkiter(cg):
347 347 empty = False
348 348 fh.write(z.compress(changegroup.genchunk(chunk)))
349 349 fh.write(z.compress(changegroup.closechunk()))
350 350 fh.write(z.flush())
351 351 cleanup = None
352 352 return filename
353 353 finally:
354 354 if fh is not None:
355 355 fh.close()
356 356 if cleanup is not None:
357 357 os.unlink(cleanup)
358 358
359 359 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
360 360 changes=None, text=False, opts={}):
361 361 if not node1:
362 362 node1 = repo.dirstate.parents()[0]
363 363 # reading the data for node1 early allows it to play nicely
364 364 # with repo.changes and the revlog cache.
365 365 change = repo.changelog.read(node1)
366 366 mmap = repo.manifest.read(change[0])
367 367 date1 = util.datestr(change[2])
368 368
369 369 if not changes:
370 370 changes = repo.changes(node1, node2, files, match=match)
371 371 modified, added, removed, deleted, unknown = changes
372 372 if files:
373 373 modified, added, removed = map(lambda x: filterfiles(files, x),
374 374 (modified, added, removed))
375 375
376 376 if not modified and not added and not removed:
377 377 return
378 378
379 379 if node2:
380 380 change = repo.changelog.read(node2)
381 381 mmap2 = repo.manifest.read(change[0])
382 382 _date2 = util.datestr(change[2])
383 383 def date2(f):
384 384 return _date2
385 385 def read(f):
386 386 return repo.file(f).read(mmap2[f])
387 387 else:
388 388 tz = util.makedate()[1]
389 389 _date2 = util.datestr()
390 390 def date2(f):
391 391 try:
392 392 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
393 393 except OSError, err:
394 394 if err.errno != errno.ENOENT: raise
395 395 return _date2
396 396 def read(f):
397 397 return repo.wread(f)
398 398
399 399 if ui.quiet:
400 400 r = None
401 401 else:
402 402 hexfunc = ui.verbose and hex or short
403 403 r = [hexfunc(node) for node in [node1, node2] if node]
404 404
405 405 diffopts = ui.diffopts()
406 406 showfunc = opts.get('show_function') or diffopts['showfunc']
407 407 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
408 408 for f in modified:
409 409 to = None
410 410 if f in mmap:
411 411 to = repo.file(f).read(mmap[f])
412 412 tn = read(f)
413 413 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
414 414 showfunc=showfunc, ignorews=ignorews))
415 415 for f in added:
416 416 to = None
417 417 tn = read(f)
418 418 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
419 419 showfunc=showfunc, ignorews=ignorews))
420 420 for f in removed:
421 421 to = repo.file(f).read(mmap[f])
422 422 tn = None
423 423 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
424 424 showfunc=showfunc, ignorews=ignorews))
425 425
426 426 def trimuser(ui, name, rev, revcache):
427 427 """trim the name of the user who committed a change"""
428 428 user = revcache.get(rev)
429 429 if user is None:
430 430 user = revcache[rev] = ui.shortuser(name)
431 431 return user
432 432
433 433 class changeset_printer(object):
434 434 '''show changeset information when templating not requested.'''
435 435
436 436 def __init__(self, ui, repo):
437 437 self.ui = ui
438 438 self.repo = repo
439 439
440 440 def show(self, rev=0, changenode=None, brinfo=None):
441 441 '''show a single changeset or file revision'''
442 442 log = self.repo.changelog
443 443 if changenode is None:
444 444 changenode = log.node(rev)
445 445 elif not rev:
446 446 rev = log.rev(changenode)
447 447
448 448 if self.ui.quiet:
449 449 self.ui.write("%d:%s\n" % (rev, short(changenode)))
450 450 return
451 451
452 452 changes = log.read(changenode)
453 453 date = util.datestr(changes[2])
454 454
455 455 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
456 456 for p in log.parents(changenode)
457 457 if self.ui.debugflag or p != nullid]
458 458 if (not self.ui.debugflag and len(parents) == 1 and
459 459 parents[0][0] == rev-1):
460 460 parents = []
461 461
462 462 if self.ui.verbose:
463 463 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
464 464 else:
465 465 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
466 466
467 467 for tag in self.repo.nodetags(changenode):
468 468 self.ui.status(_("tag: %s\n") % tag)
469 469 for parent in parents:
470 470 self.ui.write(_("parent: %d:%s\n") % parent)
471 471
472 472 if brinfo and changenode in brinfo:
473 473 br = brinfo[changenode]
474 474 self.ui.write(_("branch: %s\n") % " ".join(br))
475 475
476 476 self.ui.debug(_("manifest: %d:%s\n") %
477 477 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
478 478 self.ui.status(_("user: %s\n") % changes[1])
479 479 self.ui.status(_("date: %s\n") % date)
480 480
481 481 if self.ui.debugflag:
482 482 files = self.repo.changes(log.parents(changenode)[0], changenode)
483 483 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
484 484 files):
485 485 if value:
486 486 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
487 487 else:
488 488 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
489 489
490 490 description = changes[4].strip()
491 491 if description:
492 492 if self.ui.verbose:
493 493 self.ui.status(_("description:\n"))
494 494 self.ui.status(description)
495 495 self.ui.status("\n\n")
496 496 else:
497 497 self.ui.status(_("summary: %s\n") %
498 498 description.splitlines()[0])
499 499 self.ui.status("\n")
500 500
501 501 def show_changeset(ui, repo, opts):
502 502 '''show one changeset. uses template or regular display. caller
503 503 can pass in 'style' and 'template' options in opts.'''
504 504
505 505 tmpl = opts.get('template')
506 506 if tmpl:
507 507 tmpl = templater.parsestring(tmpl, quoted=False)
508 508 else:
509 509 tmpl = ui.config('ui', 'logtemplate')
510 510 if tmpl: tmpl = templater.parsestring(tmpl)
511 511 mapfile = opts.get('style') or ui.config('ui', 'style')
512 512 if tmpl or mapfile:
513 513 if mapfile:
514 514 if not os.path.isfile(mapfile):
515 515 mapname = templater.templatepath('map-cmdline.' + mapfile)
516 516 if not mapname: mapname = templater.templatepath(mapfile)
517 517 if mapname: mapfile = mapname
518 518 try:
519 519 t = templater.changeset_templater(ui, repo, mapfile)
520 520 except SyntaxError, inst:
521 521 raise util.Abort(inst.args[0])
522 522 if tmpl: t.use_template(tmpl)
523 523 return t
524 524 return changeset_printer(ui, repo)
525 525
526 526 def show_version(ui):
527 527 """output version and copyright information"""
528 528 ui.write(_("Mercurial Distributed SCM (version %s)\n")
529 529 % version.get_version())
530 530 ui.status(_(
531 531 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
532 532 "This is free software; see the source for copying conditions. "
533 533 "There is NO\nwarranty; "
534 534 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
535 535 ))
536 536
537 537 def help_(ui, cmd=None, with_version=False):
538 538 """show help for a given command or all commands"""
539 539 option_lists = []
540 540 if cmd and cmd != 'shortlist':
541 541 if with_version:
542 542 show_version(ui)
543 543 ui.write('\n')
544 544 aliases, i = find(cmd)
545 545 # synopsis
546 546 ui.write("%s\n\n" % i[2])
547 547
548 548 # description
549 549 doc = i[0].__doc__
550 550 if not doc:
551 551 doc = _("(No help text available)")
552 552 if ui.quiet:
553 553 doc = doc.splitlines(0)[0]
554 554 ui.write("%s\n" % doc.rstrip())
555 555
556 556 if not ui.quiet:
557 557 # aliases
558 558 if len(aliases) > 1:
559 559 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
560 560
561 561 # options
562 562 if i[1]:
563 563 option_lists.append(("options", i[1]))
564 564
565 565 else:
566 566 # program name
567 567 if ui.verbose or with_version:
568 568 show_version(ui)
569 569 else:
570 570 ui.status(_("Mercurial Distributed SCM\n"))
571 571 ui.status('\n')
572 572
573 573 # list of commands
574 574 if cmd == "shortlist":
575 575 ui.status(_('basic commands (use "hg help" '
576 576 'for the full list or option "-v" for details):\n\n'))
577 577 elif ui.verbose:
578 578 ui.status(_('list of commands:\n\n'))
579 579 else:
580 580 ui.status(_('list of commands (use "hg help -v" '
581 581 'to show aliases and global options):\n\n'))
582 582
583 583 h = {}
584 584 cmds = {}
585 585 for c, e in table.items():
586 586 f = c.split("|")[0]
587 587 if cmd == "shortlist" and not f.startswith("^"):
588 588 continue
589 589 f = f.lstrip("^")
590 590 if not ui.debugflag and f.startswith("debug"):
591 591 continue
592 592 doc = e[0].__doc__
593 593 if not doc:
594 594 doc = _("(No help text available)")
595 595 h[f] = doc.splitlines(0)[0].rstrip()
596 596 cmds[f] = c.lstrip("^")
597 597
598 598 fns = h.keys()
599 599 fns.sort()
600 600 m = max(map(len, fns))
601 601 for f in fns:
602 602 if ui.verbose:
603 603 commands = cmds[f].replace("|",", ")
604 604 ui.write(" %s:\n %s\n"%(commands, h[f]))
605 605 else:
606 606 ui.write(' %-*s %s\n' % (m, f, h[f]))
607 607
608 608 # global options
609 609 if ui.verbose:
610 610 option_lists.append(("global options", globalopts))
611 611
612 612 # list all option lists
613 613 opt_output = []
614 614 for title, options in option_lists:
615 615 opt_output.append(("\n%s:\n" % title, None))
616 616 for shortopt, longopt, default, desc in options:
617 617 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
618 618 longopt and " --%s" % longopt),
619 619 "%s%s" % (desc,
620 620 default
621 621 and _(" (default: %s)") % default
622 622 or "")))
623 623
624 624 if opt_output:
625 625 opts_len = max([len(line[0]) for line in opt_output if line[1]])
626 626 for first, second in opt_output:
627 627 if second:
628 628 ui.write(" %-*s %s\n" % (opts_len, first, second))
629 629 else:
630 630 ui.write("%s\n" % first)
631 631
632 632 # Commands start here, listed alphabetically
633 633
634 634 def add(ui, repo, *pats, **opts):
635 635 """add the specified files on the next commit
636 636
637 637 Schedule files to be version controlled and added to the repository.
638 638
639 639 The files will be added to the repository at the next commit.
640 640
641 641 If no names are given, add all files in the repository.
642 642 """
643 643
644 644 names = []
645 645 for src, abs, rel, exact in walk(repo, pats, opts):
646 646 if exact:
647 647 if ui.verbose:
648 648 ui.status(_('adding %s\n') % rel)
649 649 names.append(abs)
650 650 elif repo.dirstate.state(abs) == '?':
651 651 ui.status(_('adding %s\n') % rel)
652 652 names.append(abs)
653 653 if not opts.get('dry_run'):
654 654 repo.add(names)
655 655
656 656 def addremove(ui, repo, *pats, **opts):
657 657 """add all new files, delete all missing files (DEPRECATED)
658 658
659 659 (DEPRECATED)
660 660 Add all new files and remove all missing files from the repository.
661 661
662 662 New files are ignored if they match any of the patterns in .hgignore. As
663 663 with add, these changes take effect at the next commit.
664 664
665 665 This command is now deprecated and will be removed in a future
666 666 release. Please use add and remove --after instead.
667 667 """
668 668 ui.warn(_('(the addremove command is deprecated; use add and remove '
669 669 '--after instead)\n'))
670 670 return addremove_lock(ui, repo, pats, opts)
671 671
672 672 def addremove_lock(ui, repo, pats, opts, wlock=None):
673 673 add, remove = [], []
674 674 for src, abs, rel, exact in walk(repo, pats, opts):
675 675 if src == 'f' and repo.dirstate.state(abs) == '?':
676 676 add.append(abs)
677 677 if ui.verbose or not exact:
678 678 ui.status(_('adding %s\n') % ((pats and rel) or abs))
679 679 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
680 680 remove.append(abs)
681 681 if ui.verbose or not exact:
682 682 ui.status(_('removing %s\n') % ((pats and rel) or abs))
683 683 if not opts.get('dry_run'):
684 684 repo.add(add, wlock=wlock)
685 685 repo.remove(remove, wlock=wlock)
686 686
687 687 def annotate(ui, repo, *pats, **opts):
688 688 """show changeset information per file line
689 689
690 690 List changes in files, showing the revision id responsible for each line
691 691
692 692 This command is useful to discover who did a change or when a change took
693 693 place.
694 694
695 695 Without the -a option, annotate will avoid processing files it
696 696 detects as binary. With -a, annotate will generate an annotation
697 697 anyway, probably with undesirable results.
698 698 """
699 699 def getnode(rev):
700 700 return short(repo.changelog.node(rev))
701 701
702 702 ucache = {}
703 703 def getname(rev):
704 704 cl = repo.changelog.read(repo.changelog.node(rev))
705 705 return trimuser(ui, cl[1], rev, ucache)
706 706
707 707 dcache = {}
708 708 def getdate(rev):
709 709 datestr = dcache.get(rev)
710 710 if datestr is None:
711 711 cl = repo.changelog.read(repo.changelog.node(rev))
712 712 datestr = dcache[rev] = util.datestr(cl[2])
713 713 return datestr
714 714
715 715 if not pats:
716 716 raise util.Abort(_('at least one file name or pattern required'))
717 717
718 718 opmap = [['user', getname], ['number', str], ['changeset', getnode],
719 719 ['date', getdate]]
720 720 if not opts['user'] and not opts['changeset'] and not opts['date']:
721 721 opts['number'] = 1
722 722
723 723 if opts['rev']:
724 724 node = repo.changelog.lookup(opts['rev'])
725 725 else:
726 726 node = repo.dirstate.parents()[0]
727 727 change = repo.changelog.read(node)
728 728 mmap = repo.manifest.read(change[0])
729 729
730 730 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
731 731 f = repo.file(abs)
732 732 if not opts['text'] and util.binary(f.read(mmap[abs])):
733 733 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
734 734 continue
735 735
736 736 lines = f.annotate(mmap[abs])
737 737 pieces = []
738 738
739 739 for o, f in opmap:
740 740 if opts[o]:
741 741 l = [f(n) for n, dummy in lines]
742 742 if l:
743 743 m = max(map(len, l))
744 744 pieces.append(["%*s" % (m, x) for x in l])
745 745
746 746 if pieces:
747 747 for p, l in zip(zip(*pieces), lines):
748 748 ui.write("%s: %s" % (" ".join(p), l[1]))
749 749
750 750 def archive(ui, repo, dest, **opts):
751 751 '''create unversioned archive of a repository revision
752 752
753 753 By default, the revision used is the parent of the working
754 754 directory; use "-r" to specify a different revision.
755 755
756 756 To specify the type of archive to create, use "-t". Valid
757 757 types are:
758 758
759 759 "files" (default): a directory full of files
760 760 "tar": tar archive, uncompressed
761 761 "tbz2": tar archive, compressed using bzip2
762 762 "tgz": tar archive, compressed using gzip
763 763 "uzip": zip archive, uncompressed
764 764 "zip": zip archive, compressed using deflate
765 765
766 766 The exact name of the destination archive or directory is given
767 767 using a format string; see "hg help export" for details.
768 768
769 769 Each member added to an archive file has a directory prefix
770 770 prepended. Use "-p" to specify a format string for the prefix.
771 771 The default is the basename of the archive, with suffixes removed.
772 772 '''
773 773
774 774 if opts['rev']:
775 775 node = repo.lookup(opts['rev'])
776 776 else:
777 777 node, p2 = repo.dirstate.parents()
778 778 if p2 != nullid:
779 779 raise util.Abort(_('uncommitted merge - please provide a '
780 780 'specific revision'))
781 781
782 782 dest = make_filename(repo, repo.changelog, dest, node)
783 783 if os.path.realpath(dest) == repo.root:
784 784 raise util.Abort(_('repository root cannot be destination'))
785 785 dummy, matchfn, dummy = matchpats(repo, [], opts)
786 786 kind = opts.get('type') or 'files'
787 787 prefix = opts['prefix']
788 788 if dest == '-':
789 789 if kind == 'files':
790 790 raise util.Abort(_('cannot archive plain files to stdout'))
791 791 dest = sys.stdout
792 792 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
793 793 prefix = make_filename(repo, repo.changelog, prefix, node)
794 794 archival.archive(repo, dest, node, kind, not opts['no_decode'],
795 795 matchfn, prefix)
796 796
797 797 def backout(ui, repo, rev, **opts):
798 798 '''reverse effect of earlier changeset
799 799
800 800 Commit the backed out changes as a new changeset. The new
801 801 changeset is a child of the backed out changeset.
802 802
803 803 If you back out a changeset other than the tip, a new head is
804 804 created. This head is the parent of the working directory. If
805 805 you back out an old changeset, your working directory will appear
806 806 old after the backout. You should merge the backout changeset
807 807 with another head.
808 808
809 809 The --merge option remembers the parent of the working directory
810 810 before starting the backout, then merges the new head with that
811 811 changeset afterwards. This saves you from doing the merge by
812 812 hand. The result of this merge is not committed, as for a normal
813 813 merge.'''
814 814
815 815 bail_if_changed(repo)
816 816 op1, op2 = repo.dirstate.parents()
817 817 if op2 != nullid:
818 818 raise util.Abort(_('outstanding uncommitted merge'))
819 819 node = repo.lookup(rev)
820 820 parent, p2 = repo.changelog.parents(node)
821 821 if parent == nullid:
822 822 raise util.Abort(_('cannot back out a change with no parents'))
823 823 if p2 != nullid:
824 824 raise util.Abort(_('cannot back out a merge'))
825 825 repo.update(node, force=True, show_stats=False)
826 826 revert_opts = opts.copy()
827 827 revert_opts['rev'] = hex(parent)
828 828 revert(ui, repo, **revert_opts)
829 829 commit_opts = opts.copy()
830 830 commit_opts['addremove'] = False
831 831 if not commit_opts['message'] and not commit_opts['logfile']:
832 832 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
833 833 commit_opts['force_editor'] = True
834 834 commit(ui, repo, **commit_opts)
835 835 def nice(node):
836 836 return '%d:%s' % (repo.changelog.rev(node), short(node))
837 837 ui.status(_('changeset %s backs out changeset %s\n') %
838 838 (nice(repo.changelog.tip()), nice(node)))
839 839 if op1 != node:
840 840 if opts['merge']:
841 841 ui.status(_('merging with changeset %s\n') % nice(op1))
842 842 doupdate(ui, repo, hex(op1), **opts)
843 843 else:
844 844 ui.status(_('the backout changeset is a new head - '
845 845 'do not forget to merge\n'))
846 846 ui.status(_('(use "backout -m" if you want to auto-merge)\n'))
847 847
848 848 def bundle(ui, repo, fname, dest=None, **opts):
849 849 """create a changegroup file
850 850
851 851 Generate a compressed changegroup file collecting all changesets
852 852 not found in the other repository.
853 853
854 854 This file can then be transferred using conventional means and
855 855 applied to another repository with the unbundle command. This is
856 856 useful when native push and pull are not available or when
857 857 exporting an entire repository is undesirable. The standard file
858 858 extension is ".hg".
859 859
860 860 Unlike import/export, this exactly preserves all changeset
861 861 contents including permissions, rename data, and revision history.
862 862 """
863 863 dest = ui.expandpath(dest or 'default-push', dest or 'default')
864 864 other = hg.repository(ui, dest)
865 865 o = repo.findoutgoing(other, force=opts['force'])
866 866 cg = repo.changegroup(o, 'bundle')
867 867 write_bundle(cg, fname)
868 868
869 869 def cat(ui, repo, file1, *pats, **opts):
870 870 """output the latest or given revisions of files
871 871
872 872 Print the specified files as they were at the given revision.
873 873 If no revision is given then the tip is used.
874 874
875 875 Output may be to a file, in which case the name of the file is
876 876 given using a format string. The formatting rules are the same as
877 877 for the export command, with the following additions:
878 878
879 879 %s basename of file being printed
880 880 %d dirname of file being printed, or '.' if in repo root
881 881 %p root-relative path name of file being printed
882 882 """
883 883 mf = {}
884 884 rev = opts['rev']
885 885 if rev:
886 886 node = repo.lookup(rev)
887 887 else:
888 888 node = repo.changelog.tip()
889 889 change = repo.changelog.read(node)
890 890 mf = repo.manifest.read(change[0])
891 891 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
892 892 r = repo.file(abs)
893 893 n = mf[abs]
894 894 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
895 895 fp.write(r.read(n))
896 896
897 897 def clone(ui, source, dest=None, **opts):
898 898 """make a copy of an existing repository
899 899
900 900 Create a copy of an existing repository in a new directory.
901 901
902 902 If no destination directory name is specified, it defaults to the
903 903 basename of the source.
904 904
905 905 The location of the source is added to the new repository's
906 906 .hg/hgrc file, as the default to be used for future pulls.
907 907
908 908 For efficiency, hardlinks are used for cloning whenever the source
909 909 and destination are on the same filesystem. Some filesystems,
910 910 such as AFS, implement hardlinking incorrectly, but do not report
911 911 errors. In these cases, use the --pull option to avoid
912 912 hardlinking.
913 913
914 914 See pull for valid source format details.
915 915 """
916 916 if dest is None:
917 917 dest = os.path.basename(os.path.normpath(source))
918 918
919 919 if os.path.exists(dest):
920 920 raise util.Abort(_("destination '%s' already exists"), dest)
921 921
922 dest = os.path.realpath(dest)
923
924 922 class Dircleanup(object):
925 923 def __init__(self, dir_):
926 924 self.rmtree = shutil.rmtree
927 925 self.dir_ = dir_
928 os.mkdir(dir_)
929 926 def close(self):
930 927 self.dir_ = None
931 928 def __del__(self):
932 929 if self.dir_:
933 930 self.rmtree(self.dir_, True)
934 931
935 932 if opts['ssh']:
936 933 ui.setconfig("ui", "ssh", opts['ssh'])
937 934 if opts['remotecmd']:
938 935 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
939 936
940 937 source = ui.expandpath(source)
941
942 d = Dircleanup(dest)
938 src_repo = hg.repository(ui, source)
939
940 dest_repo = None
941 try:
942 dest_repo = hg.repository(ui, dest)
943 raise util.Abort(_("destination '%s' already exists."), dest)
944 except hg.RepoError:
945 dest_repo = hg.repository(ui, dest, create=1)
946
947 dest_path = None
948 d = None
949 if dest_repo.local():
950 dest_path = os.path.realpath(dest)
951 d = Dircleanup(dest_path)
952
943 953 abspath = source
944 other = hg.repository(ui, source)
945
946 954 copy = False
947 if other.dev() != -1:
955 if src_repo.local() and dest_repo.local():
948 956 abspath = os.path.abspath(source)
949 957 if not opts['pull'] and not opts['rev']:
950 958 copy = True
951 959
952 960 if copy:
953 961 try:
954 962 # we use a lock here because if we race with commit, we
955 963 # can end up with extra data in the cloned revlogs that's
956 964 # not pointed to by changesets, thus causing verify to
957 965 # fail
958 l1 = other.lock()
966 l1 = src_repo.lock()
959 967 except lock.LockException:
960 968 copy = False
961 969
962 970 if copy:
963 971 # we lock here to avoid premature writing to the target
964 os.mkdir(os.path.join(dest, ".hg"))
965 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
966
972 l2 = lock.lock(os.path.join(dest_path, ".hg", "lock"))
973
974 # we need to remove the (empty) data dir in dest so copyfiles can do its work
975 os.rmdir(os.path.join(dest_path, ".hg", "data"))
967 976 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
968 977 for f in files.split():
969 978 src = os.path.join(source, ".hg", f)
970 dst = os.path.join(dest, ".hg", f)
979 dst = os.path.join(dest_path, ".hg", f)
971 980 try:
972 981 util.copyfiles(src, dst)
973 982 except OSError, inst:
974 983 if inst.errno != errno.ENOENT:
975 984 raise
976 985
977 repo = hg.repository(ui, dest)
986 # we need to re-init the repo after manually copying the data into it
987 dest_repo = hg.repository(ui, dest)
978 988
979 989 else:
980 990 revs = None
981 991 if opts['rev']:
982 if not other.local():
992 if not src_repo.local():
983 993 error = _("clone -r not supported yet for remote repositories.")
984 994 raise util.Abort(error)
985 995 else:
986 revs = [other.lookup(rev) for rev in opts['rev']]
987 repo = hg.repository(ui, dest, create=1)
988 repo.pull(other, heads = revs)
989
990 f = repo.opener("hgrc", "w", text=True)
991 f.write("[paths]\n")
992 f.write("default = %s\n" % abspath)
993 f.close()
994
995 if not opts['noupdate']:
996 doupdate(repo.ui, repo)
997
998 d.close()
996 revs = [src_repo.lookup(rev) for rev in opts['rev']]
997
998 if dest_repo.local():
999 dest_repo.pull(src_repo, heads = revs)
1000 elif src_repo.local():
1001 src_repo.push(dest_repo, revs = revs)
1002 else:
1003 error = _("clone from remote to remote not supported.")
1004 raise util.Abort(error)
1005
1006 if dest_repo.local():
1007 f = dest_repo.opener("hgrc", "w", text=True)
1008 f.write("[paths]\n")
1009 f.write("default = %s\n" % abspath)
1010 f.close()
1011
1012 if not opts['noupdate']:
1013 doupdate(dest_repo.ui, dest_repo)
1014
1015 if d:
1016 d.close()
999 1017
1000 1018 def commit(ui, repo, *pats, **opts):
1001 1019 """commit the specified files or all outstanding changes
1002 1020
1003 1021 Commit changes to the given files into the repository.
1004 1022
1005 1023 If a list of files is omitted, all changes reported by "hg status"
1006 1024 will be committed.
1007 1025
1008 1026 If no commit message is specified, the editor configured in your hgrc
1009 1027 or in the EDITOR environment variable is started to enter a message.
1010 1028 """
1011 1029 message = opts['message']
1012 1030 logfile = opts['logfile']
1013 1031
1014 1032 if message and logfile:
1015 1033 raise util.Abort(_('options --message and --logfile are mutually '
1016 1034 'exclusive'))
1017 1035 if not message and logfile:
1018 1036 try:
1019 1037 if logfile == '-':
1020 1038 message = sys.stdin.read()
1021 1039 else:
1022 1040 message = open(logfile).read()
1023 1041 except IOError, inst:
1024 1042 raise util.Abort(_("can't read commit message '%s': %s") %
1025 1043 (logfile, inst.strerror))
1026 1044
1027 1045 if opts['addremove']:
1028 1046 addremove_lock(ui, repo, pats, opts)
1029 1047 fns, match, anypats = matchpats(repo, pats, opts)
1030 1048 if pats:
1031 1049 modified, added, removed, deleted, unknown = (
1032 1050 repo.changes(files=fns, match=match))
1033 1051 files = modified + added + removed
1034 1052 else:
1035 1053 files = []
1036 1054 try:
1037 1055 repo.commit(files, message, opts['user'], opts['date'], match,
1038 1056 force_editor=opts.get('force_editor'))
1039 1057 except ValueError, inst:
1040 1058 raise util.Abort(str(inst))
1041 1059
1042 1060 def docopy(ui, repo, pats, opts, wlock):
1043 1061 # called with the repo lock held
1044 1062 cwd = repo.getcwd()
1045 1063 errors = 0
1046 1064 copied = []
1047 1065 targets = {}
1048 1066
1049 1067 def okaytocopy(abs, rel, exact):
1050 1068 reasons = {'?': _('is not managed'),
1051 1069 'a': _('has been marked for add'),
1052 1070 'r': _('has been marked for remove')}
1053 1071 state = repo.dirstate.state(abs)
1054 1072 reason = reasons.get(state)
1055 1073 if reason:
1056 1074 if state == 'a':
1057 1075 origsrc = repo.dirstate.copied(abs)
1058 1076 if origsrc is not None:
1059 1077 return origsrc
1060 1078 if exact:
1061 1079 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1062 1080 else:
1063 1081 return abs
1064 1082
1065 1083 def copy(origsrc, abssrc, relsrc, target, exact):
1066 1084 abstarget = util.canonpath(repo.root, cwd, target)
1067 1085 reltarget = util.pathto(cwd, abstarget)
1068 1086 prevsrc = targets.get(abstarget)
1069 1087 if prevsrc is not None:
1070 1088 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1071 1089 (reltarget, abssrc, prevsrc))
1072 1090 return
1073 1091 if (not opts['after'] and os.path.exists(reltarget) or
1074 1092 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1075 1093 if not opts['force']:
1076 1094 ui.warn(_('%s: not overwriting - file exists\n') %
1077 1095 reltarget)
1078 1096 return
1079 1097 if not opts['after'] and not opts.get('dry_run'):
1080 1098 os.unlink(reltarget)
1081 1099 if opts['after']:
1082 1100 if not os.path.exists(reltarget):
1083 1101 return
1084 1102 else:
1085 1103 targetdir = os.path.dirname(reltarget) or '.'
1086 1104 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1087 1105 os.makedirs(targetdir)
1088 1106 try:
1089 1107 restore = repo.dirstate.state(abstarget) == 'r'
1090 1108 if restore and not opts.get('dry_run'):
1091 1109 repo.undelete([abstarget], wlock)
1092 1110 try:
1093 1111 if not opts.get('dry_run'):
1094 1112 shutil.copyfile(relsrc, reltarget)
1095 1113 shutil.copymode(relsrc, reltarget)
1096 1114 restore = False
1097 1115 finally:
1098 1116 if restore:
1099 1117 repo.remove([abstarget], wlock)
1100 1118 except shutil.Error, inst:
1101 1119 raise util.Abort(str(inst))
1102 1120 except IOError, inst:
1103 1121 if inst.errno == errno.ENOENT:
1104 1122 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1105 1123 else:
1106 1124 ui.warn(_('%s: cannot copy - %s\n') %
1107 1125 (relsrc, inst.strerror))
1108 1126 errors += 1
1109 1127 return
1110 1128 if ui.verbose or not exact:
1111 1129 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1112 1130 targets[abstarget] = abssrc
1113 1131 if abstarget != origsrc and not opts.get('dry_run'):
1114 1132 repo.copy(origsrc, abstarget, wlock)
1115 1133 copied.append((abssrc, relsrc, exact))
1116 1134
1117 1135 def targetpathfn(pat, dest, srcs):
1118 1136 if os.path.isdir(pat):
1119 1137 abspfx = util.canonpath(repo.root, cwd, pat)
1120 1138 if destdirexists:
1121 1139 striplen = len(os.path.split(abspfx)[0])
1122 1140 else:
1123 1141 striplen = len(abspfx)
1124 1142 if striplen:
1125 1143 striplen += len(os.sep)
1126 1144 res = lambda p: os.path.join(dest, p[striplen:])
1127 1145 elif destdirexists:
1128 1146 res = lambda p: os.path.join(dest, os.path.basename(p))
1129 1147 else:
1130 1148 res = lambda p: dest
1131 1149 return res
1132 1150
1133 1151 def targetpathafterfn(pat, dest, srcs):
1134 1152 if util.patkind(pat, None)[0]:
1135 1153 # a mercurial pattern
1136 1154 res = lambda p: os.path.join(dest, os.path.basename(p))
1137 1155 else:
1138 1156 abspfx = util.canonpath(repo.root, cwd, pat)
1139 1157 if len(abspfx) < len(srcs[0][0]):
1140 1158 # A directory. Either the target path contains the last
1141 1159 # component of the source path or it does not.
1142 1160 def evalpath(striplen):
1143 1161 score = 0
1144 1162 for s in srcs:
1145 1163 t = os.path.join(dest, s[0][striplen:])
1146 1164 if os.path.exists(t):
1147 1165 score += 1
1148 1166 return score
1149 1167
1150 1168 striplen = len(abspfx)
1151 1169 if striplen:
1152 1170 striplen += len(os.sep)
1153 1171 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1154 1172 score = evalpath(striplen)
1155 1173 striplen1 = len(os.path.split(abspfx)[0])
1156 1174 if striplen1:
1157 1175 striplen1 += len(os.sep)
1158 1176 if evalpath(striplen1) > score:
1159 1177 striplen = striplen1
1160 1178 res = lambda p: os.path.join(dest, p[striplen:])
1161 1179 else:
1162 1180 # a file
1163 1181 if destdirexists:
1164 1182 res = lambda p: os.path.join(dest, os.path.basename(p))
1165 1183 else:
1166 1184 res = lambda p: dest
1167 1185 return res
1168 1186
1169 1187
1170 1188 pats = list(pats)
1171 1189 if not pats:
1172 1190 raise util.Abort(_('no source or destination specified'))
1173 1191 if len(pats) == 1:
1174 1192 raise util.Abort(_('no destination specified'))
1175 1193 dest = pats.pop()
1176 1194 destdirexists = os.path.isdir(dest)
1177 1195 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1178 1196 raise util.Abort(_('with multiple sources, destination must be an '
1179 1197 'existing directory'))
1180 1198 if opts['after']:
1181 1199 tfn = targetpathafterfn
1182 1200 else:
1183 1201 tfn = targetpathfn
1184 1202 copylist = []
1185 1203 for pat in pats:
1186 1204 srcs = []
1187 1205 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1188 1206 origsrc = okaytocopy(abssrc, relsrc, exact)
1189 1207 if origsrc:
1190 1208 srcs.append((origsrc, abssrc, relsrc, exact))
1191 1209 if not srcs:
1192 1210 continue
1193 1211 copylist.append((tfn(pat, dest, srcs), srcs))
1194 1212 if not copylist:
1195 1213 raise util.Abort(_('no files to copy'))
1196 1214
1197 1215 for targetpath, srcs in copylist:
1198 1216 for origsrc, abssrc, relsrc, exact in srcs:
1199 1217 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1200 1218
1201 1219 if errors:
1202 1220 ui.warn(_('(consider using --after)\n'))
1203 1221 return errors, copied
1204 1222
1205 1223 def copy(ui, repo, *pats, **opts):
1206 1224 """mark files as copied for the next commit
1207 1225
1208 1226 Mark dest as having copies of source files. If dest is a
1209 1227 directory, copies are put in that directory. If dest is a file,
1210 1228 there can only be one source.
1211 1229
1212 1230 By default, this command copies the contents of files as they
1213 1231 stand in the working directory. If invoked with --after, the
1214 1232 operation is recorded, but no copying is performed.
1215 1233
1216 1234 This command takes effect in the next commit.
1217 1235
1218 1236 NOTE: This command should be treated as experimental. While it
1219 1237 should properly record copied files, this information is not yet
1220 1238 fully used by merge, nor fully reported by log.
1221 1239 """
1222 1240 wlock = repo.wlock(0)
1223 1241 errs, copied = docopy(ui, repo, pats, opts, wlock)
1224 1242 return errs
1225 1243
1226 1244 def debugancestor(ui, index, rev1, rev2):
1227 1245 """find the ancestor revision of two revisions in a given index"""
1228 1246 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1229 1247 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1230 1248 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1231 1249
1232 1250 def debugcomplete(ui, cmd='', **opts):
1233 1251 """returns the completion list associated with the given command"""
1234 1252
1235 1253 if opts['options']:
1236 1254 options = []
1237 1255 otables = [globalopts]
1238 1256 if cmd:
1239 1257 aliases, entry = find(cmd)
1240 1258 otables.append(entry[1])
1241 1259 for t in otables:
1242 1260 for o in t:
1243 1261 if o[0]:
1244 1262 options.append('-%s' % o[0])
1245 1263 options.append('--%s' % o[1])
1246 1264 ui.write("%s\n" % "\n".join(options))
1247 1265 return
1248 1266
1249 1267 clist = findpossible(cmd).keys()
1250 1268 clist.sort()
1251 1269 ui.write("%s\n" % "\n".join(clist))
1252 1270
1253 1271 def debugrebuildstate(ui, repo, rev=None):
1254 1272 """rebuild the dirstate as it would look like for the given revision"""
1255 1273 if not rev:
1256 1274 rev = repo.changelog.tip()
1257 1275 else:
1258 1276 rev = repo.lookup(rev)
1259 1277 change = repo.changelog.read(rev)
1260 1278 n = change[0]
1261 1279 files = repo.manifest.readflags(n)
1262 1280 wlock = repo.wlock()
1263 1281 repo.dirstate.rebuild(rev, files.iteritems())
1264 1282
1265 1283 def debugcheckstate(ui, repo):
1266 1284 """validate the correctness of the current dirstate"""
1267 1285 parent1, parent2 = repo.dirstate.parents()
1268 1286 repo.dirstate.read()
1269 1287 dc = repo.dirstate.map
1270 1288 keys = dc.keys()
1271 1289 keys.sort()
1272 1290 m1n = repo.changelog.read(parent1)[0]
1273 1291 m2n = repo.changelog.read(parent2)[0]
1274 1292 m1 = repo.manifest.read(m1n)
1275 1293 m2 = repo.manifest.read(m2n)
1276 1294 errors = 0
1277 1295 for f in dc:
1278 1296 state = repo.dirstate.state(f)
1279 1297 if state in "nr" and f not in m1:
1280 1298 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1281 1299 errors += 1
1282 1300 if state in "a" and f in m1:
1283 1301 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1284 1302 errors += 1
1285 1303 if state in "m" and f not in m1 and f not in m2:
1286 1304 ui.warn(_("%s in state %s, but not in either manifest\n") %
1287 1305 (f, state))
1288 1306 errors += 1
1289 1307 for f in m1:
1290 1308 state = repo.dirstate.state(f)
1291 1309 if state not in "nrm":
1292 1310 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1293 1311 errors += 1
1294 1312 if errors:
1295 1313 error = _(".hg/dirstate inconsistent with current parent's manifest")
1296 1314 raise util.Abort(error)
1297 1315
1298 1316 def debugconfig(ui, repo, *values):
1299 1317 """show combined config settings from all hgrc files
1300 1318
1301 1319 With no args, print names and values of all config items.
1302 1320
1303 1321 With one arg of the form section.name, print just the value of
1304 1322 that config item.
1305 1323
1306 1324 With multiple args, print names and values of all config items
1307 1325 with matching section names."""
1308 1326
1309 1327 if values:
1310 1328 if len([v for v in values if '.' in v]) > 1:
1311 1329 raise util.Abort(_('only one config item permitted'))
1312 1330 for section, name, value in ui.walkconfig():
1313 1331 sectname = section + '.' + name
1314 1332 if values:
1315 1333 for v in values:
1316 1334 if v == section:
1317 1335 ui.write('%s=%s\n' % (sectname, value))
1318 1336 elif v == sectname:
1319 1337 ui.write(value, '\n')
1320 1338 else:
1321 1339 ui.write('%s=%s\n' % (sectname, value))
1322 1340
1323 1341 def debugsetparents(ui, repo, rev1, rev2=None):
1324 1342 """manually set the parents of the current working directory
1325 1343
1326 1344 This is useful for writing repository conversion tools, but should
1327 1345 be used with care.
1328 1346 """
1329 1347
1330 1348 if not rev2:
1331 1349 rev2 = hex(nullid)
1332 1350
1333 1351 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1334 1352
1335 1353 def debugstate(ui, repo):
1336 1354 """show the contents of the current dirstate"""
1337 1355 repo.dirstate.read()
1338 1356 dc = repo.dirstate.map
1339 1357 keys = dc.keys()
1340 1358 keys.sort()
1341 1359 for file_ in keys:
1342 1360 ui.write("%c %3o %10d %s %s\n"
1343 1361 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1344 1362 time.strftime("%x %X",
1345 1363 time.localtime(dc[file_][3])), file_))
1346 1364 for f in repo.dirstate.copies:
1347 1365 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1348 1366
1349 1367 def debugdata(ui, file_, rev):
1350 1368 """dump the contents of a data file revision"""
1351 1369 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1352 1370 file_[:-2] + ".i", file_, 0)
1353 1371 try:
1354 1372 ui.write(r.revision(r.lookup(rev)))
1355 1373 except KeyError:
1356 1374 raise util.Abort(_('invalid revision identifier %s'), rev)
1357 1375
1358 1376 def debugindex(ui, file_):
1359 1377 """dump the contents of an index file"""
1360 1378 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1361 1379 ui.write(" rev offset length base linkrev" +
1362 1380 " nodeid p1 p2\n")
1363 1381 for i in range(r.count()):
1364 1382 node = r.node(i)
1365 1383 pp = r.parents(node)
1366 1384 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1367 1385 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1368 1386 short(node), short(pp[0]), short(pp[1])))
1369 1387
1370 1388 def debugindexdot(ui, file_):
1371 1389 """dump an index DAG as a .dot file"""
1372 1390 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1373 1391 ui.write("digraph G {\n")
1374 1392 for i in range(r.count()):
1375 1393 node = r.node(i)
1376 1394 pp = r.parents(node)
1377 1395 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1378 1396 if pp[1] != nullid:
1379 1397 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1380 1398 ui.write("}\n")
1381 1399
1382 1400 def debugrename(ui, repo, file, rev=None):
1383 1401 """dump rename information"""
1384 1402 r = repo.file(relpath(repo, [file])[0])
1385 1403 if rev:
1386 1404 try:
1387 1405 # assume all revision numbers are for changesets
1388 1406 n = repo.lookup(rev)
1389 1407 change = repo.changelog.read(n)
1390 1408 m = repo.manifest.read(change[0])
1391 1409 n = m[relpath(repo, [file])[0]]
1392 1410 except (hg.RepoError, KeyError):
1393 1411 n = r.lookup(rev)
1394 1412 else:
1395 1413 n = r.tip()
1396 1414 m = r.renamed(n)
1397 1415 if m:
1398 1416 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1399 1417 else:
1400 1418 ui.write(_("not renamed\n"))
1401 1419
1402 1420 def debugwalk(ui, repo, *pats, **opts):
1403 1421 """show how files match on given patterns"""
1404 1422 items = list(walk(repo, pats, opts))
1405 1423 if not items:
1406 1424 return
1407 1425 fmt = '%%s %%-%ds %%-%ds %%s' % (
1408 1426 max([len(abs) for (src, abs, rel, exact) in items]),
1409 1427 max([len(rel) for (src, abs, rel, exact) in items]))
1410 1428 for src, abs, rel, exact in items:
1411 1429 line = fmt % (src, abs, rel, exact and 'exact' or '')
1412 1430 ui.write("%s\n" % line.rstrip())
1413 1431
1414 1432 def diff(ui, repo, *pats, **opts):
1415 1433 """diff repository (or selected files)
1416 1434
1417 1435 Show differences between revisions for the specified files.
1418 1436
1419 1437 Differences between files are shown using the unified diff format.
1420 1438
1421 1439 When two revision arguments are given, then changes are shown
1422 1440 between those revisions. If only one revision is specified then
1423 1441 that revision is compared to the working directory, and, when no
1424 1442 revisions are specified, the working directory files are compared
1425 1443 to its parent.
1426 1444
1427 1445 Without the -a option, diff will avoid generating diffs of files
1428 1446 it detects as binary. With -a, diff will generate a diff anyway,
1429 1447 probably with undesirable results.
1430 1448 """
1431 1449 node1, node2 = revpair(ui, repo, opts['rev'])
1432 1450
1433 1451 fns, matchfn, anypats = matchpats(repo, pats, opts)
1434 1452
1435 1453 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1436 1454 text=opts['text'], opts=opts)
1437 1455
1438 1456 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1439 1457 node = repo.lookup(changeset)
1440 1458 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1441 1459 if opts['switch_parent']:
1442 1460 parents.reverse()
1443 1461 prev = (parents and parents[0]) or nullid
1444 1462 change = repo.changelog.read(node)
1445 1463
1446 1464 fp = make_file(repo, repo.changelog, opts['output'],
1447 1465 node=node, total=total, seqno=seqno,
1448 1466 revwidth=revwidth)
1449 1467 if fp != sys.stdout:
1450 1468 ui.note("%s\n" % fp.name)
1451 1469
1452 1470 fp.write("# HG changeset patch\n")
1453 1471 fp.write("# User %s\n" % change[1])
1454 1472 fp.write("# Date %d %d\n" % change[2])
1455 1473 fp.write("# Node ID %s\n" % hex(node))
1456 1474 fp.write("# Parent %s\n" % hex(prev))
1457 1475 if len(parents) > 1:
1458 1476 fp.write("# Parent %s\n" % hex(parents[1]))
1459 1477 fp.write(change[4].rstrip())
1460 1478 fp.write("\n\n")
1461 1479
1462 1480 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1463 1481 if fp != sys.stdout:
1464 1482 fp.close()
1465 1483
1466 1484 def export(ui, repo, *changesets, **opts):
1467 1485 """dump the header and diffs for one or more changesets
1468 1486
1469 1487 Print the changeset header and diffs for one or more revisions.
1470 1488
1471 1489 The information shown in the changeset header is: author,
1472 1490 changeset hash, parent and commit comment.
1473 1491
1474 1492 Output may be to a file, in which case the name of the file is
1475 1493 given using a format string. The formatting rules are as follows:
1476 1494
1477 1495 %% literal "%" character
1478 1496 %H changeset hash (40 bytes of hexadecimal)
1479 1497 %N number of patches being generated
1480 1498 %R changeset revision number
1481 1499 %b basename of the exporting repository
1482 1500 %h short-form changeset hash (12 bytes of hexadecimal)
1483 1501 %n zero-padded sequence number, starting at 1
1484 1502 %r zero-padded changeset revision number
1485 1503
1486 1504 Without the -a option, export will avoid generating diffs of files
1487 1505 it detects as binary. With -a, export will generate a diff anyway,
1488 1506 probably with undesirable results.
1489 1507
1490 1508 With the --switch-parent option, the diff will be against the second
1491 1509 parent. It can be useful to review a merge.
1492 1510 """
1493 1511 if not changesets:
1494 1512 raise util.Abort(_("export requires at least one changeset"))
1495 1513 seqno = 0
1496 1514 revs = list(revrange(ui, repo, changesets))
1497 1515 total = len(revs)
1498 1516 revwidth = max(map(len, revs))
1499 1517 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1500 1518 ui.note(msg)
1501 1519 for cset in revs:
1502 1520 seqno += 1
1503 1521 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1504 1522
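# Illustrative sketch (not part of this changeset): a simplified expansion of
# the export -o format string described in the docstring above. The real
# substitution is performed by make_file(); this helper is hypothetical and
# pads %n/%r to a fixed width of two digits instead of the computed revwidth.
def _example_expand_export_name(pattern, node_hex, rev, seqno, total, repo_root):
    import os
    expander = {'%': '%',
                'H': node_hex,                    # full 40-digit hex hash
                'N': str(total),                  # number of patches generated
                'R': str(rev),                    # changeset revision number
                'b': os.path.basename(repo_root), # basename of the repository
                'h': node_hex[:12],               # short-form hash
                'n': '%02d' % seqno,              # sequence number, starting at 1
                'r': '%02d' % rev}                # zero-padded revision number
    out, i = [], 0
    while i < len(pattern):
        if pattern[i] == '%' and i + 1 < len(pattern):
            out.append(expander.get(pattern[i + 1], pattern[i + 1]))
            i += 2
        else:
            out.append(pattern[i])
            i += 1
    return ''.join(out)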
1505 1523 def forget(ui, repo, *pats, **opts):
1506 1524 """don't add the specified files on the next commit (DEPRECATED)
1507 1525
1508 1526 (DEPRECATED)
1509 1527 Undo an 'hg add' scheduled for the next commit.
1510 1528
1511 1529 This command is now deprecated and will be removed in a future
1512 1530 release. Please use revert instead.
1513 1531 """
1514 1532 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1515 1533 forget = []
1516 1534 for src, abs, rel, exact in walk(repo, pats, opts):
1517 1535 if repo.dirstate.state(abs) == 'a':
1518 1536 forget.append(abs)
1519 1537 if ui.verbose or not exact:
1520 1538 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1521 1539 repo.forget(forget)
1522 1540
1523 1541 def grep(ui, repo, pattern, *pats, **opts):
1524 1542 """search for a pattern in specified files and revisions
1525 1543
1526 1544 Search revisions of files for a regular expression.
1527 1545
1528 1546 This command behaves differently than Unix grep. It only accepts
1529 1547 Python/Perl regexps. It searches repository history, not the
1530 1548 working directory. It always prints the revision number in which
1531 1549 a match appears.
1532 1550
1533 1551 By default, grep only prints output for the first revision of a
1534 1552 file in which it finds a match. To get it to print every revision
1535 1553 that contains a change in match status ("-" for a match that
1536 1554 becomes a non-match, or "+" for a non-match that becomes a match),
1537 1555 use the --all flag.
1538 1556 """
1539 1557 reflags = 0
1540 1558 if opts['ignore_case']:
1541 1559 reflags |= re.I
1542 1560 regexp = re.compile(pattern, reflags)
1543 1561 sep, eol = ':', '\n'
1544 1562 if opts['print0']:
1545 1563 sep = eol = '\0'
1546 1564
1547 1565 fcache = {}
1548 1566 def getfile(fn):
1549 1567 if fn not in fcache:
1550 1568 fcache[fn] = repo.file(fn)
1551 1569 return fcache[fn]
1552 1570
1553 1571 def matchlines(body):
1554 1572 begin = 0
1555 1573 linenum = 0
1556 1574 while True:
1557 1575 match = regexp.search(body, begin)
1558 1576 if not match:
1559 1577 break
1560 1578 mstart, mend = match.span()
1561 1579 linenum += body.count('\n', begin, mstart) + 1
1562 1580 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1563 1581 lend = body.find('\n', mend)
1564 1582 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1565 1583 begin = lend + 1
1566 1584
1567 1585 class linestate(object):
1568 1586 def __init__(self, line, linenum, colstart, colend):
1569 1587 self.line = line
1570 1588 self.linenum = linenum
1571 1589 self.colstart = colstart
1572 1590 self.colend = colend
1573 1591 def __eq__(self, other):
1574 1592 return self.line == other.line
1575 1593 def __hash__(self):
1576 1594 return hash(self.line)
1577 1595
1578 1596 matches = {}
1579 1597 def grepbody(fn, rev, body):
1580 1598 matches[rev].setdefault(fn, {})
1581 1599 m = matches[rev][fn]
1582 1600 for lnum, cstart, cend, line in matchlines(body):
1583 1601 s = linestate(line, lnum, cstart, cend)
1584 1602 m[s] = s
1585 1603
1586 1604 # FIXME: prev isn't used, why ?
1587 1605 prev = {}
1588 1606 ucache = {}
1589 1607 def display(fn, rev, states, prevstates):
1590 1608 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1591 1609 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1592 1610 counts = {'-': 0, '+': 0}
1593 1611 filerevmatches = {}
1594 1612 for l in diff:
1595 1613 if incrementing or not opts['all']:
1596 1614 change = ((l in prevstates) and '-') or '+'
1597 1615 r = rev
1598 1616 else:
1599 1617 change = ((l in states) and '-') or '+'
1600 1618 r = prev[fn]
1601 1619 cols = [fn, str(rev)]
1602 1620 if opts['line_number']:
1603 1621 cols.append(str(l.linenum))
1604 1622 if opts['all']:
1605 1623 cols.append(change)
1606 1624 if opts['user']:
1607 1625 cols.append(trimuser(ui, getchange(rev)[1], rev,
1608 1626 ucache))
1609 1627 if opts['files_with_matches']:
1610 1628 c = (fn, rev)
1611 1629 if c in filerevmatches:
1612 1630 continue
1613 1631 filerevmatches[c] = 1
1614 1632 else:
1615 1633 cols.append(l.line)
1616 1634 ui.write(sep.join(cols), eol)
1617 1635 counts[change] += 1
1618 1636 return counts['+'], counts['-']
1619 1637
1620 1638 fstate = {}
1621 1639 skip = {}
1622 1640 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1623 1641 count = 0
1624 1642 incrementing = False
1625 1643 for st, rev, fns in changeiter:
1626 1644 if st == 'window':
1627 1645 incrementing = rev
1628 1646 matches.clear()
1629 1647 elif st == 'add':
1630 1648 change = repo.changelog.read(repo.lookup(str(rev)))
1631 1649 mf = repo.manifest.read(change[0])
1632 1650 matches[rev] = {}
1633 1651 for fn in fns:
1634 1652 if fn in skip:
1635 1653 continue
1636 1654 fstate.setdefault(fn, {})
1637 1655 try:
1638 1656 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1639 1657 except KeyError:
1640 1658 pass
1641 1659 elif st == 'iter':
1642 1660 states = matches[rev].items()
1643 1661 states.sort()
1644 1662 for fn, m in states:
1645 1663 if fn in skip:
1646 1664 continue
1647 1665 if incrementing or not opts['all'] or fstate[fn]:
1648 1666 pos, neg = display(fn, rev, m, fstate[fn])
1649 1667 count += pos + neg
1650 1668 if pos and not opts['all']:
1651 1669 skip[fn] = True
1652 1670 fstate[fn] = m
1653 1671 prev[fn] = rev
1654 1672
1655 1673 if not incrementing:
1656 1674 fstate = fstate.items()
1657 1675 fstate.sort()
1658 1676 for fn, state in fstate:
1659 1677 if fn in skip:
1660 1678 continue
1661 1679 display(fn, rev, {}, state)
1662 1680 return (count == 0 and 1) or 0
1663 1681
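# Illustrative sketch (not part of this changeset): the column layout that
# display() above assembles for each matching line. The helper name and
# keyword arguments are hypothetical; the real code appends the fields
# conditionally, in this order, depending on the grep options used.
def _example_grep_columns(fn, rev, line, linenum=None, change=None, user=None,
                          files_only=False, sep=':'):
    cols = [fn, str(rev)]
    if linenum is not None:      # -n/--line-number
        cols.append(str(linenum))
    if change is not None:       # --all: '+' or '-' for a match status change
        cols.append(change)
    if user is not None:         # -u/--user
        cols.append(user)
    if not files_only:           # omitted with -l/--files-with-matches
        cols.append(line)
    return sep.join(cols)

# _example_grep_columns('hgext/foo.py', 12, 'def foo():', linenum=3)
#   -> 'hgext/foo.py:12:3:def foo():'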
1664 1682 def heads(ui, repo, **opts):
1665 1683 """show current repository heads
1666 1684
1667 1685 Show all repository head changesets.
1668 1686
1669 1687 Repository "heads" are changesets that don't have child
1670 1688 changesets. They are where development generally takes place and
1671 1689 are the usual targets for update and merge operations.
1672 1690 """
1673 1691 if opts['rev']:
1674 1692 heads = repo.heads(repo.lookup(opts['rev']))
1675 1693 else:
1676 1694 heads = repo.heads()
1677 1695 br = None
1678 1696 if opts['branches']:
1679 1697 br = repo.branchlookup(heads)
1680 1698 displayer = show_changeset(ui, repo, opts)
1681 1699 for n in heads:
1682 1700 displayer.show(changenode=n, brinfo=br)
1683 1701
1684 1702 def identify(ui, repo):
1685 1703 """print information about the working copy
1686 1704
1687 1705 Print a short summary of the current state of the repo.
1688 1706
1689 1707 This summary identifies the repository state using one or two parent
1690 1708 hash identifiers, followed by a "+" if there are uncommitted changes
1691 1709 in the working directory, followed by a list of tags for this revision.
1692 1710 """
1693 1711 parents = [p for p in repo.dirstate.parents() if p != nullid]
1694 1712 if not parents:
1695 1713 ui.write(_("unknown\n"))
1696 1714 return
1697 1715
1698 1716 hexfunc = ui.verbose and hex or short
1699 1717 modified, added, removed, deleted, unknown = repo.changes()
1700 1718 output = ["%s%s" %
1701 1719 ('+'.join([hexfunc(parent) for parent in parents]),
1702 1720 (modified or added or removed or deleted) and "+" or "")]
1703 1721
1704 1722 if not ui.quiet:
1705 1723 # multiple tags for a single parent separated by '/'
1706 1724 parenttags = ['/'.join(tags)
1707 1725 for tags in map(repo.nodetags, parents) if tags]
1708 1726 # tags for multiple parents separated by ' + '
1709 1727 if parenttags:
1710 1728 output.append(' + '.join(parenttags))
1711 1729
1712 1730 ui.write("%s\n" % ' '.join(output))
1713 1731
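# Illustrative sketch (not part of this changeset): the shape of the line
# printed by identify above. Inputs are hypothetical, and per-parent tag
# grouping is flattened here for brevity.
def _example_identify_line(parent_hashes, dirty, tags):
    out = ['+'.join(parent_hashes) + (dirty and "+" or "")]
    if tags:
        out.append(' + '.join(tags))
    return ' '.join(out)

# _example_identify_line(['e1831f06c5ea'], True, ['tip']) -> 'e1831f06c5ea+ tip'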
1714 1732 def import_(ui, repo, patch1, *patches, **opts):
1715 1733 """import an ordered set of patches
1716 1734
1717 1735 Import a list of patches and commit them individually.
1718 1736
1719 1737 If there are outstanding changes in the working directory, import
1720 1738 will abort unless given the -f flag.
1721 1739
1722 1740 You can import a patch straight from a mail message. Even patches
1723 1741 attached to the message work (the body part must be of type text/plain
1724 1742 or text/x-patch to be used). The From and Subject headers of the email
1725 1743 message are used as the default committer and commit message. All
1726 1744 text/plain body parts before the first diff are added to the commit
1727 1745 message.
1728 1746
1729 1747 If the imported patch was generated by hg export, the user and
1730 1748 description from the patch override the values from the message headers
1731 1749 and body. Values given on the command line with -m and -u override these.
1732 1750
1733 1751 To read a patch from standard input, use patch name "-".
1734 1752 """
1735 1753 patches = (patch1,) + patches
1736 1754
1737 1755 if not opts['force']:
1738 1756 bail_if_changed(repo)
1739 1757
1740 1758 d = opts["base"]
1741 1759 strip = opts["strip"]
1742 1760
1743 1761 mailre = re.compile(r'(?:From |[\w-]+:)')
1744 1762
1745 1763 # attempt to detect the start of a patch
1746 1764 # (this heuristic is borrowed from quilt)
1747 1765 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1748 1766 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1749 1767 '(---|\*\*\*)[ \t])', re.MULTILINE)
1750 1768
1751 1769 for patch in patches:
1752 1770 pf = os.path.join(d, patch)
1753 1771
1754 1772 message = None
1755 1773 user = None
1756 1774 date = None
1757 1775 hgpatch = False
1758 1776
1759 1777 p = email.Parser.Parser()
1760 1778 if pf == '-':
1761 1779 msg = p.parse(sys.stdin)
1762 1780 ui.status(_("applying patch from stdin\n"))
1763 1781 else:
1764 1782 msg = p.parse(file(pf))
1765 1783 ui.status(_("applying %s\n") % patch)
1766 1784
1767 1785 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1768 1786 tmpfp = os.fdopen(fd, 'w')
1769 1787 try:
1770 1788 message = msg['Subject']
1771 1789 if message:
1772 1790 message = message.replace('\n\t', ' ')
1773 1791 ui.debug('Subject: %s\n' % message)
1774 1792 user = msg['From']
1775 1793 if user:
1776 1794 ui.debug('From: %s\n' % user)
1777 1795 diffs_seen = 0
1778 1796 ok_types = ('text/plain', 'text/x-patch')
1779 1797 for part in msg.walk():
1780 1798 content_type = part.get_content_type()
1781 1799 ui.debug('Content-Type: %s\n' % content_type)
1782 1800 if content_type not in ok_types:
1783 1801 continue
1784 1802 payload = part.get_payload(decode=True)
1785 1803 m = diffre.search(payload)
1786 1804 if m:
1787 1805 ui.debug(_('found patch at byte %d\n') % m.start(0))
1788 1806 diffs_seen += 1
1789 1807 hgpatch = False
1790 1808 fp = cStringIO.StringIO()
1791 1809 if message:
1792 1810 fp.write(message)
1793 1811 fp.write('\n')
1794 1812 for line in payload[:m.start(0)].splitlines():
1795 1813 if line.startswith('# HG changeset patch'):
1796 1814 ui.debug(_('patch generated by hg export\n'))
1797 1815 hgpatch = True
1798 1816 # drop earlier commit message content
1799 1817 fp.seek(0)
1800 1818 fp.truncate()
1801 1819 elif hgpatch:
1802 1820 if line.startswith('# User '):
1803 1821 user = line[7:]
1804 1822 ui.debug('From: %s\n' % user)
1805 1823 elif line.startswith("# Date "):
1806 1824 date = line[7:]
1807 1825 if not line.startswith('# '):
1808 1826 fp.write(line)
1809 1827 fp.write('\n')
1810 1828 message = fp.getvalue()
1811 1829 if tmpfp:
1812 1830 tmpfp.write(payload)
1813 1831 if not payload.endswith('\n'):
1814 1832 tmpfp.write('\n')
1815 1833 elif not diffs_seen and message and content_type == 'text/plain':
1816 1834 message += '\n' + payload
1817 1835
1818 1836 if opts['message']:
1819 1837 # pickup the cmdline msg
1820 1838 message = opts['message']
1821 1839 elif message:
1822 1840 # pickup the patch msg
1823 1841 message = message.strip()
1824 1842 else:
1825 1843 # launch the editor
1826 1844 message = None
1827 1845 ui.debug(_('message:\n%s\n') % message)
1828 1846
1829 1847 tmpfp.close()
1830 1848 if not diffs_seen:
1831 1849 raise util.Abort(_('no diffs found'))
1832 1850
1833 1851 files = util.patch(strip, tmpname, ui)
1834 1852 if len(files) > 0:
1835 1853 addremove_lock(ui, repo, files, {})
1836 1854 repo.commit(files, message, user, date)
1837 1855 finally:
1838 1856 os.unlink(tmpname)
1839 1857
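# Illustrative sketch (not part of this changeset): the patch-start heuristic
# used by diffre above, shown against a few typical header lines. The helper
# name and the sample strings are hypothetical.
def _example_is_patch_start(text):
    import re
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'(---|\*\*\*)[ \t])', re.MULTILINE)
    return diffre.search(text) is not None

# _example_is_patch_start('diff -r 000000000000 foo.py')  -> True
# _example_is_patch_start('--- a/foo.py\t(revision 1)')   -> True
# _example_is_patch_start('Thanks, applying this now.')   -> False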
1840 1858 def incoming(ui, repo, source="default", **opts):
1841 1859 """show new changesets found in source
1842 1860
1843 1861 Show new changesets found in the specified path/URL or the default
1844 1862 pull location. These are the changesets that would be pulled if a pull
1845 1863 was requested.
1846 1864
1847 1865 For a remote repository, using --bundle avoids downloading the changesets
1848 1866 twice if the incoming is followed by a pull.
1849 1867
1850 1868 See pull for valid source format details.
1851 1869 """
1852 1870 source = ui.expandpath(source)
1853 1871 if opts['ssh']:
1854 1872 ui.setconfig("ui", "ssh", opts['ssh'])
1855 1873 if opts['remotecmd']:
1856 1874 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1857 1875
1858 1876 other = hg.repository(ui, source)
1859 1877 incoming = repo.findincoming(other, force=opts["force"])
1860 1878 if not incoming:
1861 1879 ui.status(_("no changes found\n"))
1862 1880 return
1863 1881
1864 1882 cleanup = None
1865 1883 try:
1866 1884 fname = opts["bundle"]
1867 1885 if fname or not other.local():
1868 1886 # create a bundle (uncompressed if other repo is not local)
1869 1887 cg = other.changegroup(incoming, "incoming")
1870 1888 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1871 1889 # keep written bundle?
1872 1890 if opts["bundle"]:
1873 1891 cleanup = None
1874 1892 if not other.local():
1875 1893 # use the created uncompressed bundlerepo
1876 1894 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1877 1895
1878 1896 revs = None
1879 1897 if opts['rev']:
1880 1898 revs = [other.lookup(rev) for rev in opts['rev']]
1881 1899 o = other.changelog.nodesbetween(incoming, revs)[0]
1882 1900 if opts['newest_first']:
1883 1901 o.reverse()
1884 1902 displayer = show_changeset(ui, other, opts)
1885 1903 for n in o:
1886 1904 parents = [p for p in other.changelog.parents(n) if p != nullid]
1887 1905 if opts['no_merges'] and len(parents) == 2:
1888 1906 continue
1889 1907 displayer.show(changenode=n)
1890 1908 if opts['patch']:
1891 1909 prev = (parents and parents[0]) or nullid
1892 1910 dodiff(ui, ui, other, prev, n)
1893 1911 ui.write("\n")
1894 1912 finally:
1895 1913 if hasattr(other, 'close'):
1896 1914 other.close()
1897 1915 if cleanup:
1898 1916 os.unlink(cleanup)
1899 1917
1900 1918 def init(ui, dest="."):
1901 1919 """create a new repository in the given directory
1902 1920
1903 1921 Initialize a new repository in the given directory. If the given
1904 1922 directory does not exist, it is created.
1905 1923
1906 1924 If no directory is given, the current directory is used.
1907 1925 """
1908 if not os.path.exists(dest):
1909 os.mkdir(dest)
1910 1926 hg.repository(ui, dest, create=1)
1911 1927
1912 1928 def locate(ui, repo, *pats, **opts):
1913 1929 """locate files matching specific patterns
1914 1930
1915 1931 Print all files under Mercurial control whose names match the
1916 1932 given patterns.
1917 1933
1918 1934 This command searches the current directory and its
1919 1935 subdirectories. To search an entire repository, move to the root
1920 1936 of the repository.
1921 1937
1922 1938 If no patterns are given to match, this command prints all file
1923 1939 names.
1924 1940
1925 1941 If you want to feed the output of this command into the "xargs"
1926 1942 command, use the "-0" option to both this command and "xargs".
1927 1943 This will avoid the problem of "xargs" treating single filenames
1928 1944 that contain white space as multiple filenames.
1929 1945 """
1930 1946 end = opts['print0'] and '\0' or '\n'
1931 1947 rev = opts['rev']
1932 1948 if rev:
1933 1949 node = repo.lookup(rev)
1934 1950 else:
1935 1951 node = None
1936 1952
1937 1953 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1938 1954 head='(?:.*/|)'):
1939 1955 if not node and repo.dirstate.state(abs) == '?':
1940 1956 continue
1941 1957 if opts['fullpath']:
1942 1958 ui.write(os.path.join(repo.root, abs), end)
1943 1959 else:
1944 1960 ui.write(((pats and rel) or abs), end)
1945 1961
1946 1962 def log(ui, repo, *pats, **opts):
1947 1963 """show revision history of entire repository or files
1948 1964
1949 1965 Print the revision history of the specified files or the entire project.
1950 1966
1951 1967 By default this command outputs: changeset id and hash, tags,
1952 1968 non-trivial parents, user, date and time, and a summary for each
1953 1969 commit. When the -v/--verbose switch is used, the list of changed
1954 1970 files and full commit message is shown.
1955 1971 """
1956 1972 class dui(object):
1957 1973 # Implement and delegate some ui protocol. Save hunks of
1958 1974 # output for later display in the desired order.
1959 1975 def __init__(self, ui):
1960 1976 self.ui = ui
1961 1977 self.hunk = {}
1962 1978 self.header = {}
1963 1979 def bump(self, rev):
1964 1980 self.rev = rev
1965 1981 self.hunk[rev] = []
1966 1982 self.header[rev] = []
1967 1983 def note(self, *args):
1968 1984 if self.verbose:
1969 1985 self.write(*args)
1970 1986 def status(self, *args):
1971 1987 if not self.quiet:
1972 1988 self.write(*args)
1973 1989 def write(self, *args):
1974 1990 self.hunk[self.rev].append(args)
1975 1991 def write_header(self, *args):
1976 1992 self.header[self.rev].append(args)
1977 1993 def debug(self, *args):
1978 1994 if self.debugflag:
1979 1995 self.write(*args)
1980 1996 def __getattr__(self, key):
1981 1997 return getattr(self.ui, key)
1982 1998
1983 1999 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1984 2000
1985 2001 if opts['limit']:
1986 2002 try:
1987 2003 limit = int(opts['limit'])
1988 2004 except ValueError:
1989 2005 raise util.Abort(_('limit must be a positive integer'))
1990 2006 if limit <= 0: raise util.Abort(_('limit must be positive'))
1991 2007 else:
1992 2008 limit = sys.maxint
1993 2009 count = 0
1994 2010
1995 2011 displayer = show_changeset(ui, repo, opts)
1996 2012 for st, rev, fns in changeiter:
1997 2013 if st == 'window':
1998 2014 du = dui(ui)
1999 2015 displayer.ui = du
2000 2016 elif st == 'add':
2001 2017 du.bump(rev)
2002 2018 changenode = repo.changelog.node(rev)
2003 2019 parents = [p for p in repo.changelog.parents(changenode)
2004 2020 if p != nullid]
2005 2021 if opts['no_merges'] and len(parents) == 2:
2006 2022 continue
2007 2023 if opts['only_merges'] and len(parents) != 2:
2008 2024 continue
2009 2025
2010 2026 if opts['keyword']:
2011 2027 changes = getchange(rev)
2012 2028 miss = 0
2013 2029 for k in [kw.lower() for kw in opts['keyword']]:
2014 2030 if not (k in changes[1].lower() or
2015 2031 k in changes[4].lower() or
2016 2032 k in " ".join(changes[3][:20]).lower()):
2017 2033 miss = 1
2018 2034 break
2019 2035 if miss:
2020 2036 continue
2021 2037
2022 2038 br = None
2023 2039 if opts['branches']:
2024 2040 br = repo.branchlookup([repo.changelog.node(rev)])
2025 2041
2026 2042 displayer.show(rev, brinfo=br)
2027 2043 if opts['patch']:
2028 2044 prev = (parents and parents[0]) or nullid
2029 2045 dodiff(du, du, repo, prev, changenode, match=matchfn)
2030 2046 du.write("\n\n")
2031 2047 elif st == 'iter':
2032 2048 if count == limit: break
2033 2049 if du.header[rev]:
2034 2050 for args in du.header[rev]:
2035 2051 ui.write_header(*args)
2036 2052 if du.hunk[rev]:
2037 2053 count += 1
2038 2054 for args in du.hunk[rev]:
2039 2055 ui.write(*args)
2040 2056
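# Illustrative sketch (not part of this changeset): the buffering idea behind
# the dui class above -- output is gathered per revision while a window of
# revisions is walked forward, then replayed in the requested (usually
# reverse) order. The helper and the render callable are hypothetical.
def _example_buffered_output(window_revs, render):
    hunks = {}
    for rev in sorted(window_revs):      # gather data in ascending order
        hunks[rev] = render(rev)
    out = []
    for rev in window_revs:              # replay in the requested order
        out.append(hunks[rev])
    return ''.join(out)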
2041 2057 def manifest(ui, repo, rev=None):
2042 2058 """output the latest or given revision of the project manifest
2043 2059
2044 2060 Print a list of version controlled files for the given revision.
2045 2061
2046 2062 The manifest is the list of files being version controlled. If no revision
2047 2063 is given then the tip is used.
2048 2064 """
2049 2065 if rev:
2050 2066 try:
2051 2067 # assume all revision numbers are for changesets
2052 2068 n = repo.lookup(rev)
2053 2069 change = repo.changelog.read(n)
2054 2070 n = change[0]
2055 2071 except hg.RepoError:
2056 2072 n = repo.manifest.lookup(rev)
2057 2073 else:
2058 2074 n = repo.manifest.tip()
2059 2075 m = repo.manifest.read(n)
2060 2076 mf = repo.manifest.readflags(n)
2061 2077 files = m.keys()
2062 2078 files.sort()
2063 2079
2064 2080 for f in files:
2065 2081 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2066 2082
2067 2083 def merge(ui, repo, node=None, **opts):
2068 2084 """Merge working directory with another revision
2069 2085
2070 2086 Merge the contents of the current working directory and the
2071 2087 requested revision. Files that changed between either parent are
2072 2088 marked as changed for the next commit and a commit must be
2073 2089 performed before any further updates are allowed.
2074 2090 """
2075 2091 return doupdate(ui, repo, node=node, merge=True, **opts)
2076 2092
2077 2093 def outgoing(ui, repo, dest=None, **opts):
2078 2094 """show changesets not found in destination
2079 2095
2080 2096 Show changesets not found in the specified destination repository or
2081 2097 the default push location. These are the changesets that would be pushed
2082 2098 if a push was requested.
2083 2099
2084 2100 See pull for valid destination format details.
2085 2101 """
2086 2102 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2087 2103 if opts['ssh']:
2088 2104 ui.setconfig("ui", "ssh", opts['ssh'])
2089 2105 if opts['remotecmd']:
2090 2106 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2091 2107 revs = None
2092 2108 if opts['rev']:
2093 2109 revs = [repo.lookup(rev) for rev in opts['rev']]
2094 2110
2095 2111 other = hg.repository(ui, dest)
2096 2112 o = repo.findoutgoing(other, force=opts['force'])
2097 2113 if not o:
2098 2114 ui.status(_("no changes found\n"))
2099 2115 return
2100 2116 o = repo.changelog.nodesbetween(o, revs)[0]
2101 2117 if opts['newest_first']:
2102 2118 o.reverse()
2103 2119 displayer = show_changeset(ui, repo, opts)
2104 2120 for n in o:
2105 2121 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2106 2122 if opts['no_merges'] and len(parents) == 2:
2107 2123 continue
2108 2124 displayer.show(changenode=n)
2109 2125 if opts['patch']:
2110 2126 prev = (parents and parents[0]) or nullid
2111 2127 dodiff(ui, ui, repo, prev, n)
2112 2128 ui.write("\n")
2113 2129
2114 2130 def parents(ui, repo, rev=None, branches=None, **opts):
2115 2131 """show the parents of the working dir or revision
2116 2132
2117 2133 Print the working directory's parent revisions.
2118 2134 """
2119 2135 if rev:
2120 2136 p = repo.changelog.parents(repo.lookup(rev))
2121 2137 else:
2122 2138 p = repo.dirstate.parents()
2123 2139
2124 2140 br = None
2125 2141 if branches is not None:
2126 2142 br = repo.branchlookup(p)
2127 2143 displayer = show_changeset(ui, repo, opts)
2128 2144 for n in p:
2129 2145 if n != nullid:
2130 2146 displayer.show(changenode=n, brinfo=br)
2131 2147
2132 2148 def paths(ui, repo, search=None):
2133 2149 """show definition of symbolic path names
2134 2150
2135 2151 Show definition of symbolic path name NAME. If no name is given, show
2136 2152 definition of available names.
2137 2153
2138 2154 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2139 2155 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2140 2156 """
2141 2157 if search:
2142 2158 for name, path in ui.configitems("paths"):
2143 2159 if name == search:
2144 2160 ui.write("%s\n" % path)
2145 2161 return
2146 2162 ui.warn(_("not found!\n"))
2147 2163 return 1
2148 2164 else:
2149 2165 for name, path in ui.configitems("paths"):
2150 2166 ui.write("%s = %s\n" % (name, path))
2151 2167
2152 2168 def postincoming(ui, repo, modheads, optupdate):
2153 2169 if modheads == 0:
2154 2170 return
2155 2171 if optupdate:
2156 2172 if modheads == 1:
2157 2173 return doupdate(ui, repo)
2158 2174 else:
2159 2175 ui.status(_("not updating, since new heads added\n"))
2160 2176 if modheads > 1:
2161 2177 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2162 2178 else:
2163 2179 ui.status(_("(run 'hg update' to get a working copy)\n"))
2164 2180
2165 2181 def pull(ui, repo, source="default", **opts):
2166 2182 """pull changes from the specified source
2167 2183
2168 2184 Pull changes from a remote repository to a local one.
2169 2185
2170 2186 This finds all changes from the repository at the specified path
2171 2187 or URL and adds them to the local repository. By default, this
2172 2188 does not update the copy of the project in the working directory.
2173 2189
2174 2190 Valid URLs are of the form:
2175 2191
2176 2192 local/filesystem/path
2177 2193 http://[user@]host[:port][/path]
2178 2194 https://[user@]host[:port][/path]
2179 2195 ssh://[user@]host[:port][/path]
2180 2196
2181 2197 Some notes about using SSH with Mercurial:
2182 2198 - SSH requires an accessible shell account on the destination machine
2183 2199 and a copy of hg in the remote path, or one specified via remotecmd.
2184 2200 - /path is relative to the remote user's home directory by default.
2185 2201 Use two slashes at the start of a path to specify an absolute path.
2186 2202 - Mercurial doesn't use its own compression via SSH; the right thing
2187 2203 to do is to configure it in your ~/.ssh/config, e.g.:
2188 2204 Host *.mylocalnetwork.example.com
2189 2205 Compression off
2190 2206 Host *
2191 2207 Compression on
2192 2208 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2193 2209 with the --ssh command line option.
2194 2210 """
2195 2211 source = ui.expandpath(source)
2196 2212
2197 2213 if opts['ssh']:
2198 2214 ui.setconfig("ui", "ssh", opts['ssh'])
2199 2215 if opts['remotecmd']:
2200 2216 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2201 2217
2202 2218 other = hg.repository(ui, source)
2203 2219 ui.status(_('pulling from %s\n') % (source))
2204 2220 revs = None
2205 2221 if opts['rev'] and not other.local():
2206 2222 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2207 2223 elif opts['rev']:
2208 2224 revs = [other.lookup(rev) for rev in opts['rev']]
2209 2225 modheads = repo.pull(other, heads=revs, force=opts['force'])
2210 2226 return postincoming(ui, repo, modheads, opts['update'])
2211 2227
2212 2228 def push(ui, repo, dest=None, **opts):
2213 2229 """push changes to the specified destination
2214 2230
2215 2231 Push changes from the local repository to the given destination.
2216 2232
2217 2233 This is the symmetrical operation for pull. It helps to move
2218 2234 changes from the current repository to a different one. If the
2219 2235 destination is local this is identical to a pull in that directory
2220 2236 from the current one.
2221 2237
2222 2238 By default, push will refuse to run if it detects the result would
2223 2239 increase the number of remote heads. This generally indicates that
2224 2240 the client has forgotten to sync and merge before pushing.
2225 2241
2226 2242 Valid URLs are of the form:
2227 2243
2228 2244 local/filesystem/path
2229 2245 ssh://[user@]host[:port][/path]
2230 2246
2231 2247 Look at the help text for the pull command for important details
2232 2248 about ssh:// URLs.
2233 2249 """
2234 2250 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2235 2251
2236 2252 if opts['ssh']:
2237 2253 ui.setconfig("ui", "ssh", opts['ssh'])
2238 2254 if opts['remotecmd']:
2239 2255 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2240 2256
2241 2257 other = hg.repository(ui, dest)
2242 2258 ui.status('pushing to %s\n' % (dest))
2243 2259 revs = None
2244 2260 if opts['rev']:
2245 2261 revs = [repo.lookup(rev) for rev in opts['rev']]
2246 2262 r = repo.push(other, opts['force'], revs=revs)
2247 2263 return r == 0
2248 2264
2249 2265 def rawcommit(ui, repo, *flist, **rc):
2250 2266 """raw commit interface (DEPRECATED)
2251 2267
2252 2268 (DEPRECATED)
2253 2269 Lowlevel commit, for use in helper scripts.
2254 2270
2255 2271 This command is not intended to be used by normal users, as it is
2256 2272 primarily useful for importing from other SCMs.
2257 2273
2258 2274 This command is now deprecated and will be removed in a future
2259 2275 release; please use debugsetparents and commit instead.
2260 2276 """
2261 2277
2262 2278 ui.warn(_("(the rawcommit command is deprecated)\n"))
2263 2279
2264 2280 message = rc['message']
2265 2281 if not message and rc['logfile']:
2266 2282 try:
2267 2283 message = open(rc['logfile']).read()
2268 2284 except IOError:
2269 2285 pass
2270 2286 if not message and not rc['logfile']:
2271 2287 raise util.Abort(_("missing commit message"))
2272 2288
2273 2289 files = relpath(repo, list(flist))
2274 2290 if rc['files']:
2275 2291 files += open(rc['files']).read().splitlines()
2276 2292
2277 2293 rc['parent'] = map(repo.lookup, rc['parent'])
2278 2294
2279 2295 try:
2280 2296 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2281 2297 except ValueError, inst:
2282 2298 raise util.Abort(str(inst))
2283 2299
2284 2300 def recover(ui, repo):
2285 2301 """roll back an interrupted transaction
2286 2302
2287 2303 Recover from an interrupted commit or pull.
2288 2304
2289 2305 This command tries to fix the repository status after an interrupted
2290 2306 operation. It should only be necessary when Mercurial suggests it.
2291 2307 """
2292 2308 if repo.recover():
2293 2309 return repo.verify()
2294 2310 return 1
2295 2311
2296 2312 def remove(ui, repo, *pats, **opts):
2297 2313 """remove the specified files on the next commit
2298 2314
2299 2315 Schedule the indicated files for removal from the repository.
2300 2316
2301 2317 This command schedules the files to be removed at the next commit.
2302 2318 This only removes files from the current branch, not from the
2303 2319 entire project history. If the files still exist in the working
2304 2320 directory, they will be deleted from it. If invoked with --after,
2305 2321 files that have been manually deleted are marked as removed.
2306 2322
2307 2323 Modified files and added files are not removed by default. To
2308 2324 remove them, use the -f/--force option.
2309 2325 """
2310 2326 names = []
2311 2327 if not opts['after'] and not pats:
2312 2328 raise util.Abort(_('no files specified'))
2313 2329 files, matchfn, anypats = matchpats(repo, pats, opts)
2314 2330 exact = dict.fromkeys(files)
2315 2331 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2316 2332 modified, added, removed, deleted, unknown = mardu
2317 2333 remove, forget = [], []
2318 2334 for src, abs, rel, exact in walk(repo, pats, opts):
2319 2335 reason = None
2320 2336 if abs not in deleted and opts['after']:
2321 2337 reason = _('is still present')
2322 2338 elif abs in modified and not opts['force']:
2323 2339 reason = _('is modified (use -f to force removal)')
2324 2340 elif abs in added:
2325 2341 if opts['force']:
2326 2342 forget.append(abs)
2327 2343 continue
2328 2344 reason = _('has been marked for add (use -f to force removal)')
2329 2345 elif abs in unknown:
2330 2346 reason = _('is not managed')
2331 2347 elif abs in removed:
2332 2348 continue
2333 2349 if reason:
2334 2350 if exact:
2335 2351 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2336 2352 else:
2337 2353 if ui.verbose or not exact:
2338 2354 ui.status(_('removing %s\n') % rel)
2339 2355 remove.append(abs)
2340 2356 repo.forget(forget)
2341 2357 repo.remove(remove, unlink=not opts['after'])
2342 2358
2343 2359 def rename(ui, repo, *pats, **opts):
2344 2360 """rename files; equivalent of copy + remove
2345 2361
2346 2362 Mark dest as copies of sources; mark sources for deletion. If
2347 2363 dest is a directory, copies are put in that directory. If dest is
2348 2364 a file, there can only be one source.
2349 2365
2350 2366 By default, this command copies the contents of files as they
2351 2367 stand in the working directory. If invoked with --after, the
2352 2368 operation is recorded, but no copying is performed.
2353 2369
2354 2370 This command takes effect in the next commit.
2355 2371
2356 2372 NOTE: This command should be treated as experimental. While it
2357 2373 should properly record renamed files, this information is not yet
2358 2374 fully used by merge, nor fully reported by log.
2359 2375 """
2360 2376 wlock = repo.wlock(0)
2361 2377 errs, copied = docopy(ui, repo, pats, opts, wlock)
2362 2378 names = []
2363 2379 for abs, rel, exact in copied:
2364 2380 if ui.verbose or not exact:
2365 2381 ui.status(_('removing %s\n') % rel)
2366 2382 names.append(abs)
2367 2383 if not opts.get('dry_run'):
2368 2384 repo.remove(names, True, wlock)
2369 2385 return errs
2370 2386
2371 2387 def revert(ui, repo, *pats, **opts):
2372 2388 """revert files or dirs to their states as of some revision
2373 2389
2374 2390 With no revision specified, revert the named files or directories
2375 2391 to the contents they had in the parent of the working directory.
2376 2392 This restores the contents of the affected files to an unmodified
2377 2393 state. If the working directory has two parents, you must
2378 2394 explicitly specify the revision to revert to.
2379 2395
2380 2396 Modified files are saved with a .orig suffix before reverting.
2381 2397 To disable these backups, use --no-backup.
2382 2398
2383 2399 Using the -r option, revert the given files or directories to
2384 2400 their contents as of a specific revision. This can be helpful to "roll
2385 2401 back" some or all of a change that should not have been committed.
2386 2402
2387 2403 Revert modifies the working directory. It does not commit any
2388 2404 changes, or change the parent of the working directory. If you
2389 2405 revert to a revision other than the parent of the working
2390 2406 directory, the reverted files will thus appear modified
2391 2407 afterwards.
2392 2408
2393 2409 If a file has been deleted, it is recreated. If the executable
2394 2410 mode of a file was changed, it is reset.
2395 2411
2396 2412 If names are given, all files matching the names are reverted.
2397 2413
2398 2414 If no arguments are given, all files in the repository are reverted.
2399 2415 """
2400 2416 parent, p2 = repo.dirstate.parents()
2401 2417 if opts['rev']:
2402 2418 node = repo.lookup(opts['rev'])
2403 2419 elif p2 != nullid:
2404 2420 raise util.Abort(_('working dir has two parents; '
2405 2421 'you must specify the revision to revert to'))
2406 2422 else:
2407 2423 node = parent
2408 2424 mf = repo.manifest.read(repo.changelog.read(node)[0])
2409 2425 if node == parent:
2410 2426 pmf = mf
2411 2427 else:
2412 2428 pmf = None
2413 2429
2414 2430 wlock = repo.wlock()
2415 2431
2416 2432 # need all matching names in dirstate and manifest of target rev,
2417 2433 # so have to walk both. do not print errors if files exist in one
2418 2434 # but not other.
2419 2435
2420 2436 names = {}
2421 2437 target_only = {}
2422 2438
2423 2439 # walk dirstate.
2424 2440
2425 2441 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2426 2442 names[abs] = (rel, exact)
2427 2443 if src == 'b':
2428 2444 target_only[abs] = True
2429 2445
2430 2446 # walk target manifest.
2431 2447
2432 2448 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2433 2449 badmatch=names.has_key):
2434 2450 if abs in names: continue
2435 2451 names[abs] = (rel, exact)
2436 2452 target_only[abs] = True
2437 2453
2438 2454 changes = repo.changes(match=names.has_key, wlock=wlock)
2439 2455 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2440 2456
2441 2457 revert = ([], _('reverting %s\n'))
2442 2458 add = ([], _('adding %s\n'))
2443 2459 remove = ([], _('removing %s\n'))
2444 2460 forget = ([], _('forgetting %s\n'))
2445 2461 undelete = ([], _('undeleting %s\n'))
2446 2462 update = {}
2447 2463
2448 2464 disptable = (
2449 2465 # dispatch table:
2450 2466 # file state
2451 2467 # action if in target manifest
2452 2468 # action if not in target manifest
2453 2469 # make backup if in target manifest
2454 2470 # make backup if not in target manifest
2455 2471 (modified, revert, remove, True, True),
2456 2472 (added, revert, forget, True, False),
2457 2473 (removed, undelete, None, False, False),
2458 2474 (deleted, revert, remove, False, False),
2459 2475 (unknown, add, None, True, False),
2460 2476 (target_only, add, None, False, False),
2461 2477 )
2462 2478
2463 2479 entries = names.items()
2464 2480 entries.sort()
2465 2481
2466 2482 for abs, (rel, exact) in entries:
2467 2483 mfentry = mf.get(abs)
2468 2484 def handle(xlist, dobackup):
2469 2485 xlist[0].append(abs)
2470 2486 update[abs] = 1
2471 2487 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2472 2488 bakname = "%s.orig" % rel
2473 2489 ui.note(_('saving current version of %s as %s\n') %
2474 2490 (rel, bakname))
2475 2491 if not opts.get('dry_run'):
2476 2492 shutil.copyfile(rel, bakname)
2477 2493 shutil.copymode(rel, bakname)
2478 2494 if ui.verbose or not exact:
2479 2495 ui.status(xlist[1] % rel)
2480 2496 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2481 2497 if abs not in table: continue
2482 2498 # file has changed in dirstate
2483 2499 if mfentry:
2484 2500 handle(hitlist, backuphit)
2485 2501 elif misslist is not None:
2486 2502 handle(misslist, backupmiss)
2487 2503 else:
2488 2504 if exact: ui.warn(_('file not managed: %s\n' % rel))
2489 2505 break
2490 2506 else:
2491 2507 # file has not changed in dirstate
2492 2508 if node == parent:
2493 2509 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2494 2510 continue
2495 2511 if pmf is None:
2496 2512 # only need parent manifest in this unlikely case,
2497 2513 # so do not read by default
2498 2514 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2499 2515 if abs in pmf:
2500 2516 if mfentry:
2501 2517 # if version of file is same in parent and target
2502 2518 # manifests, do nothing
2503 2519 if pmf[abs] != mfentry:
2504 2520 handle(revert, False)
2505 2521 else:
2506 2522 handle(remove, False)
2507 2523
2508 2524 if not opts.get('dry_run'):
2509 2525 repo.dirstate.forget(forget[0])
2510 2526 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2511 2527 show_stats=False)
2512 2528 repo.dirstate.update(add[0], 'a')
2513 2529 repo.dirstate.update(undelete[0], 'n')
2514 2530 repo.dirstate.update(remove[0], 'r')
2515 2531 return r
2516 2532
2517 2533 def rollback(ui, repo):
2518 2534 """roll back the last transaction in this repository
2519 2535
2520 2536 Roll back the last transaction in this repository, restoring the
2521 2537 project to its state prior to the transaction.
2522 2538
2523 2539 Transactions are used to encapsulate the effects of all commands
2524 2540 that create new changesets or propagate existing changesets into a
2525 2541 repository. For example, the following commands are transactional,
2526 2542 and their effects can be rolled back:
2527 2543
2528 2544 commit
2529 2545 import
2530 2546 pull
2531 2547 push (with this repository as destination)
2532 2548 unbundle
2533 2549
2534 2550 This command should be used with care. There is only one level of
2535 2551 rollback, and there is no way to undo a rollback.
2536 2552
2537 2553 This command is not intended for use on public repositories. Once
2538 2554 changes are visible for pull by other users, rolling a transaction
2539 2555 back locally is ineffective (someone else may already have pulled
2540 2556 the changes). Furthermore, a race is possible with readers of the
2541 2557 repository; for example an in-progress pull from the repository
2542 2558 may fail if a rollback is performed.
2543 2559 """
2544 2560 repo.rollback()
2545 2561
2546 2562 def root(ui, repo):
2547 2563 """print the root (top) of the current working dir
2548 2564
2549 2565 Print the root directory of the current repository.
2550 2566 """
2551 2567 ui.write(repo.root + "\n")
2552 2568
2553 2569 def serve(ui, repo, **opts):
2554 2570 """export the repository via HTTP
2555 2571
2556 2572 Start a local HTTP repository browser and pull server.
2557 2573
2558 2574 By default, the server logs accesses to stdout and errors to
2559 2575 stderr. Use the "-A" and "-E" options to log to files.
2560 2576 """
2561 2577
2562 2578 if opts["stdio"]:
2563 2579 if repo is None:
2564 2580 raise hg.RepoError(_('no repo found'))
2565 2581 s = sshserver.sshserver(ui, repo)
2566 2582 s.serve_forever()
2567 2583
2568 2584 optlist = ("name templates style address port ipv6"
2569 2585 " accesslog errorlog webdir_conf")
2570 2586 for o in optlist.split():
2571 2587 if opts[o]:
2572 2588 ui.setconfig("web", o, opts[o])
2573 2589
2574 2590 if repo is None and not ui.config("web", "webdir_conf"):
2575 2591 raise hg.RepoError(_('no repo found'))
2576 2592
2577 2593 if opts['daemon'] and not opts['daemon_pipefds']:
2578 2594 rfd, wfd = os.pipe()
2579 2595 args = sys.argv[:]
2580 2596 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2581 2597 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2582 2598 args[0], args)
2583 2599 os.close(wfd)
2584 2600 os.read(rfd, 1)
2585 2601 os._exit(0)
2586 2602
2587 2603 try:
2588 2604 httpd = hgweb.server.create_server(ui, repo)
2589 2605 except socket.error, inst:
2590 2606 raise util.Abort(_('cannot start server: ') + inst.args[1])
2591 2607
2592 2608 if ui.verbose:
2593 2609 addr, port = httpd.socket.getsockname()
2594 2610 if addr == '0.0.0.0':
2595 2611 addr = socket.gethostname()
2596 2612 else:
2597 2613 try:
2598 2614 addr = socket.gethostbyaddr(addr)[0]
2599 2615 except socket.error:
2600 2616 pass
2601 2617 if port != 80:
2602 2618 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2603 2619 else:
2604 2620 ui.status(_('listening at http://%s/\n') % addr)
2605 2621
2606 2622 if opts['pid_file']:
2607 2623 fp = open(opts['pid_file'], 'w')
2608 2624 fp.write(str(os.getpid()))
2609 2625 fp.close()
2610 2626
2611 2627 if opts['daemon_pipefds']:
2612 2628 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2613 2629 os.close(rfd)
2614 2630 os.write(wfd, 'y')
2615 2631 os.close(wfd)
2616 2632 sys.stdout.flush()
2617 2633 sys.stderr.flush()
2618 2634 fd = os.open(util.nulldev, os.O_RDWR)
2619 2635 if fd != 0: os.dup2(fd, 0)
2620 2636 if fd != 1: os.dup2(fd, 1)
2621 2637 if fd != 2: os.dup2(fd, 2)
2622 2638 if fd not in (0, 1, 2): os.close(fd)
2623 2639
2624 2640 httpd.serve_forever()
2625 2641
2626 2642 def status(ui, repo, *pats, **opts):
2627 2643 """show changed files in the working directory
2628 2644
2629 2645 Show changed files in the repository. If names are
2630 2646 given, only files that match are shown.
2631 2647
2632 2648 The codes used to show the status of files are:
2633 2649 M = modified
2634 2650 A = added
2635 2651 R = removed
2636 2652 ! = deleted, but still tracked
2637 2653 ? = not tracked
2638 2654 I = ignored (not shown by default)
2639 2655 """
2640 2656
2641 2657 show_ignored = opts['ignored'] and True or False
2642 2658 files, matchfn, anypats = matchpats(repo, pats, opts)
2643 2659 cwd = (pats and repo.getcwd()) or ''
2644 2660 modified, added, removed, deleted, unknown, ignored = [
2645 2661 [util.pathto(cwd, x) for x in n]
2646 2662 for n in repo.changes(files=files, match=matchfn,
2647 2663 show_ignored=show_ignored)]
2648 2664
2649 2665 changetypes = [('modified', 'M', modified),
2650 2666 ('added', 'A', added),
2651 2667 ('removed', 'R', removed),
2652 2668 ('deleted', '!', deleted),
2653 2669 ('unknown', '?', unknown),
2654 2670 ('ignored', 'I', ignored)]
2655 2671
2656 2672 end = opts['print0'] and '\0' or '\n'
2657 2673
2658 2674 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2659 2675 or changetypes):
2660 2676 if opts['no_status']:
2661 2677 format = "%%s%s" % end
2662 2678 else:
2663 2679 format = "%s %%s%s" % (char, end)
2664 2680
2665 2681 for f in changes:
2666 2682 ui.write(format % f)
2667 2683
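# Illustrative sketch (not part of this changeset): how the per-file output
# format above is built. With -n/--no-status the status letter is dropped;
# with -0/--print0 the newline becomes a NUL byte. The helper name is
# hypothetical.
def _example_status_format(char, no_status=False, print0=False):
    end = print0 and '\0' or '\n'
    if no_status:
        return "%%s%s" % end            # e.g. '%s\n'   -> just the file name
    return "%s %%s%s" % (char, end)     # e.g. 'M %s\n' -> letter, space, name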
2668 2684 def tag(ui, repo, name, rev_=None, **opts):
2669 2685 """add a tag for the current tip or a given revision
2670 2686
2671 2687 Name a particular revision using <name>.
2672 2688
2673 2689 Tags are used to name particular revisions of the repository and are
2674 2690 very useful to compare different revisions, to go back to significant
2675 2691 earlier versions or to mark branch points as releases, etc.
2676 2692
2677 2693 If no revision is given, the tip is used.
2678 2694
2679 2695 To facilitate version control, distribution, and merging of tags,
2680 2696 they are stored as a file named ".hgtags" which is managed
2681 2697 similarly to other project files and can be hand-edited if
2682 2698 necessary. The file '.hg/localtags' is used for local tags (not
2683 2699 shared among repositories).
2684 2700 """
2685 2701 if name == "tip":
2686 2702 raise util.Abort(_("the name 'tip' is reserved"))
2687 2703 if rev_ is not None:
2688 2704 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2689 2705 "please use 'hg tag [-r REV] NAME' instead\n"))
2690 2706 if opts['rev']:
2691 2707 raise util.Abort(_("use only one form to specify the revision"))
2692 2708 if opts['rev']:
2693 2709 rev_ = opts['rev']
2694 2710 if rev_:
2695 2711 r = hex(repo.lookup(rev_))
2696 2712 else:
2697 2713 r = hex(repo.changelog.tip())
2698 2714
2699 2715 disallowed = (revrangesep, '\r', '\n')
2700 2716 for c in disallowed:
2701 2717 if name.find(c) >= 0:
2702 2718 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2703 2719
2704 2720 repo.hook('pretag', throw=True, node=r, tag=name,
2705 2721 local=int(not not opts['local']))
2706 2722
2707 2723 if opts['local']:
2708 2724 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2709 2725 repo.hook('tag', node=r, tag=name, local=1)
2710 2726 return
2711 2727
2712 2728 for x in repo.changes():
2713 2729 if ".hgtags" in x:
2714 2730 raise util.Abort(_("working copy of .hgtags is changed "
2715 2731 "(please commit .hgtags manually)"))
2716 2732
2717 2733 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2718 2734 if repo.dirstate.state(".hgtags") == '?':
2719 2735 repo.add([".hgtags"])
2720 2736
2721 2737 message = (opts['message'] or
2722 2738 _("Added tag %s for changeset %s") % (name, r))
2723 2739 try:
2724 2740 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2725 2741 repo.hook('tag', node=r, tag=name, local=0)
2726 2742 except ValueError, inst:
2727 2743 raise util.Abort(str(inst))
2728 2744
2729 2745 def tags(ui, repo):
2730 2746 """list repository tags
2731 2747
2732 2748 List the repository tags.
2733 2749
2734 2750 This lists both regular and local tags.
2735 2751 """
2736 2752
2737 2753 l = repo.tagslist()
2738 2754 l.reverse()
2739 2755 for t, n in l:
2740 2756 try:
2741 2757 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2742 2758 except KeyError:
2743 2759 r = " ?:?"
2744 2760 if ui.quiet:
2745 2761 ui.write("%s\n" % t)
2746 2762 else:
2747 2763 ui.write("%-30s %s\n" % (t, r))
2748 2764
2749 2765 def tip(ui, repo, **opts):
2750 2766 """show the tip revision
2751 2767
2752 2768 Show the tip revision.
2753 2769 """
2754 2770 n = repo.changelog.tip()
2755 2771 br = None
2756 2772 if opts['branches']:
2757 2773 br = repo.branchlookup([n])
2758 2774 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2759 2775 if opts['patch']:
2760 2776 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2761 2777
2762 2778 def unbundle(ui, repo, fname, **opts):
2763 2779 """apply a changegroup file
2764 2780
2765 2781 Apply a compressed changegroup file generated by the bundle
2766 2782 command.
2767 2783 """
2768 2784 f = urllib.urlopen(fname)
2769 2785
2770 2786 header = f.read(6)
2771 2787 if not header.startswith("HG"):
2772 2788 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2773 2789 elif not header.startswith("HG10"):
2774 2790 raise util.Abort(_("%s: unknown bundle version") % fname)
2775 2791 elif header == "HG10BZ":
2776 2792 def generator(f):
2777 2793 zd = bz2.BZ2Decompressor()
2778 2794 zd.decompress("BZ")
2779 2795 for chunk in f:
2780 2796 yield zd.decompress(chunk)
2781 2797 elif header == "HG10UN":
2782 2798 def generator(f):
2783 2799 for chunk in f:
2784 2800 yield chunk
2785 2801 else:
2786 2802 raise util.Abort(_("%s: unknown bundle compression type")
2787 2803 % fname)
2788 2804 gen = generator(util.filechunkiter(f, 4096))
2789 2805 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2790 2806 return postincoming(ui, repo, modheads, opts['update'])
2791 2807
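# Illustrative sketch (not part of this changeset): how the six-byte header
# read by unbundle above selects a decompressor. The helper name is
# hypothetical; the real code builds a chunk generator instead of a label.
def _example_bundle_type(header):
    if not header.startswith("HG"):
        return "not a Mercurial bundle file"
    elif not header.startswith("HG10"):
        return "unknown bundle version"
    elif header == "HG10BZ":
        return "bzip2-compressed changegroup"
    elif header == "HG10UN":
        return "uncompressed changegroup"
    return "unknown bundle compression type"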
2792 2808 def undo(ui, repo):
2793 2809 """undo the last commit or pull (DEPRECATED)
2794 2810
2795 2811 (DEPRECATED)
2796 2812 This command is now deprecated and will be removed in a future
2797 2813 release. Please use the rollback command instead. For usage
2798 2814 instructions, see the rollback command.
2799 2815 """
2800 2816 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2801 2817 repo.rollback()
2802 2818
2803 2819 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2804 2820 branch=None, **opts):
2805 2821 """update or merge working directory
2806 2822
2807 2823 Update the working directory to the specified revision.
2808 2824
2809 2825 If there are no outstanding changes in the working directory and
2810 2826 there is a linear relationship between the current version and the
2811 2827 requested version, the result is the requested version.
2812 2828
2813 2829 To merge the working directory with another revision, use the
2814 2830 merge command.
2815 2831
2816 2832 By default, update will refuse to run if doing so would require
2817 2833 merging or discarding local changes.
2818 2834 """
2819 2835 if merge:
2820 2836 ui.warn(_('(the -m/--merge option is deprecated; '
2821 2837 'use the merge command instead)\n'))
2822 2838 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2823 2839
2824 2840 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2825 2841 branch=None, **opts):
2826 2842 if branch:
2827 2843 br = repo.branchlookup(branch=branch)
2828 2844 found = []
2829 2845 for x in br:
2830 2846 if branch in br[x]:
2831 2847 found.append(x)
2832 2848 if len(found) > 1:
2833 2849 ui.warn(_("Found multiple heads for %s\n") % branch)
2834 2850 for x in found:
2835 2851 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2836 2852 return 1
2837 2853 if len(found) == 1:
2838 2854 node = found[0]
2839 2855 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2840 2856 else:
2841 2857 ui.warn(_("branch %s not found\n") % (branch))
2842 2858 return 1
2843 2859 else:
2844 2860 node = node and repo.lookup(node) or repo.changelog.tip()
2845 2861 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2846 2862
2847 2863 def verify(ui, repo):
2848 2864 """verify the integrity of the repository
2849 2865
2850 2866 Verify the integrity of the current repository.
2851 2867
2852 2868 This will perform an extensive check of the repository's
2853 2869 integrity, validating the hashes and checksums of each entry in
2854 2870 the changelog, manifest, and tracked files, as well as the
2855 2871 integrity of their crosslinks and indices.
2856 2872 """
2857 2873 return repo.verify()
2858 2874
2859 2875 # Command options and aliases are listed here, alphabetically
2860 2876
2861 2877 table = {
2862 2878 "^add":
2863 2879 (add,
2864 2880 [('I', 'include', [], _('include names matching the given patterns')),
2865 2881 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2866 2882 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2867 2883 _('hg add [OPTION]... [FILE]...')),
2868 2884 "debugaddremove|addremove":
2869 2885 (addremove,
2870 2886 [('I', 'include', [], _('include names matching the given patterns')),
2871 2887 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2872 2888 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2873 2889 _('hg addremove [OPTION]... [FILE]...')),
2874 2890 "^annotate":
2875 2891 (annotate,
2876 2892 [('r', 'rev', '', _('annotate the specified revision')),
2877 2893 ('a', 'text', None, _('treat all files as text')),
2878 2894 ('u', 'user', None, _('list the author')),
2879 2895 ('d', 'date', None, _('list the date')),
2880 2896 ('n', 'number', None, _('list the revision number (default)')),
2881 2897 ('c', 'changeset', None, _('list the changeset')),
2882 2898 ('I', 'include', [], _('include names matching the given patterns')),
2883 2899 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2884 2900 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2885 2901 "archive":
2886 2902 (archive,
2887 2903 [('', 'no-decode', None, _('do not pass files through decoders')),
2888 2904 ('p', 'prefix', '', _('directory prefix for files in archive')),
2889 2905 ('r', 'rev', '', _('revision to distribute')),
2890 2906 ('t', 'type', '', _('type of distribution to create')),
2891 2907 ('I', 'include', [], _('include names matching the given patterns')),
2892 2908 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2893 2909 _('hg archive [OPTION]... DEST')),
2894 2910 "backout":
2895 2911 (backout,
2896 2912 [('', 'merge', None,
2897 2913 _('merge with old dirstate parent after backout')),
2898 2914 ('m', 'message', '', _('use <text> as commit message')),
2899 2915 ('l', 'logfile', '', _('read commit message from <file>')),
2900 2916 ('d', 'date', '', _('record datecode as commit date')),
2901 2917 ('u', 'user', '', _('record user as committer')),
2902 2918 ('I', 'include', [], _('include names matching the given patterns')),
2903 2919 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2904 2920 _('hg backout [OPTION]... REV')),
2905 2921 "bundle":
2906 2922 (bundle,
2907 2923 [('f', 'force', None,
2908 2924 _('run even when remote repository is unrelated'))],
2909 2925 _('hg bundle FILE DEST')),
2910 2926 "cat":
2911 2927 (cat,
2912 2928 [('o', 'output', '', _('print output to file with formatted name')),
2913 2929 ('r', 'rev', '', _('print the given revision')),
2914 2930 ('I', 'include', [], _('include names matching the given patterns')),
2915 2931 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2916 2932 _('hg cat [OPTION]... FILE...')),
2917 2933 "^clone":
2918 2934 (clone,
2919 2935 [('U', 'noupdate', None, _('do not update the new working directory')),
2920 2936 ('r', 'rev', [],
2921 2937 _('a changeset you would like to have after cloning')),
2922 2938 ('', 'pull', None, _('use pull protocol to copy metadata')),
2923 2939 ('e', 'ssh', '', _('specify ssh command to use')),
2924 2940 ('', 'remotecmd', '',
2925 2941 _('specify hg command to run on the remote side'))],
2926 2942 _('hg clone [OPTION]... SOURCE [DEST]')),
2927 2943 "^commit|ci":
2928 2944 (commit,
2929 2945 [('A', 'addremove', None,
2930 2946 _('mark new/missing files as added/removed before committing')),
2931 2947 ('m', 'message', '', _('use <text> as commit message')),
2932 2948 ('l', 'logfile', '', _('read the commit message from <file>')),
2933 2949 ('d', 'date', '', _('record datecode as commit date')),
2934 2950 ('u', 'user', '', _('record user as committer')),
2935 2951 ('I', 'include', [], _('include names matching the given patterns')),
2936 2952 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2937 2953 _('hg commit [OPTION]... [FILE]...')),
2938 2954 "copy|cp":
2939 2955 (copy,
2940 2956 [('A', 'after', None, _('record a copy that has already occurred')),
2941 2957 ('f', 'force', None,
2942 2958 _('forcibly copy over an existing managed file')),
2943 2959 ('I', 'include', [], _('include names matching the given patterns')),
2944 2960 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2945 2961 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2946 2962 _('hg copy [OPTION]... [SOURCE]... DEST')),
2947 2963 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2948 2964 "debugcomplete":
2949 2965 (debugcomplete,
2950 2966 [('o', 'options', None, _('show the command options'))],
2951 2967 _('debugcomplete [-o] CMD')),
2952 2968 "debugrebuildstate":
2953 2969 (debugrebuildstate,
2954 2970 [('r', 'rev', '', _('revision to rebuild to'))],
2955 2971 _('debugrebuildstate [-r REV] [REV]')),
2956 2972 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2957 2973 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2958 2974 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2959 2975 "debugstate": (debugstate, [], _('debugstate')),
2960 2976 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2961 2977 "debugindex": (debugindex, [], _('debugindex FILE')),
2962 2978 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2963 2979 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2964 2980 "debugwalk":
2965 2981 (debugwalk,
2966 2982 [('I', 'include', [], _('include names matching the given patterns')),
2967 2983 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2968 2984 _('debugwalk [OPTION]... [FILE]...')),
2969 2985 "^diff":
2970 2986 (diff,
2971 2987 [('r', 'rev', [], _('revision')),
2972 2988 ('a', 'text', None, _('treat all files as text')),
2973 2989 ('p', 'show-function', None,
2974 2990 _('show which function each change is in')),
2975 2991 ('w', 'ignore-all-space', None,
2976 2992 _('ignore white space when comparing lines')),
2977 2993 ('I', 'include', [], _('include names matching the given patterns')),
2978 2994 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2979 2995 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2980 2996 "^export":
2981 2997 (export,
2982 2998 [('o', 'output', '', _('print output to file with formatted name')),
2983 2999 ('a', 'text', None, _('treat all files as text')),
2984 3000 ('', 'switch-parent', None, _('diff against the second parent'))],
2985 3001 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2986 3002 "debugforget|forget":
2987 3003 (forget,
2988 3004 [('I', 'include', [], _('include names matching the given patterns')),
2989 3005 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2990 3006 _('hg forget [OPTION]... FILE...')),
2991 3007 "grep":
2992 3008 (grep,
2993 3009 [('0', 'print0', None, _('end fields with NUL')),
2994 3010 ('', 'all', None, _('print all revisions that match')),
2995 3011 ('i', 'ignore-case', None, _('ignore case when matching')),
2996 3012 ('l', 'files-with-matches', None,
2997 3013 _('print only filenames and revs that match')),
2998 3014 ('n', 'line-number', None, _('print matching line numbers')),
2999 3015 ('r', 'rev', [], _('search in given revision range')),
3000 3016 ('u', 'user', None, _('print user who committed change')),
3001 3017 ('I', 'include', [], _('include names matching the given patterns')),
3002 3018 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3003 3019 _('hg grep [OPTION]... PATTERN [FILE]...')),
3004 3020 "heads":
3005 3021 (heads,
3006 3022 [('b', 'branches', None, _('show branches')),
3007 3023 ('', 'style', '', _('display using template map file')),
3008 3024 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3009 3025 ('', 'template', '', _('display with template'))],
3010 3026 _('hg heads [-b] [-r <rev>]')),
3011 3027 "help": (help_, [], _('hg help [COMMAND]')),
3012 3028 "identify|id": (identify, [], _('hg identify')),
3013 3029 "import|patch":
3014 3030 (import_,
3015 3031 [('p', 'strip', 1,
3016 3032 _('directory strip option for patch. This has the same\n'
3017 3033 'meaning as the corresponding patch option')),
3018 3034 ('m', 'message', '', _('use <text> as commit message')),
3019 3035 ('b', 'base', '', _('base path')),
3020 3036 ('f', 'force', None,
3021 3037 _('skip check for outstanding uncommitted changes'))],
3022 3038 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3023 3039 "incoming|in": (incoming,
3024 3040 [('M', 'no-merges', None, _('do not show merges')),
3025 3041 ('f', 'force', None,
3026 3042 _('run even when remote repository is unrelated')),
3027 3043 ('', 'style', '', _('display using template map file')),
3028 3044 ('n', 'newest-first', None, _('show newest record first')),
3029 3045 ('', 'bundle', '', _('file to store the bundles into')),
3030 3046 ('p', 'patch', None, _('show patch')),
3031 3047 ('r', 'rev', [], _('a specific revision you would like to pull')),
3032 3048 ('', 'template', '', _('display with template')),
3033 3049 ('e', 'ssh', '', _('specify ssh command to use')),
3034 3050 ('', 'remotecmd', '',
3035 3051 _('specify hg command to run on the remote side'))],
3036 3052 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3037 3053 ' [--bundle FILENAME] [SOURCE]')),
3038 3054 "^init": (init, [], _('hg init [DEST]')),
3039 3055 "locate":
3040 3056 (locate,
3041 3057 [('r', 'rev', '', _('search the repository as it stood at rev')),
3042 3058 ('0', 'print0', None,
3043 3059 _('end filenames with NUL, for use with xargs')),
3044 3060 ('f', 'fullpath', None,
3045 3061 _('print complete paths from the filesystem root')),
3046 3062 ('I', 'include', [], _('include names matching the given patterns')),
3047 3063 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3048 3064 _('hg locate [OPTION]... [PATTERN]...')),
3049 3065 "^log|history":
3050 3066 (log,
3051 3067 [('b', 'branches', None, _('show branches')),
3052 3068 ('k', 'keyword', [], _('search for a keyword')),
3053 3069 ('l', 'limit', '', _('limit number of changes displayed')),
3054 3070 ('r', 'rev', [], _('show the specified revision or range')),
3055 3071 ('M', 'no-merges', None, _('do not show merges')),
3056 3072 ('', 'style', '', _('display using template map file')),
3057 3073 ('m', 'only-merges', None, _('show only merges')),
3058 3074 ('p', 'patch', None, _('show patch')),
3059 3075 ('', 'template', '', _('display with template')),
3060 3076 ('I', 'include', [], _('include names matching the given patterns')),
3061 3077 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3062 3078 _('hg log [OPTION]... [FILE]')),
3063 3079 "manifest": (manifest, [], _('hg manifest [REV]')),
3064 3080 "merge":
3065 3081 (merge,
3066 3082 [('b', 'branch', '', _('merge with head of a specific branch')),
3067 3083 ('f', 'force', None, _('force a merge with outstanding changes'))],
3068 3084 _('hg merge [-b TAG] [-f] [REV]')),
3069 3085 "outgoing|out": (outgoing,
3070 3086 [('M', 'no-merges', None, _('do not show merges')),
3071 3087 ('f', 'force', None,
3072 3088 _('run even when remote repository is unrelated')),
3073 3089 ('p', 'patch', None, _('show patch')),
3074 3090 ('', 'style', '', _('display using template map file')),
3075 3091 ('r', 'rev', [], _('a specific revision you would like to push')),
3076 3092 ('n', 'newest-first', None, _('show newest record first')),
3077 3093 ('', 'template', '', _('display with template')),
3078 3094 ('e', 'ssh', '', _('specify ssh command to use')),
3079 3095 ('', 'remotecmd', '',
3080 3096 _('specify hg command to run on the remote side'))],
3081 3097 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3082 3098 "^parents":
3083 3099 (parents,
3084 3100 [('b', 'branches', None, _('show branches')),
3085 3101 ('', 'style', '', _('display using template map file')),
3086 3102 ('', 'template', '', _('display with template'))],
3087 3103 _('hg parents [-b] [REV]')),
3088 3104 "paths": (paths, [], _('hg paths [NAME]')),
3089 3105 "^pull":
3090 3106 (pull,
3091 3107 [('u', 'update', None,
3092 3108 _('update the working directory to tip after pull')),
3093 3109 ('e', 'ssh', '', _('specify ssh command to use')),
3094 3110 ('f', 'force', None,
3095 3111 _('run even when remote repository is unrelated')),
3096 3112 ('r', 'rev', [], _('a specific revision you would like to pull')),
3097 3113 ('', 'remotecmd', '',
3098 3114 _('specify hg command to run on the remote side'))],
3099 3115 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3100 3116 "^push":
3101 3117 (push,
3102 3118 [('f', 'force', None, _('force push')),
3103 3119 ('e', 'ssh', '', _('specify ssh command to use')),
3104 3120 ('r', 'rev', [], _('a specific revision you would like to push')),
3105 3121 ('', 'remotecmd', '',
3106 3122 _('specify hg command to run on the remote side'))],
3107 3123 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3108 3124 "debugrawcommit|rawcommit":
3109 3125 (rawcommit,
3110 3126 [('p', 'parent', [], _('parent')),
3111 3127 ('d', 'date', '', _('date code')),
3112 3128 ('u', 'user', '', _('user')),
3113 3129 ('F', 'files', '', _('file list')),
3114 3130 ('m', 'message', '', _('commit message')),
3115 3131 ('l', 'logfile', '', _('commit message file'))],
3116 3132 _('hg debugrawcommit [OPTION]... [FILE]...')),
3117 3133 "recover": (recover, [], _('hg recover')),
3118 3134 "^remove|rm":
3119 3135 (remove,
3120 3136 [('A', 'after', None, _('record remove that has already occurred')),
3121 3137 ('f', 'force', None, _('remove file even if modified')),
3122 3138 ('I', 'include', [], _('include names matching the given patterns')),
3123 3139 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3124 3140 _('hg remove [OPTION]... FILE...')),
3125 3141 "rename|mv":
3126 3142 (rename,
3127 3143 [('A', 'after', None, _('record a rename that has already occurred')),
3128 3144 ('f', 'force', None,
3129 3145 _('forcibly copy over an existing managed file')),
3130 3146 ('I', 'include', [], _('include names matching the given patterns')),
3131 3147 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3132 3148 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3133 3149 _('hg rename [OPTION]... SOURCE... DEST')),
3134 3150 "^revert":
3135 3151 (revert,
3136 3152 [('r', 'rev', '', _('revision to revert to')),
3137 3153 ('', 'no-backup', None, _('do not save backup copies of files')),
3138 3154 ('I', 'include', [], _('include names matching given patterns')),
3139 3155 ('X', 'exclude', [], _('exclude names matching given patterns')),
3140 3156 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3141 3157 _('hg revert [-r REV] [NAME]...')),
3142 3158 "rollback": (rollback, [], _('hg rollback')),
3143 3159 "root": (root, [], _('hg root')),
3144 3160 "^serve":
3145 3161 (serve,
3146 3162 [('A', 'accesslog', '', _('name of access log file to write to')),
3147 3163 ('d', 'daemon', None, _('run server in background')),
3148 3164 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3149 3165 ('E', 'errorlog', '', _('name of error log file to write to')),
3150 3166 ('p', 'port', 0, _('port to use (default: 8000)')),
3151 3167 ('a', 'address', '', _('address to use')),
3152 3168 ('n', 'name', '',
3153 3169 _('name to show in web pages (default: working dir)')),
3154 3170 ('', 'webdir-conf', '', _('name of the webdir config file'
3155 3171 ' (serve more than one repo)')),
3156 3172 ('', 'pid-file', '', _('name of file to write process ID to')),
3157 3173 ('', 'stdio', None, _('for remote clients')),
3158 3174 ('t', 'templates', '', _('web templates to use')),
3159 3175 ('', 'style', '', _('template style to use')),
3160 3176 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3161 3177 _('hg serve [OPTION]...')),
3162 3178 "^status|st":
3163 3179 (status,
3164 3180 [('m', 'modified', None, _('show only modified files')),
3165 3181 ('a', 'added', None, _('show only added files')),
3166 3182 ('r', 'removed', None, _('show only removed files')),
3167 3183 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3168 3184 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3169 3185 ('i', 'ignored', None, _('show ignored files')),
3170 3186 ('n', 'no-status', None, _('hide status prefix')),
3171 3187 ('0', 'print0', None,
3172 3188 _('end filenames with NUL, for use with xargs')),
3173 3189 ('I', 'include', [], _('include names matching the given patterns')),
3174 3190 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3175 3191 _('hg status [OPTION]... [FILE]...')),
3176 3192 "tag":
3177 3193 (tag,
3178 3194 [('l', 'local', None, _('make the tag local')),
3179 3195 ('m', 'message', '', _('message for tag commit log entry')),
3180 3196 ('d', 'date', '', _('record datecode as commit date')),
3181 3197 ('u', 'user', '', _('record user as committer')),
3182 3198 ('r', 'rev', '', _('revision to tag'))],
3183 3199 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3184 3200 "tags": (tags, [], _('hg tags')),
3185 3201 "tip":
3186 3202 (tip,
3187 3203 [('b', 'branches', None, _('show branches')),
3188 3204 ('', 'style', '', _('display using template map file')),
3189 3205 ('p', 'patch', None, _('show patch')),
3190 3206 ('', 'template', '', _('display with template'))],
3191 3207 _('hg tip [-b] [-p]')),
3192 3208 "unbundle":
3193 3209 (unbundle,
3194 3210 [('u', 'update', None,
3195 3211 _('update the working directory to tip after unbundle'))],
3196 3212 _('hg unbundle [-u] FILE')),
3197 3213 "debugundo|undo": (undo, [], _('hg undo')),
3198 3214 "^update|up|checkout|co":
3199 3215 (update,
3200 3216 [('b', 'branch', '', _('checkout the head of a specific branch')),
3201 3217 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3202 3218 ('C', 'clean', None, _('overwrite locally modified files')),
3203 3219 ('f', 'force', None, _('force a merge with outstanding changes'))],
3204 3220 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3205 3221 "verify": (verify, [], _('hg verify')),
3206 3222 "version": (show_version, [], _('hg version')),
3207 3223 }
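# Editor's illustrative note (not part of the original source): each entry in
# the table above maps "name|alias1|alias2" to a (function, options, synopsis)
# tuple, where each option is a (shortflag, longflag, default, help) tuple and
# a leading "^" marks the command for the short help list.  An extension could
# hypothetically register a command of the same shape via its cmdtable:
#
#   def hello(ui, repo, **opts):
#       """print a greeting (hypothetical example command)"""
#       ui.write("hello from %s\n" % repo.root)
#
#   cmdtable = {
#       "hello": (hello, [], _('hg hello')),
#   }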
3208 3224
3209 3225 globalopts = [
3210 3226 ('R', 'repository', '',
3211 3227 _('repository root directory or symbolic path name')),
3212 3228 ('', 'cwd', '', _('change working directory')),
3213 3229 ('y', 'noninteractive', None,
3214 3230 _('do not prompt, assume \'yes\' for any required answers')),
3215 3231 ('q', 'quiet', None, _('suppress output')),
3216 3232 ('v', 'verbose', None, _('enable additional output')),
3217 3233 ('', 'config', [], _('set/override config option')),
3218 3234 ('', 'debug', None, _('enable debugging output')),
3219 3235 ('', 'debugger', None, _('start debugger')),
3220 3236 ('', 'lsprof', None, _('print improved command execution profile')),
3221 3237 ('', 'traceback', None, _('print traceback on exception')),
3222 3238 ('', 'time', None, _('time how long the command takes')),
3223 3239 ('', 'profile', None, _('print command execution profile')),
3224 3240 ('', 'version', None, _('output version information and exit')),
3225 3241 ('h', 'help', None, _('display help and exit')),
3226 3242 ]
3227 3243
3228 3244 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3229 3245 " debugindex debugindexdot")
3230 3246 optionalrepo = ("paths serve debugconfig")
3231 3247
3232 3248 def findpossible(cmd):
3233 3249 """
3234 3250 Return cmd -> (aliases, command table entry)
3235 3251 for each matching command.
3236 3252 Return debug commands (or their aliases) only if no normal command matches.
3237 3253 """
3238 3254 choice = {}
3239 3255 debugchoice = {}
3240 3256 for e in table.keys():
3241 3257 aliases = e.lstrip("^").split("|")
3242 3258 found = None
3243 3259 if cmd in aliases:
3244 3260 found = cmd
3245 3261 else:
3246 3262 for a in aliases:
3247 3263 if a.startswith(cmd):
3248 3264 found = a
3249 3265 break
3250 3266 if found is not None:
3251 3267 if aliases[0].startswith("debug"):
3252 3268 debugchoice[found] = (aliases, table[e])
3253 3269 else:
3254 3270 choice[found] = (aliases, table[e])
3255 3271
3256 3272 if not choice and debugchoice:
3257 3273 choice = debugchoice
3258 3274
3259 3275 return choice
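# Illustrative sketch (editor's addition, not in the original source): with
# the command table above, findpossible() resolves exact aliases and
# unambiguous prefixes, and only falls back to debug commands when nothing
# else matches:
#
#   findpossible("st")     # -> {'st': (['status', 'st'], <status entry>)}
#   findpossible("he")     # -> {'heads': ..., 'help': ...}  (ambiguous prefix)
#   findpossible("debugd") # -> {'debugdata': ...}  (debug-only fallback)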
3260 3276
3261 3277 def find(cmd):
3262 3278 """Return (aliases, command table entry) for command string."""
3263 3279 choice = findpossible(cmd)
3264 3280
3265 3281 if choice.has_key(cmd):
3266 3282 return choice[cmd]
3267 3283
3268 3284 if len(choice) > 1:
3269 3285 clist = choice.keys()
3270 3286 clist.sort()
3271 3287 raise AmbiguousCommand(cmd, clist)
3272 3288
3273 3289 if choice:
3274 3290 return choice.values()[0]
3275 3291
3276 3292 raise UnknownCommand(cmd)
3277 3293
3278 3294 def catchterm(*args):
3279 3295 raise util.SignalInterrupt
3280 3296
3281 3297 def run():
3282 3298 sys.exit(dispatch(sys.argv[1:]))
3283 3299
3284 3300 class ParseError(Exception):
3285 3301 """Exception raised on errors in parsing the command line."""
3286 3302
3287 3303 def parse(ui, args):
3288 3304 options = {}
3289 3305 cmdoptions = {}
3290 3306
3291 3307 try:
3292 3308 args = fancyopts.fancyopts(args, globalopts, options)
3293 3309 except fancyopts.getopt.GetoptError, inst:
3294 3310 raise ParseError(None, inst)
3295 3311
3296 3312 if args:
3297 3313 cmd, args = args[0], args[1:]
3298 3314 aliases, i = find(cmd)
3299 3315 cmd = aliases[0]
3300 3316 defaults = ui.config("defaults", cmd)
3301 3317 if defaults:
3302 3318 args = defaults.split() + args
3303 3319 c = list(i[1])
3304 3320 else:
3305 3321 cmd = None
3306 3322 c = []
3307 3323
3308 3324 # combine global options into local
3309 3325 for o in globalopts:
3310 3326 c.append((o[0], o[1], options[o[1]], o[3]))
3311 3327
3312 3328 try:
3313 3329 args = fancyopts.fancyopts(args, c, cmdoptions)
3314 3330 except fancyopts.getopt.GetoptError, inst:
3315 3331 raise ParseError(cmd, inst)
3316 3332
3317 3333 # separate global options back out
3318 3334 for o in globalopts:
3319 3335 n = o[1]
3320 3336 options[n] = cmdoptions[n]
3321 3337 del cmdoptions[n]
3322 3338
3323 3339 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3324 3340
3325 3341 def dispatch(args):
3326 3342 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3327 3343 num = getattr(signal, name, None)
3328 3344 if num: signal.signal(num, catchterm)
3329 3345
3330 3346 try:
3331 3347 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3332 3348 except util.Abort, inst:
3333 3349 sys.stderr.write(_("abort: %s\n") % inst)
3334 3350 return -1
3335 3351
3336 3352 external = []
3337 3353 for x in u.extensions():
3338 3354 try:
3339 3355 if x[1]:
3340 3356 # the module will be loaded in sys.modules
3341 3357 # choose a unique name so that it doesn't
3342 3358 # conflict with other modules
3343 3359 module_name = "hgext_%s" % x[0].replace('.', '_')
3344 3360 mod = imp.load_source(module_name, x[1])
3345 3361 else:
3346 3362 def importh(name):
3347 3363 mod = __import__(name)
3348 3364 components = name.split('.')
3349 3365 for comp in components[1:]:
3350 3366 mod = getattr(mod, comp)
3351 3367 return mod
3352 3368 try:
3353 3369 mod = importh("hgext." + x[0])
3354 3370 except ImportError:
3355 3371 mod = importh(x[0])
3356 3372 external.append(mod)
3357 3373 except (util.SignalInterrupt, KeyboardInterrupt):
3358 3374 raise
3359 3375 except Exception, inst:
3360 3376 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3361 3377 if u.print_exc():
3362 3378 return 1
3363 3379
3364 3380 for x in external:
3365 3381 uisetup = getattr(x, 'uisetup', None)
3366 3382 if uisetup:
3367 3383 uisetup(u)
3368 3384 cmdtable = getattr(x, 'cmdtable', {})
3369 3385 for t in cmdtable:
3370 3386 if t in table:
3371 3387 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3372 3388 table.update(cmdtable)
3373 3389
3374 3390 try:
3375 3391 cmd, func, args, options, cmdoptions = parse(u, args)
3376 3392 if options["time"]:
3377 3393 def get_times():
3378 3394 t = os.times()
3379 3395 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3380 3396 t = (t[0], t[1], t[2], t[3], time.clock())
3381 3397 return t
3382 3398 s = get_times()
3383 3399 def print_time():
3384 3400 t = get_times()
3385 3401 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3386 3402 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3387 3403 atexit.register(print_time)
3388 3404
3389 3405 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3390 3406 not options["noninteractive"], options["traceback"],
3391 3407 options["config"])
3392 3408
3393 3409 # enter the debugger before command execution
3394 3410 if options['debugger']:
3395 3411 pdb.set_trace()
3396 3412
3397 3413 try:
3398 3414 if options['cwd']:
3399 3415 try:
3400 3416 os.chdir(options['cwd'])
3401 3417 except OSError, inst:
3402 3418 raise util.Abort('%s: %s' %
3403 3419 (options['cwd'], inst.strerror))
3404 3420
3405 3421 path = u.expandpath(options["repository"]) or ""
3406 3422 repo = path and hg.repository(u, path=path) or None
3407 3423
3408 3424 if options['help']:
3409 3425 return help_(u, cmd, options['version'])
3410 3426 elif options['version']:
3411 3427 return show_version(u)
3412 3428 elif not cmd:
3413 3429 return help_(u, 'shortlist')
3414 3430
3415 3431 if cmd not in norepo.split():
3416 3432 try:
3417 3433 if not repo:
3418 3434 repo = hg.repository(u, path=path)
3419 3435 u = repo.ui
3420 3436 for x in external:
3421 3437 if hasattr(x, 'reposetup'):
3422 3438 x.reposetup(u, repo)
3423 3439 except hg.RepoError:
3424 3440 if cmd not in optionalrepo.split():
3425 3441 raise
3426 3442 d = lambda: func(u, repo, *args, **cmdoptions)
3427 3443 else:
3428 3444 d = lambda: func(u, *args, **cmdoptions)
3429 3445
3430 3446 try:
3431 3447 if options['profile']:
3432 3448 import hotshot, hotshot.stats
3433 3449 prof = hotshot.Profile("hg.prof")
3434 3450 try:
3435 3451 try:
3436 3452 return prof.runcall(d)
3437 3453 except:
3438 3454 try:
3439 3455 u.warn(_('exception raised - generating '
3440 3456 'profile anyway\n'))
3441 3457 except:
3442 3458 pass
3443 3459 raise
3444 3460 finally:
3445 3461 prof.close()
3446 3462 stats = hotshot.stats.load("hg.prof")
3447 3463 stats.strip_dirs()
3448 3464 stats.sort_stats('time', 'calls')
3449 3465 stats.print_stats(40)
3450 3466 elif options['lsprof']:
3451 3467 try:
3452 3468 from mercurial import lsprof
3453 3469 except ImportError:
3454 3470 raise util.Abort(_(
3455 3471 'lsprof not available - install from '
3456 3472 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3457 3473 p = lsprof.Profiler()
3458 3474 p.enable(subcalls=True)
3459 3475 try:
3460 3476 return d()
3461 3477 finally:
3462 3478 p.disable()
3463 3479 stats = lsprof.Stats(p.getstats())
3464 3480 stats.sort()
3465 3481 stats.pprint(top=10, file=sys.stderr, climit=5)
3466 3482 else:
3467 3483 return d()
3468 3484 finally:
3469 3485 u.flush()
3470 3486 except:
3471 3487 # enter the debugger when we hit an exception
3472 3488 if options['debugger']:
3473 3489 pdb.post_mortem(sys.exc_info()[2])
3474 3490 u.print_exc()
3475 3491 raise
3476 3492 except ParseError, inst:
3477 3493 if inst.args[0]:
3478 3494 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3479 3495 help_(u, inst.args[0])
3480 3496 else:
3481 3497 u.warn(_("hg: %s\n") % inst.args[1])
3482 3498 help_(u, 'shortlist')
3483 3499 except AmbiguousCommand, inst:
3484 3500 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3485 3501 (inst.args[0], " ".join(inst.args[1])))
3486 3502 except UnknownCommand, inst:
3487 3503 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3488 3504 help_(u, 'shortlist')
3489 3505 except hg.RepoError, inst:
3490 3506 u.warn(_("abort: %s!\n") % inst)
3491 3507 except lock.LockHeld, inst:
3492 3508 if inst.errno == errno.ETIMEDOUT:
3493 3509 reason = _('timed out waiting for lock held by %s') % inst.locker
3494 3510 else:
3495 3511 reason = _('lock held by %s') % inst.locker
3496 3512 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3497 3513 except lock.LockUnavailable, inst:
3498 3514 u.warn(_("abort: could not lock %s: %s\n") %
3499 3515 (inst.desc or inst.filename, inst.strerror))
3500 3516 except revlog.RevlogError, inst:
3501 3517 u.warn(_("abort: "), inst, "!\n")
3502 3518 except util.SignalInterrupt:
3503 3519 u.warn(_("killed!\n"))
3504 3520 except KeyboardInterrupt:
3505 3521 try:
3506 3522 u.warn(_("interrupted!\n"))
3507 3523 except IOError, inst:
3508 3524 if inst.errno == errno.EPIPE:
3509 3525 if u.debugflag:
3510 3526 u.warn(_("\nbroken pipe\n"))
3511 3527 else:
3512 3528 raise
3513 3529 except IOError, inst:
3514 3530 if hasattr(inst, "code"):
3515 3531 u.warn(_("abort: %s\n") % inst)
3516 3532 elif hasattr(inst, "reason"):
3517 3533 u.warn(_("abort: error: %s\n") % inst.reason[1])
3518 3534 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3519 3535 if u.debugflag:
3520 3536 u.warn(_("broken pipe\n"))
3521 3537 elif getattr(inst, "strerror", None):
3522 3538 if getattr(inst, "filename", None):
3523 3539 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3524 3540 else:
3525 3541 u.warn(_("abort: %s\n") % inst.strerror)
3526 3542 else:
3527 3543 raise
3528 3544 except OSError, inst:
3529 3545 if hasattr(inst, "filename"):
3530 3546 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3531 3547 else:
3532 3548 u.warn(_("abort: %s\n") % inst.strerror)
3533 3549 except util.Abort, inst:
3534 3550 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3535 3551 except TypeError, inst:
3536 3552 # was this an argument error?
3537 3553 tb = traceback.extract_tb(sys.exc_info()[2])
3538 3554 if len(tb) > 2: # no
3539 3555 raise
3540 3556 u.debug(inst, "\n")
3541 3557 u.warn(_("%s: invalid arguments\n") % cmd)
3542 3558 help_(u, cmd)
3543 3559 except SystemExit, inst:
3544 3560 # Commands shouldn't sys.exit directly, but give a return code.
3545 3561 # Just in case, catch this and pass the exit code to the caller.
3546 3562 return inst.code
3547 3563 except:
3548 3564 u.warn(_("** unknown exception encountered, details follow\n"))
3549 3565 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3550 3566 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3551 3567 % version.get_version())
3552 3568 raise
3553 3569
3554 3570 return -1
@@ -1,70 +1,73 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from repo import *
10 10 from demandload import *
11 11 from i18n import gettext as _
12 12 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
13 13 demandload(globals(), "os util")
14 14
15 15 def bundle(ui, path):
16 16 if path.startswith('bundle://'):
17 17 path = path[9:]
18 18 else:
19 19 path = path[7:]
20 20 s = path.split("+", 1)
21 21 if len(s) == 1:
22 22 repopath, bundlename = "", s[0]
23 23 else:
24 24 repopath, bundlename = s
25 25 return bundlerepo.bundlerepository(ui, repopath, bundlename)
26 26
27 27 def hg(ui, path):
28 28 ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
29 29 return httprepo.httprepository(ui, path.replace("hg://", "http://"))
30 30
31 31 def local_(ui, path, create=0):
32 32 if path.startswith('file:'):
33 33 path = path[5:]
34 34 return localrepo.localrepository(ui, path, create)
35 35
36 def ssh_(ui, path, create=0):
37 return sshrepo.sshrepository(ui, path, create)
38
36 39 def old_http(ui, path):
37 40 ui.warn(_("old-http:// syntax is deprecated, "
38 41 "please use static-http:// instead\n"))
39 42 return statichttprepo.statichttprepository(
40 43 ui, path.replace("old-http://", "http://"))
41 44
42 45 def static_http(ui, path):
43 46 return statichttprepo.statichttprepository(
44 47 ui, path.replace("static-http://", "http://"))
45 48
46 49 schemes = {
47 50 'bundle': bundle,
48 51 'file': local_,
49 52 'hg': hg,
50 53 'http': lambda ui, path: httprepo.httprepository(ui, path),
51 54 'https': lambda ui, path: httprepo.httpsrepository(ui, path),
52 55 'old-http': old_http,
53 'ssh': lambda ui, path: sshrepo.sshrepository(ui, path),
56 'ssh': ssh_,
54 57 'static-http': static_http,
55 58 }
56 59
57 60 def repository(ui, path=None, create=0):
58 61 if not path: path = ''
59 62 scheme = path
60 63 if scheme:
61 64 c = scheme.find(':')
62 65 scheme = c >= 0 and scheme[:c]
63 66 try:
64 67 ctor = schemes.get(scheme) or schemes['file']
65 68 if create:
66 69 return ctor(ui, path, create)
67 70 return ctor(ui, path)
68 71 except TypeError:
69 72 raise util.Abort(_('cannot create new repository over "%s" protocol') %
70 73 scheme)
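# Illustrative sketch (editor's addition, not part of the original source):
# because the 'ssh' scheme now points at ssh_, which accepts a create flag,
# repository() can create a brand-new repository on the remote side,
# e.g. (hypothetical path):
#
#   dest = repository(ui, "ssh://user@host//tmp/newrepo", create=1)
#
# Schemes whose constructors take no create argument fall into the TypeError
# branch above and abort with "cannot create new repository over ... protocol".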
@@ -1,2152 +1,2154 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "appendfile changegroup")
12 12 demandload(globals(), "changelog dirstate filelog manifest repo")
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 14 demandload(globals(), "os revlog util")
15 15
16 16 class localrepository(object):
17 17 capabilities = ()
18 18
19 19 def __del__(self):
20 20 self.transhandle = None
21 21 def __init__(self, parentui, path=None, create=0):
22 22 if not path:
23 23 p = os.getcwd()
24 24 while not os.path.isdir(os.path.join(p, ".hg")):
25 25 oldp = p
26 26 p = os.path.dirname(p)
27 27 if p == oldp:
28 28 raise repo.RepoError(_("no repo found"))
29 29 path = p
30 30 self.path = os.path.join(path, ".hg")
31 31
32 32 if not create and not os.path.isdir(self.path):
33 33 raise repo.RepoError(_("repository %s not found") % path)
34 34
35 35 self.root = os.path.abspath(path)
36 36 self.origroot = path
37 37 self.ui = ui.ui(parentui=parentui)
38 38 self.opener = util.opener(self.path)
39 39 self.wopener = util.opener(self.root)
40 40
41 41 try:
42 42 self.ui.readconfig(self.join("hgrc"), self.root)
43 43 except IOError:
44 44 pass
45 45
46 46 v = self.ui.revlogopts
47 47 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
48 48 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
49 49 fl = v.get('flags', None)
50 50 flags = 0
51 51 if fl != None:
52 52 for x in fl.split():
53 53 flags |= revlog.flagstr(x)
54 54 elif self.revlogv1:
55 55 flags = revlog.REVLOG_DEFAULT_FLAGS
56 56
57 57 v = self.revlogversion | flags
58 58 self.manifest = manifest.manifest(self.opener, v)
59 59 self.changelog = changelog.changelog(self.opener, v)
60 60
61 61 # the changelog might not have the inline index flag
62 62 # on. If the format of the changelog is the same as found in
63 63 # .hgrc, apply any flags found in the .hgrc as well.
64 64 # Otherwise, just use the version from the changelog.
65 65 v = self.changelog.version
66 66 if v == self.revlogversion:
67 67 v |= flags
68 68 self.revlogversion = v
69 69
70 70 self.tagscache = None
71 71 self.nodetagscache = None
72 72 self.encodepats = None
73 73 self.decodepats = None
74 74 self.transhandle = None
75 75
76 76 if create:
77 if not os.path.exists(path):
78 os.mkdir(path)
77 79 os.mkdir(self.path)
78 80 os.mkdir(self.join("data"))
79 81
80 82 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
81 83
82 84 def hook(self, name, throw=False, **args):
83 85 def callhook(hname, funcname):
84 86 '''call python hook. hook is callable object, looked up as
85 87 name in python module. if callable returns "true", hook
86 88 fails, else passes. if hook raises exception, treated as
87 89 hook failure. exception propagates if throw is "true".
88 90
89 91 reason for "true" meaning "hook failed" is so that
90 92 unmodified commands (e.g. mercurial.commands.update) can
91 93 be run as hooks without wrappers to convert return values.'''
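# Illustrative example (editor's addition, not in the original source): a
# python hook configured in hgrc as
#   [hooks]
#   pretxncommit.check = python:myhooks.check
# would hypothetically be written as
#   def check(ui, repo, hooktype, **kwargs):
#       return False   # a falsy return value means the hook passed
# where 'myhooks.check' and the hook name are made-up placeholders.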
92 94
93 95 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
94 96 d = funcname.rfind('.')
95 97 if d == -1:
96 98 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
97 99 % (hname, funcname))
98 100 modname = funcname[:d]
99 101 try:
100 102 obj = __import__(modname)
101 103 except ImportError:
102 104 raise util.Abort(_('%s hook is invalid '
103 105 '(import of "%s" failed)') %
104 106 (hname, modname))
105 107 try:
106 108 for p in funcname.split('.')[1:]:
107 109 obj = getattr(obj, p)
108 110 except AttributeError, err:
109 111 raise util.Abort(_('%s hook is invalid '
110 112 '("%s" is not defined)') %
111 113 (hname, funcname))
112 114 if not callable(obj):
113 115 raise util.Abort(_('%s hook is invalid '
114 116 '("%s" is not callable)') %
115 117 (hname, funcname))
116 118 try:
117 119 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
118 120 except (KeyboardInterrupt, util.SignalInterrupt):
119 121 raise
120 122 except Exception, exc:
121 123 if isinstance(exc, util.Abort):
122 124 self.ui.warn(_('error: %s hook failed: %s\n') %
123 125 (hname, exc.args[0] % exc.args[1:]))
124 126 else:
125 127 self.ui.warn(_('error: %s hook raised an exception: '
126 128 '%s\n') % (hname, exc))
127 129 if throw:
128 130 raise
129 131 self.ui.print_exc()
130 132 return True
131 133 if r:
132 134 if throw:
133 135 raise util.Abort(_('%s hook failed') % hname)
134 136 self.ui.warn(_('warning: %s hook failed\n') % hname)
135 137 return r
136 138
137 139 def runhook(name, cmd):
138 140 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
139 141 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
140 142 r = util.system(cmd, environ=env, cwd=self.root)
141 143 if r:
142 144 desc, r = util.explain_exit(r)
143 145 if throw:
144 146 raise util.Abort(_('%s hook %s') % (name, desc))
145 147 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
146 148 return r
147 149
148 150 r = False
149 151 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
150 152 if hname.split(".", 1)[0] == name and cmd]
151 153 hooks.sort()
152 154 for hname, cmd in hooks:
153 155 if cmd.startswith('python:'):
154 156 r = callhook(hname, cmd[7:].strip()) or r
155 157 else:
156 158 r = runhook(hname, cmd) or r
157 159 return r
158 160
159 161 def tags(self):
160 162 '''return a mapping of tag to node'''
161 163 if not self.tagscache:
162 164 self.tagscache = {}
163 165
164 166 def parsetag(line, context):
165 167 if not line:
166 168 return
167 169 s = l.split(" ", 1)
168 170 if len(s) != 2:
169 171 self.ui.warn(_("%s: cannot parse entry\n") % context)
170 172 return
171 173 node, key = s
172 174 key = key.strip()
173 175 try:
174 176 bin_n = bin(node)
175 177 except TypeError:
176 178 self.ui.warn(_("%s: node '%s' is not well formed\n") %
177 179 (context, node))
178 180 return
179 181 if bin_n not in self.changelog.nodemap:
180 182 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
181 183 (context, key))
182 184 return
183 185 self.tagscache[key] = bin_n
184 186
185 187 # read the tags file from each head, ending with the tip,
186 188 # and add each tag found to the map, with "newer" ones
187 189 # taking precedence
188 190 heads = self.heads()
189 191 heads.reverse()
190 192 fl = self.file(".hgtags")
191 193 for node in heads:
192 194 change = self.changelog.read(node)
193 195 rev = self.changelog.rev(node)
194 196 fn, ff = self.manifest.find(change[0], '.hgtags')
195 197 if fn is None: continue
196 198 count = 0
197 199 for l in fl.read(fn).splitlines():
198 200 count += 1
199 201 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
200 202 (rev, short(node), count))
201 203 try:
202 204 f = self.opener("localtags")
203 205 count = 0
204 206 for l in f:
205 207 count += 1
206 208 parsetag(l, _("localtags, line %d") % count)
207 209 except IOError:
208 210 pass
209 211
210 212 self.tagscache['tip'] = self.changelog.tip()
211 213
212 214 return self.tagscache
213 215
214 216 def tagslist(self):
215 217 '''return a list of tags ordered by revision'''
216 218 l = []
217 219 for t, n in self.tags().items():
218 220 try:
219 221 r = self.changelog.rev(n)
220 222 except:
221 223 r = -2 # sort to the beginning of the list if unknown
222 224 l.append((r, t, n))
223 225 l.sort()
224 226 return [(t, n) for r, t, n in l]
225 227
226 228 def nodetags(self, node):
227 229 '''return the tags associated with a node'''
228 230 if not self.nodetagscache:
229 231 self.nodetagscache = {}
230 232 for t, n in self.tags().items():
231 233 self.nodetagscache.setdefault(n, []).append(t)
232 234 return self.nodetagscache.get(node, [])
233 235
234 236 def lookup(self, key):
235 237 try:
236 238 return self.tags()[key]
237 239 except KeyError:
238 240 try:
239 241 return self.changelog.lookup(key)
240 242 except:
241 243 raise repo.RepoError(_("unknown revision '%s'") % key)
242 244
243 245 def dev(self):
244 246 return os.lstat(self.path).st_dev
245 247
246 248 def local(self):
247 249 return True
248 250
249 251 def join(self, f):
250 252 return os.path.join(self.path, f)
251 253
252 254 def wjoin(self, f):
253 255 return os.path.join(self.root, f)
254 256
255 257 def file(self, f):
256 258 if f[0] == '/':
257 259 f = f[1:]
258 260 return filelog.filelog(self.opener, f, self.revlogversion)
259 261
260 262 def getcwd(self):
261 263 return self.dirstate.getcwd()
262 264
263 265 def wfile(self, f, mode='r'):
264 266 return self.wopener(f, mode)
265 267
266 268 def wread(self, filename):
267 269 if self.encodepats == None:
268 270 l = []
269 271 for pat, cmd in self.ui.configitems("encode"):
270 272 mf = util.matcher(self.root, "", [pat], [], [])[1]
271 273 l.append((mf, cmd))
272 274 self.encodepats = l
273 275
274 276 data = self.wopener(filename, 'r').read()
275 277
276 278 for mf, cmd in self.encodepats:
277 279 if mf(filename):
278 280 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
279 281 data = util.filter(data, cmd)
280 282 break
281 283
282 284 return data
283 285
284 286 def wwrite(self, filename, data, fd=None):
285 287 if self.decodepats == None:
286 288 l = []
287 289 for pat, cmd in self.ui.configitems("decode"):
288 290 mf = util.matcher(self.root, "", [pat], [], [])[1]
289 291 l.append((mf, cmd))
290 292 self.decodepats = l
291 293
292 294 for mf, cmd in self.decodepats:
293 295 if mf(filename):
294 296 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
295 297 data = util.filter(data, cmd)
296 298 break
297 299
298 300 if fd:
299 301 return fd.write(data)
300 302 return self.wopener(filename, 'w').write(data)
301 303
302 304 def transaction(self):
303 305 tr = self.transhandle
304 306 if tr != None and tr.running():
305 307 return tr.nest()
306 308
307 309 # save dirstate for rollback
308 310 try:
309 311 ds = self.opener("dirstate").read()
310 312 except IOError:
311 313 ds = ""
312 314 self.opener("journal.dirstate", "w").write(ds)
313 315
314 316 tr = transaction.transaction(self.ui.warn, self.opener,
315 317 self.join("journal"),
316 318 aftertrans(self.path))
317 319 self.transhandle = tr
318 320 return tr
319 321
320 322 def recover(self):
321 323 l = self.lock()
322 324 if os.path.exists(self.join("journal")):
323 325 self.ui.status(_("rolling back interrupted transaction\n"))
324 326 transaction.rollback(self.opener, self.join("journal"))
325 327 self.reload()
326 328 return True
327 329 else:
328 330 self.ui.warn(_("no interrupted transaction available\n"))
329 331 return False
330 332
331 333 def rollback(self, wlock=None):
332 334 if not wlock:
333 335 wlock = self.wlock()
334 336 l = self.lock()
335 337 if os.path.exists(self.join("undo")):
336 338 self.ui.status(_("rolling back last transaction\n"))
337 339 transaction.rollback(self.opener, self.join("undo"))
338 340 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
339 341 self.reload()
340 342 self.wreload()
341 343 else:
342 344 self.ui.warn(_("no rollback information available\n"))
343 345
344 346 def wreload(self):
345 347 self.dirstate.read()
346 348
347 349 def reload(self):
348 350 self.changelog.load()
349 351 self.manifest.load()
350 352 self.tagscache = None
351 353 self.nodetagscache = None
352 354
353 355 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
354 356 desc=None):
355 357 try:
356 358 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
357 359 except lock.LockHeld, inst:
358 360 if not wait:
359 361 raise
360 362 self.ui.warn(_("waiting for lock on %s held by %s\n") %
361 363 (desc, inst.args[0]))
362 364 # default to 600 seconds timeout
363 365 l = lock.lock(self.join(lockname),
364 366 int(self.ui.config("ui", "timeout") or 600),
365 367 releasefn, desc=desc)
366 368 if acquirefn:
367 369 acquirefn()
368 370 return l
369 371
370 372 def lock(self, wait=1):
371 373 return self.do_lock("lock", wait, acquirefn=self.reload,
372 374 desc=_('repository %s') % self.origroot)
373 375
374 376 def wlock(self, wait=1):
375 377 return self.do_lock("wlock", wait, self.dirstate.write,
376 378 self.wreload,
377 379 desc=_('working directory of %s') % self.origroot)
378 380
379 381 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
380 382 "determine whether a new filenode is needed"
381 383 fp1 = manifest1.get(filename, nullid)
382 384 fp2 = manifest2.get(filename, nullid)
383 385
384 386 if fp2 != nullid:
385 387 # is one parent an ancestor of the other?
386 388 fpa = filelog.ancestor(fp1, fp2)
387 389 if fpa == fp1:
388 390 fp1, fp2 = fp2, nullid
389 391 elif fpa == fp2:
390 392 fp2 = nullid
391 393
392 394 # is the file unmodified from the parent? report existing entry
393 395 if fp2 == nullid and text == filelog.read(fp1):
394 396 return (fp1, None, None)
395 397
396 398 return (None, fp1, fp2)
397 399
398 400 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
399 401 orig_parent = self.dirstate.parents()[0] or nullid
400 402 p1 = p1 or self.dirstate.parents()[0] or nullid
401 403 p2 = p2 or self.dirstate.parents()[1] or nullid
402 404 c1 = self.changelog.read(p1)
403 405 c2 = self.changelog.read(p2)
404 406 m1 = self.manifest.read(c1[0])
405 407 mf1 = self.manifest.readflags(c1[0])
406 408 m2 = self.manifest.read(c2[0])
407 409 changed = []
408 410
409 411 if orig_parent == p1:
410 412 update_dirstate = 1
411 413 else:
412 414 update_dirstate = 0
413 415
414 416 if not wlock:
415 417 wlock = self.wlock()
416 418 l = self.lock()
417 419 tr = self.transaction()
418 420 mm = m1.copy()
419 421 mfm = mf1.copy()
420 422 linkrev = self.changelog.count()
421 423 for f in files:
422 424 try:
423 425 t = self.wread(f)
424 426 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
425 427 r = self.file(f)
426 428 mfm[f] = tm
427 429
428 430 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
429 431 if entry:
430 432 mm[f] = entry
431 433 continue
432 434
433 435 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
434 436 changed.append(f)
435 437 if update_dirstate:
436 438 self.dirstate.update([f], "n")
437 439 except IOError:
438 440 try:
439 441 del mm[f]
440 442 del mfm[f]
441 443 if update_dirstate:
442 444 self.dirstate.forget([f])
443 445 except:
444 446 # deleted from p2?
445 447 pass
446 448
447 449 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
448 450 user = user or self.ui.username()
449 451 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
450 452 tr.close()
451 453 if update_dirstate:
452 454 self.dirstate.setparents(n, nullid)
453 455
454 456 def commit(self, files=None, text="", user=None, date=None,
455 457 match=util.always, force=False, lock=None, wlock=None,
456 458 force_editor=False):
457 459 commit = []
458 460 remove = []
459 461 changed = []
460 462
461 463 if files:
462 464 for f in files:
463 465 s = self.dirstate.state(f)
464 466 if s in 'nmai':
465 467 commit.append(f)
466 468 elif s == 'r':
467 469 remove.append(f)
468 470 else:
469 471 self.ui.warn(_("%s not tracked!\n") % f)
470 472 else:
471 473 modified, added, removed, deleted, unknown = self.changes(match=match)
472 474 commit = modified + added
473 475 remove = removed
474 476
475 477 p1, p2 = self.dirstate.parents()
476 478 c1 = self.changelog.read(p1)
477 479 c2 = self.changelog.read(p2)
478 480 m1 = self.manifest.read(c1[0])
479 481 mf1 = self.manifest.readflags(c1[0])
480 482 m2 = self.manifest.read(c2[0])
481 483
482 484 if not commit and not remove and not force and p2 == nullid:
483 485 self.ui.status(_("nothing changed\n"))
484 486 return None
485 487
486 488 xp1 = hex(p1)
487 489 if p2 == nullid: xp2 = ''
488 490 else: xp2 = hex(p2)
489 491
490 492 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
491 493
492 494 if not wlock:
493 495 wlock = self.wlock()
494 496 if not lock:
495 497 lock = self.lock()
496 498 tr = self.transaction()
497 499
498 500 # check in files
499 501 new = {}
500 502 linkrev = self.changelog.count()
501 503 commit.sort()
502 504 for f in commit:
503 505 self.ui.note(f + "\n")
504 506 try:
505 507 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
506 508 t = self.wread(f)
507 509 except IOError:
508 510 self.ui.warn(_("trouble committing %s!\n") % f)
509 511 raise
510 512
511 513 r = self.file(f)
512 514
513 515 meta = {}
514 516 cp = self.dirstate.copied(f)
515 517 if cp:
516 518 meta["copy"] = cp
517 519 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
518 520 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
519 521 fp1, fp2 = nullid, nullid
520 522 else:
521 523 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
522 524 if entry:
523 525 new[f] = entry
524 526 continue
525 527
526 528 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
527 529 # remember what we've added so that we can later calculate
528 530 # the files to pull from a set of changesets
529 531 changed.append(f)
530 532
531 533 # update manifest
532 534 m1 = m1.copy()
533 535 m1.update(new)
534 536 for f in remove:
535 537 if f in m1:
536 538 del m1[f]
537 539 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
538 540 (new, remove))
539 541
540 542 # add changeset
541 543 new = new.keys()
542 544 new.sort()
543 545
544 546 user = user or self.ui.username()
545 547 if not text or force_editor:
546 548 edittext = []
547 549 if text:
548 550 edittext.append(text)
549 551 edittext.append("")
550 552 if p2 != nullid:
551 553 edittext.append("HG: branch merge")
552 554 edittext.extend(["HG: changed %s" % f for f in changed])
553 555 edittext.extend(["HG: removed %s" % f for f in remove])
554 556 if not changed and not remove:
555 557 edittext.append("HG: no files changed")
556 558 edittext.append("")
557 559 # run editor in the repository root
558 560 olddir = os.getcwd()
559 561 os.chdir(self.root)
560 562 text = self.ui.edit("\n".join(edittext), user)
561 563 os.chdir(olddir)
562 564
563 565 lines = [line.rstrip() for line in text.rstrip().splitlines()]
564 566 while lines and not lines[0]:
565 567 del lines[0]
566 568 if not lines:
567 569 return None
568 570 text = '\n'.join(lines)
569 571 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
570 572 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
571 573 parent2=xp2)
572 574 tr.close()
573 575
574 576 self.dirstate.setparents(n)
575 577 self.dirstate.update(new, "n")
576 578 self.dirstate.forget(remove)
577 579
578 580 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
579 581 return n
580 582
581 583 def walk(self, node=None, files=[], match=util.always, badmatch=None):
582 584 if node:
583 585 fdict = dict.fromkeys(files)
584 586 for fn in self.manifest.read(self.changelog.read(node)[0]):
585 587 fdict.pop(fn, None)
586 588 if match(fn):
587 589 yield 'm', fn
588 590 for fn in fdict:
589 591 if badmatch and badmatch(fn):
590 592 if match(fn):
591 593 yield 'b', fn
592 594 else:
593 595 self.ui.warn(_('%s: No such file in rev %s\n') % (
594 596 util.pathto(self.getcwd(), fn), short(node)))
595 597 else:
596 598 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
597 599 yield src, fn
598 600
599 601 def changes(self, node1=None, node2=None, files=[], match=util.always,
600 602 wlock=None, show_ignored=None):
601 603 """return changes between two nodes or node and working directory
602 604
603 605 If node1 is None, use the first dirstate parent instead.
604 606 If node2 is None, compare node1 with working directory.
605 607 """
606 608
607 609 def fcmp(fn, mf):
608 610 t1 = self.wread(fn)
609 611 t2 = self.file(fn).read(mf.get(fn, nullid))
610 612 return cmp(t1, t2)
611 613
612 614 def mfmatches(node):
613 615 change = self.changelog.read(node)
614 616 mf = dict(self.manifest.read(change[0]))
615 617 for fn in mf.keys():
616 618 if not match(fn):
617 619 del mf[fn]
618 620 return mf
619 621
620 622 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
621 623 compareworking = False
622 624 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
623 625 compareworking = True
624 626
625 627 if not compareworking:
626 628 # read the manifest from node1 before the manifest from node2,
627 629 # so that we'll hit the manifest cache if we're going through
628 630 # all the revisions in parent->child order.
629 631 mf1 = mfmatches(node1)
630 632
631 633 # are we comparing the working directory?
632 634 if not node2:
633 635 if not wlock:
634 636 try:
635 637 wlock = self.wlock(wait=0)
636 638 except lock.LockException:
637 639 wlock = None
638 640 lookup, modified, added, removed, deleted, unknown, ignored = (
639 641 self.dirstate.changes(files, match, show_ignored))
640 642
641 643 # are we comparing working dir against its parent?
642 644 if compareworking:
643 645 if lookup:
644 646 # do a full compare of any files that might have changed
645 647 mf2 = mfmatches(self.dirstate.parents()[0])
646 648 for f in lookup:
647 649 if fcmp(f, mf2):
648 650 modified.append(f)
649 651 elif wlock is not None:
650 652 self.dirstate.update([f], "n")
651 653 else:
652 654 # we are comparing working dir against non-parent
653 655 # generate a pseudo-manifest for the working dir
654 656 mf2 = mfmatches(self.dirstate.parents()[0])
655 657 for f in lookup + modified + added:
656 658 mf2[f] = ""
657 659 for f in removed:
658 660 if f in mf2:
659 661 del mf2[f]
660 662 else:
661 663 # we are comparing two revisions
662 664 deleted, unknown, ignored = [], [], []
663 665 mf2 = mfmatches(node2)
664 666
665 667 if not compareworking:
666 668 # flush lists from dirstate before comparing manifests
667 669 modified, added = [], []
668 670
669 671 # make sure to sort the files so we talk to the disk in a
670 672 # reasonable order
671 673 mf2keys = mf2.keys()
672 674 mf2keys.sort()
673 675 for fn in mf2keys:
674 676 if mf1.has_key(fn):
675 677 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
676 678 modified.append(fn)
677 679 del mf1[fn]
678 680 else:
679 681 added.append(fn)
680 682
681 683 removed = mf1.keys()
682 684
683 685 # sort and return results:
684 686 for l in modified, added, removed, deleted, unknown, ignored:
685 687 l.sort()
686 688 if show_ignored is None:
687 689 return (modified, added, removed, deleted, unknown)
688 690 else:
689 691 return (modified, added, removed, deleted, unknown, ignored)
690 692
691 693 def add(self, list, wlock=None):
692 694 if not wlock:
693 695 wlock = self.wlock()
694 696 for f in list:
695 697 p = self.wjoin(f)
696 698 if not os.path.exists(p):
697 699 self.ui.warn(_("%s does not exist!\n") % f)
698 700 elif not os.path.isfile(p):
699 701 self.ui.warn(_("%s not added: only files supported currently\n")
700 702 % f)
701 703 elif self.dirstate.state(f) in 'an':
702 704 self.ui.warn(_("%s already tracked!\n") % f)
703 705 else:
704 706 self.dirstate.update([f], "a")
705 707
706 708 def forget(self, list, wlock=None):
707 709 if not wlock:
708 710 wlock = self.wlock()
709 711 for f in list:
710 712 if self.dirstate.state(f) not in 'ai':
711 713 self.ui.warn(_("%s not added!\n") % f)
712 714 else:
713 715 self.dirstate.forget([f])
714 716
715 717 def remove(self, list, unlink=False, wlock=None):
716 718 if unlink:
717 719 for f in list:
718 720 try:
719 721 util.unlink(self.wjoin(f))
720 722 except OSError, inst:
721 723 if inst.errno != errno.ENOENT:
722 724 raise
723 725 if not wlock:
724 726 wlock = self.wlock()
725 727 for f in list:
726 728 p = self.wjoin(f)
727 729 if os.path.exists(p):
728 730 self.ui.warn(_("%s still exists!\n") % f)
729 731 elif self.dirstate.state(f) == 'a':
730 732 self.dirstate.forget([f])
731 733 elif f not in self.dirstate:
732 734 self.ui.warn(_("%s not tracked!\n") % f)
733 735 else:
734 736 self.dirstate.update([f], "r")
735 737
736 738 def undelete(self, list, wlock=None):
737 739 p = self.dirstate.parents()[0]
738 740 mn = self.changelog.read(p)[0]
739 741 mf = self.manifest.readflags(mn)
740 742 m = self.manifest.read(mn)
741 743 if not wlock:
742 744 wlock = self.wlock()
743 745 for f in list:
744 746 if self.dirstate.state(f) not in "r":
745 747 self.ui.warn("%s not removed!\n" % f)
746 748 else:
747 749 t = self.file(f).read(m[f])
748 750 self.wwrite(f, t)
749 751 util.set_exec(self.wjoin(f), mf[f])
750 752 self.dirstate.update([f], "n")
751 753
752 754 def copy(self, source, dest, wlock=None):
753 755 p = self.wjoin(dest)
754 756 if not os.path.exists(p):
755 757 self.ui.warn(_("%s does not exist!\n") % dest)
756 758 elif not os.path.isfile(p):
757 759 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
758 760 else:
759 761 if not wlock:
760 762 wlock = self.wlock()
761 763 if self.dirstate.state(dest) == '?':
762 764 self.dirstate.update([dest], "a")
763 765 self.dirstate.copy(source, dest)
764 766
765 767 def heads(self, start=None):
766 768 heads = self.changelog.heads(start)
767 769 # sort the output in rev descending order
768 770 heads = [(-self.changelog.rev(h), h) for h in heads]
769 771 heads.sort()
770 772 return [n for (r, n) in heads]
771 773
772 774 # branchlookup returns a dict giving a list of branches for
773 775 # each head. A branch is defined as the tag of a node or
774 776 # the branch of the node's parents. If a node has multiple
775 777 # branch tags, tags are eliminated if they are visible from other
776 778 # branch tags.
777 779 #
778 780 # So, for this graph: a->b->c->d->e
779 781 # \ /
780 782 # aa -----/
781 783 # a has tag 2.6.12
782 784 # d has tag 2.6.13
783 785 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
784 786 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
785 787 # from the list.
786 788 #
787 789 # It is possible that more than one head will have the same branch tag.
788 790 # callers need to check the result for multiple heads under the same
789 791 # branch tag if that is a problem for them (ie checkout of a specific
790 792 # branch).
791 793 #
792 794 # passing in a specific branch will limit the depth of the search
793 795 # through the parents. It won't limit the branches returned in the
794 796 # result though.
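# Illustrative result shape (editor's addition, not in the original source):
# for the graph above, branchlookup() would map the head node e to something
# like {e: ['2.6.13']}, since the 2.6.12 tag is reachable from 2.6.13 and is
# therefore eliminated.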
795 797 def branchlookup(self, heads=None, branch=None):
796 798 if not heads:
797 799 heads = self.heads()
798 800 headt = [ h for h in heads ]
799 801 chlog = self.changelog
800 802 branches = {}
801 803 merges = []
802 804 seenmerge = {}
803 805
804 806 # traverse the tree once for each head, recording in the branches
805 807 # dict which tags are visible from this head. The branches
806 808 # dict also records which tags are visible from each tag
807 809 # while we traverse.
808 810 while headt or merges:
809 811 if merges:
810 812 n, found = merges.pop()
811 813 visit = [n]
812 814 else:
813 815 h = headt.pop()
814 816 visit = [h]
815 817 found = [h]
816 818 seen = {}
817 819 while visit:
818 820 n = visit.pop()
819 821 if n in seen:
820 822 continue
821 823 pp = chlog.parents(n)
822 824 tags = self.nodetags(n)
823 825 if tags:
824 826 for x in tags:
825 827 if x == 'tip':
826 828 continue
827 829 for f in found:
828 830 branches.setdefault(f, {})[n] = 1
829 831 branches.setdefault(n, {})[n] = 1
830 832 break
831 833 if n not in found:
832 834 found.append(n)
833 835 if branch in tags:
834 836 continue
835 837 seen[n] = 1
836 838 if pp[1] != nullid and n not in seenmerge:
837 839 merges.append((pp[1], [x for x in found]))
838 840 seenmerge[n] = 1
839 841 if pp[0] != nullid:
840 842 visit.append(pp[0])
841 843 # traverse the branches dict, eliminating branch tags from each
842 844 # head that are visible from another branch tag for that head.
843 845 out = {}
844 846 viscache = {}
845 847 for h in heads:
846 848 def visible(node):
847 849 if node in viscache:
848 850 return viscache[node]
849 851 ret = {}
850 852 visit = [node]
851 853 while visit:
852 854 x = visit.pop()
853 855 if x in viscache:
854 856 ret.update(viscache[x])
855 857 elif x not in ret:
856 858 ret[x] = 1
857 859 if x in branches:
858 860 visit[len(visit):] = branches[x].keys()
859 861 viscache[node] = ret
860 862 return ret
861 863 if h not in branches:
862 864 continue
863 865 # O(n^2), but somewhat limited. This only searches the
864 866 # tags visible from a specific head, not all the tags in the
865 867 # whole repo.
866 868 for b in branches[h]:
867 869 vis = False
868 870 for bb in branches[h].keys():
869 871 if b != bb:
870 872 if b in visible(bb):
871 873 vis = True
872 874 break
873 875 if not vis:
874 876 l = out.setdefault(h, [])
875 877 l[len(l):] = self.nodetags(b)
876 878 return out
877 879
878 880 def branches(self, nodes):
879 881 if not nodes:
880 882 nodes = [self.changelog.tip()]
881 883 b = []
882 884 for n in nodes:
883 885 t = n
884 886 while 1:
885 887 p = self.changelog.parents(n)
886 888 if p[1] != nullid or p[0] == nullid:
887 889 b.append((t, n, p[0], p[1]))
888 890 break
889 891 n = p[0]
890 892 return b
891 893
892 894 def between(self, pairs):
893 895 r = []
894 896
895 897 for top, bottom in pairs:
896 898 n, l, i = top, [], 0
897 899 f = 1
898 900
899 901 while n != bottom:
900 902 p = self.changelog.parents(n)[0]
901 903 if i == f:
902 904 l.append(n)
903 905 f = f * 2
904 906 n = p
905 907 i += 1
906 908
907 909 r.append(l)
908 910
909 911 return r
910 912
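The sampling pattern used by between() above is easier to see in isolation: walking the first-parent chain from top towards bottom, it records the nodes at distances 1, 2, 4, 8, ..., which is what lets findincoming below narrow a linear branch with a logarithmic number of round trips. A standalone sketch of the same index arithmetic over plain integers (purely illustrative, not repository code):

    def sample_positions(length):
        # distances below 'top' that between() would report for a linear
        # chain of the given length: 1, 2, 4, 8, ...
        l, i, f, pos = [], 0, 1, 0
        while pos != length:          # 'length' stands in for 'bottom'
            if i == f:
                l.append(pos)
                f = f * 2
            pos += 1                  # step to the first parent
            i += 1
        return l

    print(sample_positions(20))       # [1, 2, 4, 8, 16]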
911 913 def findincoming(self, remote, base=None, heads=None, force=False):
912 914 """Return list of roots of the subsets of missing nodes from remote
913 915
914 916 If base dict is specified, assume that these nodes and their parents
915 917 exist on the remote side and that no child of a node of base exists
916 918 in both remote and self.
917 919 Furthermore, base will be updated to include the nodes that exist
918 920 in both self and remote but none of whose children exist in both.
919 921 If a list of heads is specified, return only nodes which are heads
920 922 or ancestors of these heads.
921 923
922 924 All the ancestors of base are in self and in remote.
923 925 All the descendants of the list returned are missing in self.
924 926 (and so we know that the rest of the nodes are missing in remote, see
925 927 outgoing)
926 928 """
927 929 m = self.changelog.nodemap
928 930 search = []
929 931 fetch = {}
930 932 seen = {}
931 933 seenbranch = {}
932 934 if base == None:
933 935 base = {}
934 936
935 937 if not heads:
936 938 heads = remote.heads()
937 939
938 940 if self.changelog.tip() == nullid:
939 941 base[nullid] = 1
940 942 if heads != [nullid]:
941 943 return [nullid]
942 944 return []
943 945
944 946 # assume we're closer to the tip than the root
945 947 # and start by examining the heads
946 948 self.ui.status(_("searching for changes\n"))
947 949
948 950 unknown = []
949 951 for h in heads:
950 952 if h not in m:
951 953 unknown.append(h)
952 954 else:
953 955 base[h] = 1
954 956
955 957 if not unknown:
956 958 return []
957 959
958 960 req = dict.fromkeys(unknown)
959 961 reqcnt = 0
960 962
961 963 # search through remote branches
962 964 # a 'branch' here is a linear segment of history, with four parts:
963 965 # head, root, first parent, second parent
964 966 # (a branch always has two parents (or none) by definition)
965 967 unknown = remote.branches(unknown)
966 968 while unknown:
967 969 r = []
968 970 while unknown:
969 971 n = unknown.pop(0)
970 972 if n[0] in seen:
971 973 continue
972 974
973 975 self.ui.debug(_("examining %s:%s\n")
974 976 % (short(n[0]), short(n[1])))
975 977 if n[0] == nullid: # found the end of the branch
976 978 pass
977 979 elif n in seenbranch:
978 980 self.ui.debug(_("branch already found\n"))
979 981 continue
980 982 elif n[1] and n[1] in m: # do we know the base?
981 983 self.ui.debug(_("found incomplete branch %s:%s\n")
982 984 % (short(n[0]), short(n[1])))
983 985 search.append(n) # schedule branch range for scanning
984 986 seenbranch[n] = 1
985 987 else:
986 988 if n[1] not in seen and n[1] not in fetch:
987 989 if n[2] in m and n[3] in m:
988 990 self.ui.debug(_("found new changeset %s\n") %
989 991 short(n[1]))
990 992 fetch[n[1]] = 1 # earliest unknown
991 993 for p in n[2:4]:
992 994 if p in m:
993 995 base[p] = 1 # latest known
994 996
995 997 for p in n[2:4]:
996 998 if p not in req and p not in m:
997 999 r.append(p)
998 1000 req[p] = 1
999 1001 seen[n[0]] = 1
1000 1002
1001 1003 if r:
1002 1004 reqcnt += 1
1003 1005 self.ui.debug(_("request %d: %s\n") %
1004 1006 (reqcnt, " ".join(map(short, r))))
1005 1007 for p in range(0, len(r), 10):
1006 1008 for b in remote.branches(r[p:p+10]):
1007 1009 self.ui.debug(_("received %s:%s\n") %
1008 1010 (short(b[0]), short(b[1])))
1009 1011 unknown.append(b)
1010 1012
1011 1013 # do binary search on the branches we found
1012 1014 while search:
1013 1015 n = search.pop(0)
1014 1016 reqcnt += 1
1015 1017 l = remote.between([(n[0], n[1])])[0]
1016 1018 l.append(n[1])
1017 1019 p = n[0]
1018 1020 f = 1
1019 1021 for i in l:
1020 1022 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1021 1023 if i in m:
1022 1024 if f <= 2:
1023 1025 self.ui.debug(_("found new branch changeset %s\n") %
1024 1026 short(p))
1025 1027 fetch[p] = 1
1026 1028 base[i] = 1
1027 1029 else:
1028 1030 self.ui.debug(_("narrowed branch search to %s:%s\n")
1029 1031 % (short(p), short(i)))
1030 1032 search.append((p, i))
1031 1033 break
1032 1034 p, f = i, f * 2
1033 1035
1034 1036 # sanity check our fetch list
1035 1037 for f in fetch.keys():
1036 1038 if f in m:
1037 1039 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1038 1040
1039 1041 if base.keys() == [nullid]:
1040 1042 if force:
1041 1043 self.ui.warn(_("warning: repository is unrelated\n"))
1042 1044 else:
1043 1045 raise util.Abort(_("repository is unrelated"))
1044 1046
1045 1047 self.ui.note(_("found new changesets starting at ") +
1046 1048 " ".join([short(f) for f in fetch]) + "\n")
1047 1049
1048 1050 self.ui.debug(_("%d total queries\n") % reqcnt)
1049 1051
1050 1052 return fetch.keys()
1051 1053
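Taken together, the loop above needs only a few batched 'branches' requests (ten at a time) plus 'between' round trips for the binary search, and reqcnt tracks the total. A rough sketch of driving the discovery from a caller, under the assumption that hg.repository() dispatches ssh:// URLs to the remote proxy class (paths and URL are illustrative):

    from mercurial import hg, ui

    u = ui.ui()
    repo = hg.repository(u, ".")
    remote = hg.repository(u, "ssh://example.com/some/repo")  # illustrative URL

    base = {}                     # filled in with nodes common to both sides
    fetch = repo.findincoming(remote, base=base)
    u.status("missing %d branch root(s), %d common node(s)\n"
             % (len(fetch), len(base)))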
1052 1054 def findoutgoing(self, remote, base=None, heads=None, force=False):
1053 1055 """Return list of nodes that are roots of subsets not in remote
1054 1056
1055 1057 If base dict is specified, assume that these nodes and their parents
1056 1058 exist on the remote side.
1057 1059 If a list of heads is specified, return only nodes which are heads
1058 1060 or ancestors of these heads, and return a second element which
1059 1061 contains all remote heads which get new children.
1060 1062 """
1061 1063 if base == None:
1062 1064 base = {}
1063 1065 self.findincoming(remote, base, heads, force=force)
1064 1066
1065 1067 self.ui.debug(_("common changesets up to ")
1066 1068 + " ".join(map(short, base.keys())) + "\n")
1067 1069
1068 1070 remain = dict.fromkeys(self.changelog.nodemap)
1069 1071
1070 1072 # prune everything remote has from the tree
1071 1073 del remain[nullid]
1072 1074 remove = base.keys()
1073 1075 while remove:
1074 1076 n = remove.pop(0)
1075 1077 if n in remain:
1076 1078 del remain[n]
1077 1079 for p in self.changelog.parents(n):
1078 1080 remove.append(p)
1079 1081
1080 1082 # find every node whose parents have been pruned
1081 1083 subset = []
1082 1084 # find every remote head that will get new children
1083 1085 updated_heads = {}
1084 1086 for n in remain:
1085 1087 p1, p2 = self.changelog.parents(n)
1086 1088 if p1 not in remain and p2 not in remain:
1087 1089 subset.append(n)
1088 1090 if heads:
1089 1091 if p1 in heads:
1090 1092 updated_heads[p1] = True
1091 1093 if p2 in heads:
1092 1094 updated_heads[p2] = True
1093 1095
1094 1096 # this is the set of all roots we have to push
1095 1097 if heads:
1096 1098 return subset, updated_heads.keys()
1097 1099 else:
1098 1100 return subset
1099 1101
1100 1102 def pull(self, remote, heads=None, force=False):
1101 1103 l = self.lock()
1102 1104
1103 1105 fetch = self.findincoming(remote, force=force)
1104 1106 if fetch == [nullid]:
1105 1107 self.ui.status(_("requesting all changes\n"))
1106 1108
1107 1109 if not fetch:
1108 1110 self.ui.status(_("no changes found\n"))
1109 1111 return 0
1110 1112
1111 1113 if heads is None:
1112 1114 cg = remote.changegroup(fetch, 'pull')
1113 1115 else:
1114 1116 cg = remote.changegroupsubset(fetch, heads, 'pull')
1115 1117 return self.addchangegroup(cg, 'pull')
1116 1118
1117 1119 def push(self, remote, force=False, revs=None):
1118 1120 # there are two ways to push to remote repo:
1119 1121 #
1120 1122 # addchangegroup assumes local user can lock remote
1121 1123 # repo (local filesystem, old ssh servers).
1122 1124 #
1123 1125 # unbundle assumes local user cannot lock remote repo (new ssh
1124 1126 # servers, http servers).
1125 1127
1126 1128 if 'unbundle' in remote.capabilities:
1127 1129 return self.push_unbundle(remote, force, revs)
1128 1130 return self.push_addchangegroup(remote, force, revs)
1129 1131
1130 1132 def prepush(self, remote, force, revs):
1131 1133 base = {}
1132 1134 remote_heads = remote.heads()
1133 1135 inc = self.findincoming(remote, base, remote_heads, force=force)
1134 1136 if not force and inc:
1135 1137 self.ui.warn(_("abort: unsynced remote changes!\n"))
1136 1138 self.ui.status(_("(did you forget to sync?"
1137 1139 " use push -f to force)\n"))
1138 1140 return None, 1
1139 1141
1140 1142 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1141 1143 if revs is not None:
1142 1144 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1143 1145 else:
1144 1146 bases, heads = update, self.changelog.heads()
1145 1147
1146 1148 if not bases:
1147 1149 self.ui.status(_("no changes found\n"))
1148 1150 return None, 1
1149 1151 elif not force:
1150 1152 # FIXME we don't properly detect creation of new heads
1151 1153 # in the push -r case, assume the user knows what he's doing
1152 1154 if not revs and len(remote_heads) < len(heads) \
1153 1155 and remote_heads != [nullid]:
1154 1156 self.ui.warn(_("abort: push creates new remote branches!\n"))
1155 1157 self.ui.status(_("(did you forget to merge?"
1156 1158 " use push -f to force)\n"))
1157 1159 return None, 1
1158 1160
1159 1161 if revs is None:
1160 1162 cg = self.changegroup(update, 'push')
1161 1163 else:
1162 1164 cg = self.changegroupsubset(update, revs, 'push')
1163 1165 return cg, remote_heads
1164 1166
1165 1167 def push_addchangegroup(self, remote, force, revs):
1166 1168 lock = remote.lock()
1167 1169
1168 1170 ret = self.prepush(remote, force, revs)
1169 1171 if ret[0] is not None:
1170 1172 cg, remote_heads = ret
1171 1173 return remote.addchangegroup(cg, 'push')
1172 1174 return ret[1]
1173 1175
1174 1176 def push_unbundle(self, remote, force, revs):
1175 1177 # local repo finds heads on server, finds out what revs it
1176 1178 # must push. once revs transferred, if server finds it has
1177 1179 # different heads (someone else won commit/push race), server
1178 1180 # aborts.
1179 1181
1180 1182 ret = self.prepush(remote, force, revs)
1181 1183 if ret[0] is not None:
1182 1184 cg, remote_heads = ret
1183 1185 if force: remote_heads = ['force']
1184 1186 return remote.unbundle(cg, remote_heads, 'push')
1185 1187 return ret[1]
1186 1188
1187 1189 def changegroupsubset(self, bases, heads, source):
1188 1190 """This function generates a changegroup consisting of all the nodes
1189 1191 that are descendants of any of the bases, and ancestors of any of
1190 1192 the heads.
1191 1193
1192 1194 It is fairly complex as determining which filenodes and which
1193 1195 manifest nodes need to be included for the changeset to be complete
1194 1196 is non-trivial.
1195 1197
1196 1198 Another wrinkle is doing the reverse, figuring out which changeset in
1197 1199 the changegroup a particular filenode or manifestnode belongs to."""
1198 1200
1199 1201 self.hook('preoutgoing', throw=True, source=source)
1200 1202
1201 1203 # Set up some initial variables
1202 1204 # Make it easy to refer to self.changelog
1203 1205 cl = self.changelog
1204 1206 # msng is short for missing - compute the list of changesets in this
1205 1207 # changegroup.
1206 1208 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1207 1209 # Some bases may turn out to be superfluous, and some heads may be
1208 1210 # too. nodesbetween will return the minimal set of bases and heads
1209 1211 # necessary to re-create the changegroup.
1210 1212
1211 1213 # Known heads are the list of heads that it is assumed the recipient
1212 1214 # of this changegroup will know about.
1213 1215 knownheads = {}
1214 1216 # We assume that all parents of bases are known heads.
1215 1217 for n in bases:
1216 1218 for p in cl.parents(n):
1217 1219 if p != nullid:
1218 1220 knownheads[p] = 1
1219 1221 knownheads = knownheads.keys()
1220 1222 if knownheads:
1221 1223 # Now that we know what heads are known, we can compute which
1222 1224 # changesets are known. The recipient must know about all
1223 1225 # changesets required to reach the known heads from the null
1224 1226 # changeset.
1225 1227 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1226 1228 junk = None
1227 1229 # Transform the list into an ersatz set.
1228 1230 has_cl_set = dict.fromkeys(has_cl_set)
1229 1231 else:
1230 1232 # If there were no known heads, the recipient cannot be assumed to
1231 1233 # know about any changesets.
1232 1234 has_cl_set = {}
1233 1235
1234 1236 # Make it easy to refer to self.manifest
1235 1237 mnfst = self.manifest
1236 1238 # We don't know which manifests are missing yet
1237 1239 msng_mnfst_set = {}
1238 1240 # Nor do we know which filenodes are missing.
1239 1241 msng_filenode_set = {}
1240 1242
1241 1243 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1242 1244 junk = None
1243 1245
1244 1246 # A changeset always belongs to itself, so the changenode lookup
1245 1247 # function for a changenode is identity.
1246 1248 def identity(x):
1247 1249 return x
1248 1250
1249 1251 # A function generating function. Sets up an environment for the
1250 1252 # inner function.
1251 1253 def cmp_by_rev_func(revlog):
1252 1254 # Compare two nodes by their revision number in the environment's
1253 1255 # revision history. Since the revision number both represents the
1254 1256 # most efficient order to read the nodes in, and represents a
1255 1257 # topological sorting of the nodes, this function is often useful.
1256 1258 def cmp_by_rev(a, b):
1257 1259 return cmp(revlog.rev(a), revlog.rev(b))
1258 1260 return cmp_by_rev
1259 1261
1260 1262 # If we determine that a particular file or manifest node must be a
1261 1263 # node that the recipient of the changegroup will already have, we can
1262 1264 # also assume the recipient will have all the parents. This function
1263 1265 # prunes them from the set of missing nodes.
1264 1266 def prune_parents(revlog, hasset, msngset):
1265 1267 haslst = hasset.keys()
1266 1268 haslst.sort(cmp_by_rev_func(revlog))
1267 1269 for node in haslst:
1268 1270 parentlst = [p for p in revlog.parents(node) if p != nullid]
1269 1271 while parentlst:
1270 1272 n = parentlst.pop()
1271 1273 if n not in hasset:
1272 1274 hasset[n] = 1
1273 1275 p = [p for p in revlog.parents(n) if p != nullid]
1274 1276 parentlst.extend(p)
1275 1277 for n in hasset:
1276 1278 msngset.pop(n, None)
1277 1279
1278 1280 # This is a function generating function used to set up an environment
1279 1281 # for the inner function to execute in.
1280 1282 def manifest_and_file_collector(changedfileset):
1281 1283 # This is an information gathering function that gathers
1282 1284 # information from each changeset node that goes out as part of
1283 1285 # the changegroup. The information gathered is a list of which
1284 1286 # manifest nodes are potentially required (the recipient may
1285 1287 # already have them) and a total list of all files which were
1286 1288 # changed in any changeset in the changegroup.
1287 1289 #
1288 1290 # We also remember, for each manifest, the first changenode we saw
1289 1291 # that references it, so we can later determine which changenode
1290 1292 # 'owns' the manifest.
1291 1293 def collect_manifests_and_files(clnode):
1292 1294 c = cl.read(clnode)
1293 1295 for f in c[3]:
1294 1296 # This is to make sure we only have one instance of each
1295 1297 # filename string for each filename.
1296 1298 changedfileset.setdefault(f, f)
1297 1299 msng_mnfst_set.setdefault(c[0], clnode)
1298 1300 return collect_manifests_and_files
1299 1301
1300 1302 # Figure out which manifest nodes (of the ones we think might be part
1301 1303 # of the changegroup) the recipient must know about and remove them
1302 1304 # from the changegroup.
1303 1305 def prune_manifests():
1304 1306 has_mnfst_set = {}
1305 1307 for n in msng_mnfst_set:
1306 1308 # If a 'missing' manifest thinks it belongs to a changenode
1307 1309 # the recipient is assumed to have, obviously the recipient
1308 1310 # must have that manifest.
1309 1311 linknode = cl.node(mnfst.linkrev(n))
1310 1312 if linknode in has_cl_set:
1311 1313 has_mnfst_set[n] = 1
1312 1314 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1313 1315
1314 1316 # Use the information collected in collect_manifests_and_files to say
1315 1317 # which changenode any manifestnode belongs to.
1316 1318 def lookup_manifest_link(mnfstnode):
1317 1319 return msng_mnfst_set[mnfstnode]
1318 1320
1319 1321 # A function generating function that sets up the initial environment for
1320 1322 # the inner function.
1321 1323 def filenode_collector(changedfiles):
1322 1324 next_rev = [0]
1323 1325 # This gathers information from each manifestnode included in the
1324 1326 # changegroup about which filenodes the manifest node references
1325 1327 # so we can include those in the changegroup too.
1326 1328 #
1327 1329 # It also remembers which changenode each filenode belongs to. It
1328 1330 # does this by assuming the a filenode belongs to the changenode
1329 1331 # does this by assuming that a filenode belongs to the changenode
1330 1332 def collect_msng_filenodes(mnfstnode):
1331 1333 r = mnfst.rev(mnfstnode)
1332 1334 if r == next_rev[0]:
1333 1335 # If the last rev we looked at was the one just previous,
1334 1336 # we only need to see a diff.
1335 1337 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1336 1338 # For each line in the delta
1337 1339 for dline in delta.splitlines():
1338 1340 # get the filename and filenode for that line
1339 1341 f, fnode = dline.split('\0')
1340 1342 fnode = bin(fnode[:40])
1341 1343 f = changedfiles.get(f, None)
1342 1344 # And if the file is in the list of files we care
1343 1345 # about.
1344 1346 if f is not None:
1345 1347 # Get the changenode this manifest belongs to
1346 1348 clnode = msng_mnfst_set[mnfstnode]
1347 1349 # Create the set of filenodes for the file if
1348 1350 # there isn't one already.
1349 1351 ndset = msng_filenode_set.setdefault(f, {})
1350 1352 # And set the filenode's changelog node to the
1351 1353 # manifest's if it hasn't been set already.
1352 1354 ndset.setdefault(fnode, clnode)
1353 1355 else:
1354 1356 # Otherwise we need a full manifest.
1355 1357 m = mnfst.read(mnfstnode)
1356 1358 # For every file in we care about.
1357 1359 for f in changedfiles:
1358 1360 fnode = m.get(f, None)
1359 1361 # If it's in the manifest
1360 1362 if fnode is not None:
1361 1363 # See comments above.
1362 1364 clnode = msng_mnfst_set[mnfstnode]
1363 1365 ndset = msng_filenode_set.setdefault(f, {})
1364 1366 ndset.setdefault(fnode, clnode)
1365 1367 # Remember the revision we hope to see next.
1366 1368 next_rev[0] = r + 1
1367 1369 return collect_msng_filenodes
1368 1370
1369 1371 # We have a list of filenodes we think we need for a file; let's remove
1370 1372 # all those we know the recipient must have.
1371 1373 def prune_filenodes(f, filerevlog):
1372 1374 msngset = msng_filenode_set[f]
1373 1375 hasset = {}
1374 1376 # If a 'missing' filenode thinks it belongs to a changenode we
1375 1377 # assume the recipient must have, then the recipient must have
1376 1378 # that filenode.
1377 1379 for n in msngset:
1378 1380 clnode = cl.node(filerevlog.linkrev(n))
1379 1381 if clnode in has_cl_set:
1380 1382 hasset[n] = 1
1381 1383 prune_parents(filerevlog, hasset, msngset)
1382 1384
1383 1385 # A function generating function that sets up a context for the
1384 1386 # inner function.
1385 1387 def lookup_filenode_link_func(fname):
1386 1388 msngset = msng_filenode_set[fname]
1387 1389 # Lookup the changenode the filenode belongs to.
1388 1390 def lookup_filenode_link(fnode):
1389 1391 return msngset[fnode]
1390 1392 return lookup_filenode_link
1391 1393
1392 1394 # Now that we have all these utility functions to help out and
1393 1395 # logically divide up the task, generate the group.
1394 1396 def gengroup():
1395 1397 # The set of changed files starts empty.
1396 1398 changedfiles = {}
1397 1399 # Create a changenode group generator that will call our functions
1398 1400 # back to lookup the owning changenode and collect information.
1399 1401 group = cl.group(msng_cl_lst, identity,
1400 1402 manifest_and_file_collector(changedfiles))
1401 1403 for chnk in group:
1402 1404 yield chnk
1403 1405
1404 1406 # The list of manifests has been collected by the generator
1405 1407 # calling our functions back.
1406 1408 prune_manifests()
1407 1409 msng_mnfst_lst = msng_mnfst_set.keys()
1408 1410 # Sort the manifestnodes by revision number.
1409 1411 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1410 1412 # Create a generator for the manifestnodes that calls our lookup
1411 1413 # and data collection functions back.
1412 1414 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1413 1415 filenode_collector(changedfiles))
1414 1416 for chnk in group:
1415 1417 yield chnk
1416 1418
1417 1419 # These are no longer needed, dereference and toss the memory for
1418 1420 # them.
1419 1421 msng_mnfst_lst = None
1420 1422 msng_mnfst_set.clear()
1421 1423
1422 1424 changedfiles = changedfiles.keys()
1423 1425 changedfiles.sort()
1424 1426 # Go through all our files in order sorted by name.
1425 1427 for fname in changedfiles:
1426 1428 filerevlog = self.file(fname)
1427 1429 # Toss out the filenodes that the recipient isn't really
1428 1430 # missing.
1429 1431 if msng_filenode_set.has_key(fname):
1430 1432 prune_filenodes(fname, filerevlog)
1431 1433 msng_filenode_lst = msng_filenode_set[fname].keys()
1432 1434 else:
1433 1435 msng_filenode_lst = []
1434 1436 # If any filenodes are left, generate the group for them,
1435 1437 # otherwise don't bother.
1436 1438 if len(msng_filenode_lst) > 0:
1437 1439 yield changegroup.genchunk(fname)
1438 1440 # Sort the filenodes by their revision #
1439 1441 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1440 1442 # Create a group generator and only pass in a changenode
1441 1443 # lookup function as we need to collect no information
1442 1444 # from filenodes.
1443 1445 group = filerevlog.group(msng_filenode_lst,
1444 1446 lookup_filenode_link_func(fname))
1445 1447 for chnk in group:
1446 1448 yield chnk
1447 1449 if msng_filenode_set.has_key(fname):
1448 1450 # Don't need this anymore, toss it to free memory.
1449 1451 del msng_filenode_set[fname]
1450 1452 # Signal that no more groups are left.
1451 1453 yield changegroup.closechunk()
1452 1454
1453 1455 if msng_cl_lst:
1454 1456 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1455 1457
1456 1458 return util.chunkbuffer(gengroup())
1457 1459
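The generator above, like changegroup() below, emits one flat stream: changelog chunks, then manifest chunks, then for each changed file a filename chunk followed by that file's revision chunks, with changegroup.closechunk() terminating each section and the stream as a whole. A sketch of a consumer that merely counts what it receives, mirroring the order addchangegroup() reads things in (it assumes a file-like changegroup source such as the chunkbuffer returned here):

    from mercurial import changegroup

    def summarize(source):
        # changelog section: roughly one chunk per changeset
        csets = len(list(changegroup.chunkiter(source)))
        # manifest section
        manifests = len(list(changegroup.chunkiter(source)))
        # one section per changed file, introduced by a filename chunk;
        # an empty chunk ends the whole stream
        files = 0
        while True:
            fname = changegroup.getchunk(source)
            if not fname:
                break
            files += 1
            for chunk in changegroup.chunkiter(source):
                pass
        return csets, manifests, files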
1458 1460 def changegroup(self, basenodes, source):
1459 1461 """Generate a changegroup of all nodes that we have that a recipient
1460 1462 doesn't.
1461 1463
1462 1464 This is much easier than the previous function as we can assume that
1463 1465 the recipient has any changenode we aren't sending them."""
1464 1466
1465 1467 self.hook('preoutgoing', throw=True, source=source)
1466 1468
1467 1469 cl = self.changelog
1468 1470 nodes = cl.nodesbetween(basenodes, None)[0]
1469 1471 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1470 1472
1471 1473 def identity(x):
1472 1474 return x
1473 1475
1474 1476 def gennodelst(revlog):
1475 1477 for r in xrange(0, revlog.count()):
1476 1478 n = revlog.node(r)
1477 1479 if revlog.linkrev(n) in revset:
1478 1480 yield n
1479 1481
1480 1482 def changed_file_collector(changedfileset):
1481 1483 def collect_changed_files(clnode):
1482 1484 c = cl.read(clnode)
1483 1485 for fname in c[3]:
1484 1486 changedfileset[fname] = 1
1485 1487 return collect_changed_files
1486 1488
1487 1489 def lookuprevlink_func(revlog):
1488 1490 def lookuprevlink(n):
1489 1491 return cl.node(revlog.linkrev(n))
1490 1492 return lookuprevlink
1491 1493
1492 1494 def gengroup():
1493 1495 # construct a list of all changed files
1494 1496 changedfiles = {}
1495 1497
1496 1498 for chnk in cl.group(nodes, identity,
1497 1499 changed_file_collector(changedfiles)):
1498 1500 yield chnk
1499 1501 changedfiles = changedfiles.keys()
1500 1502 changedfiles.sort()
1501 1503
1502 1504 mnfst = self.manifest
1503 1505 nodeiter = gennodelst(mnfst)
1504 1506 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1505 1507 yield chnk
1506 1508
1507 1509 for fname in changedfiles:
1508 1510 filerevlog = self.file(fname)
1509 1511 nodeiter = gennodelst(filerevlog)
1510 1512 nodeiter = list(nodeiter)
1511 1513 if nodeiter:
1512 1514 yield changegroup.genchunk(fname)
1513 1515 lookup = lookuprevlink_func(filerevlog)
1514 1516 for chnk in filerevlog.group(nodeiter, lookup):
1515 1517 yield chnk
1516 1518
1517 1519 yield changegroup.closechunk()
1518 1520
1519 1521 if nodes:
1520 1522 self.hook('outgoing', node=hex(nodes[0]), source=source)
1521 1523
1522 1524 return util.chunkbuffer(gengroup())
1523 1525
1524 1526 def addchangegroup(self, source, srctype):
1525 1527 """add changegroup to repo.
1526 1528 returns number of heads modified or added + 1."""
1527 1529
1528 1530 def csmap(x):
1529 1531 self.ui.debug(_("add changeset %s\n") % short(x))
1530 1532 return cl.count()
1531 1533
1532 1534 def revmap(x):
1533 1535 return cl.rev(x)
1534 1536
1535 1537 if not source:
1536 1538 return 0
1537 1539
1538 1540 self.hook('prechangegroup', throw=True, source=srctype)
1539 1541
1540 1542 changesets = files = revisions = 0
1541 1543
1542 1544 tr = self.transaction()
1543 1545
1544 1546 # write changelog data to temp files so concurrent readers will not see
1545 1547 # an inconsistent view
1546 1548 cl = None
1547 1549 try:
1548 1550 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1549 1551
1550 1552 oldheads = len(cl.heads())
1551 1553
1552 1554 # pull off the changeset group
1553 1555 self.ui.status(_("adding changesets\n"))
1554 1556 cor = cl.count() - 1
1555 1557 chunkiter = changegroup.chunkiter(source)
1556 1558 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1557 1559 raise util.Abort(_("received changelog group is empty"))
1558 1560 cnr = cl.count() - 1
1559 1561 changesets = cnr - cor
1560 1562
1561 1563 # pull off the manifest group
1562 1564 self.ui.status(_("adding manifests\n"))
1563 1565 chunkiter = changegroup.chunkiter(source)
1564 1566 # no need to check for empty manifest group here:
1565 1567 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1566 1568 # no new manifest will be created and the manifest group will
1567 1569 # be empty during the pull
1568 1570 self.manifest.addgroup(chunkiter, revmap, tr)
1569 1571
1570 1572 # process the files
1571 1573 self.ui.status(_("adding file changes\n"))
1572 1574 while 1:
1573 1575 f = changegroup.getchunk(source)
1574 1576 if not f:
1575 1577 break
1576 1578 self.ui.debug(_("adding %s revisions\n") % f)
1577 1579 fl = self.file(f)
1578 1580 o = fl.count()
1579 1581 chunkiter = changegroup.chunkiter(source)
1580 1582 if fl.addgroup(chunkiter, revmap, tr) is None:
1581 1583 raise util.Abort(_("received file revlog group is empty"))
1582 1584 revisions += fl.count() - o
1583 1585 files += 1
1584 1586
1585 1587 cl.writedata()
1586 1588 finally:
1587 1589 if cl:
1588 1590 cl.cleanup()
1589 1591
1590 1592 # make changelog see real files again
1591 1593 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1592 1594 self.changelog.checkinlinesize(tr)
1593 1595
1594 1596 newheads = len(self.changelog.heads())
1595 1597 heads = ""
1596 1598 if oldheads and newheads != oldheads:
1597 1599 heads = _(" (%+d heads)") % (newheads - oldheads)
1598 1600
1599 1601 self.ui.status(_("added %d changesets"
1600 1602 " with %d changes to %d files%s\n")
1601 1603 % (changesets, revisions, files, heads))
1602 1604
1603 1605 if changesets > 0:
1604 1606 self.hook('pretxnchangegroup', throw=True,
1605 1607 node=hex(self.changelog.node(cor+1)), source=srctype)
1606 1608
1607 1609 tr.close()
1608 1610
1609 1611 if changesets > 0:
1610 1612 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1611 1613 source=srctype)
1612 1614
1613 1615 for i in range(cor + 1, cnr + 1):
1614 1616 self.hook("incoming", node=hex(self.changelog.node(i)),
1615 1617 source=srctype)
1616 1618
1617 1619 return newheads - oldheads + 1
1618 1620
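addchangegroup() fires the prechangegroup, pretxnchangegroup, changegroup and incoming hooks in that order, passing the relevant changeset as node= (which shell hooks see as $HG_NODE) and the srctype as source=. An hgrc sketch along those lines; the /path/to/... commands are placeholders:

    [hooks]
    # runs before the transaction commits; a non-zero exit aborts the push/pull
    # ($HG_NODE is the first new changeset, $HG_SOURCE the srctype)
    pretxnchangegroup = /path/to/check-changes
    # runs once after each added changegroup, then once per added changeset
    changegroup = hg update
    incoming = /path/to/notify "$HG_NODE"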
1619 1621 def update(self, node, allow=False, force=False, choose=None,
1620 1622 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1621 1623 pl = self.dirstate.parents()
1622 1624 if not force and pl[1] != nullid:
1623 1625 raise util.Abort(_("outstanding uncommitted merges"))
1624 1626
1625 1627 err = False
1626 1628
1627 1629 p1, p2 = pl[0], node
1628 1630 pa = self.changelog.ancestor(p1, p2)
1629 1631 m1n = self.changelog.read(p1)[0]
1630 1632 m2n = self.changelog.read(p2)[0]
1631 1633 man = self.manifest.ancestor(m1n, m2n)
1632 1634 m1 = self.manifest.read(m1n)
1633 1635 mf1 = self.manifest.readflags(m1n)
1634 1636 m2 = self.manifest.read(m2n).copy()
1635 1637 mf2 = self.manifest.readflags(m2n)
1636 1638 ma = self.manifest.read(man)
1637 1639 mfa = self.manifest.readflags(man)
1638 1640
1639 1641 modified, added, removed, deleted, unknown = self.changes()
1640 1642
1641 1643 # is this a jump, or a merge? i.e. is there a linear path
1642 1644 # from p1 to p2?
1643 1645 linear_path = (pa == p1 or pa == p2)
1644 1646
1645 1647 if allow and linear_path:
1646 1648 raise util.Abort(_("there is nothing to merge, "
1647 1649 "just use 'hg update'"))
1648 1650 if allow and not forcemerge:
1649 1651 if modified or added or removed:
1650 1652 raise util.Abort(_("outstanding uncommitted changes"))
1651 1653
1652 1654 if not forcemerge and not force:
1653 1655 for f in unknown:
1654 1656 if f in m2:
1655 1657 t1 = self.wread(f)
1656 1658 t2 = self.file(f).read(m2[f])
1657 1659 if cmp(t1, t2) != 0:
1658 1660 raise util.Abort(_("'%s' already exists in the working"
1659 1661 " dir and differs from remote") % f)
1660 1662
1661 1663 # resolve the manifest to determine which files
1662 1664 # we care about merging
1663 1665 self.ui.note(_("resolving manifests\n"))
1664 1666 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1665 1667 (force, allow, moddirstate, linear_path))
1666 1668 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1667 1669 (short(man), short(m1n), short(m2n)))
1668 1670
1669 1671 merge = {}
1670 1672 get = {}
1671 1673 remove = []
1672 1674
1673 1675 # construct a working dir manifest
1674 1676 mw = m1.copy()
1675 1677 mfw = mf1.copy()
1676 1678 umap = dict.fromkeys(unknown)
1677 1679
1678 1680 for f in added + modified + unknown:
1679 1681 mw[f] = ""
1680 1682 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1681 1683
1682 1684 if moddirstate and not wlock:
1683 1685 wlock = self.wlock()
1684 1686
1685 1687 for f in deleted + removed:
1686 1688 if f in mw:
1687 1689 del mw[f]
1688 1690
1689 1691 # If we're jumping between revisions (as opposed to merging),
1690 1692 # and if neither the working directory nor the target rev has
1691 1693 # the file, then we need to remove it from the dirstate, to
1692 1694 # prevent the dirstate from listing the file when it is no
1693 1695 # longer in the manifest.
1694 1696 if moddirstate and linear_path and f not in m2:
1695 1697 self.dirstate.forget((f,))
1696 1698
1697 1699 # Compare manifests
1698 1700 for f, n in mw.iteritems():
1699 1701 if choose and not choose(f):
1700 1702 continue
1701 1703 if f in m2:
1702 1704 s = 0
1703 1705
1704 1706 # is the wfile new since m1, and match m2?
1705 1707 if f not in m1:
1706 1708 t1 = self.wread(f)
1707 1709 t2 = self.file(f).read(m2[f])
1708 1710 if cmp(t1, t2) == 0:
1709 1711 n = m2[f]
1710 1712 del t1, t2
1711 1713
1712 1714 # are files different?
1713 1715 if n != m2[f]:
1714 1716 a = ma.get(f, nullid)
1715 1717 # are both different from the ancestor?
1716 1718 if n != a and m2[f] != a:
1717 1719 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1718 1720 # merge executable bits
1719 1721 # "if we changed or they changed, change in merge"
1720 1722 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1721 1723 mode = ((a^b) | (a^c)) ^ a
1722 1724 merge[f] = (m1.get(f, nullid), m2[f], mode)
1723 1725 s = 1
1724 1726 # are we clobbering?
1725 1727 # is remote's version newer?
1726 1728 # or are we going back in time?
1727 1729 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1728 1730 self.ui.debug(_(" remote %s is newer, get\n") % f)
1729 1731 get[f] = m2[f]
1730 1732 s = 1
1731 1733 elif f in umap or f in added:
1732 1734 # this unknown file is the same as the checkout
1733 1735 # we need to reset the dirstate if the file was added
1734 1736 get[f] = m2[f]
1735 1737
1736 1738 if not s and mfw[f] != mf2[f]:
1737 1739 if force:
1738 1740 self.ui.debug(_(" updating permissions for %s\n") % f)
1739 1741 util.set_exec(self.wjoin(f), mf2[f])
1740 1742 else:
1741 1743 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1742 1744 mode = ((a^b) | (a^c)) ^ a
1743 1745 if mode != b:
1744 1746 self.ui.debug(_(" updating permissions for %s\n")
1745 1747 % f)
1746 1748 util.set_exec(self.wjoin(f), mode)
1747 1749 del m2[f]
1748 1750 elif f in ma:
1749 1751 if n != ma[f]:
1750 1752 r = _("d")
1751 1753 if not force and (linear_path or allow):
1752 1754 r = self.ui.prompt(
1753 1755 (_(" local changed %s which remote deleted\n") % f) +
1754 1756 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1755 1757 if r == _("d"):
1756 1758 remove.append(f)
1757 1759 else:
1758 1760 self.ui.debug(_("other deleted %s\n") % f)
1759 1761 remove.append(f) # other deleted it
1760 1762 else:
1761 1763 # file is created on branch or in working directory
1762 1764 if force and f not in umap:
1763 1765 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1764 1766 remove.append(f)
1765 1767 elif n == m1.get(f, nullid): # same as parent
1766 1768 if p2 == pa: # going backwards?
1767 1769 self.ui.debug(_("remote deleted %s\n") % f)
1768 1770 remove.append(f)
1769 1771 else:
1770 1772 self.ui.debug(_("local modified %s, keeping\n") % f)
1771 1773 else:
1772 1774 self.ui.debug(_("working dir created %s, keeping\n") % f)
1773 1775
1774 1776 for f, n in m2.iteritems():
1775 1777 if choose and not choose(f):
1776 1778 continue
1777 1779 if f[0] == "/":
1778 1780 continue
1779 1781 if f in ma and n != ma[f]:
1780 1782 r = _("k")
1781 1783 if not force and (linear_path or allow):
1782 1784 r = self.ui.prompt(
1783 1785 (_("remote changed %s which local deleted\n") % f) +
1784 1786 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1785 1787 if r == _("k"):
1786 1788 get[f] = n
1787 1789 elif f not in ma:
1788 1790 self.ui.debug(_("remote created %s\n") % f)
1789 1791 get[f] = n
1790 1792 else:
1791 1793 if force or p2 == pa: # going backwards?
1792 1794 self.ui.debug(_("local deleted %s, recreating\n") % f)
1793 1795 get[f] = n
1794 1796 else:
1795 1797 self.ui.debug(_("local deleted %s\n") % f)
1796 1798
1797 1799 del mw, m1, m2, ma
1798 1800
1799 1801 if force:
1800 1802 for f in merge:
1801 1803 get[f] = merge[f][1]
1802 1804 merge = {}
1803 1805
1804 1806 if linear_path or force:
1805 1807 # we don't need to do any magic, just jump to the new rev
1806 1808 branch_merge = False
1807 1809 p1, p2 = p2, nullid
1808 1810 else:
1809 1811 if not allow:
1810 1812 self.ui.status(_("this update spans a branch"
1811 1813 " affecting the following files:\n"))
1812 1814 fl = merge.keys() + get.keys()
1813 1815 fl.sort()
1814 1816 for f in fl:
1815 1817 cf = ""
1816 1818 if f in merge:
1817 1819 cf = _(" (resolve)")
1818 1820 self.ui.status(" %s%s\n" % (f, cf))
1819 1821 self.ui.warn(_("aborting update spanning branches!\n"))
1820 1822 self.ui.status(_("(use 'hg merge' to merge across branches"
1821 1823 " or 'hg update -C' to lose changes)\n"))
1822 1824 return 1
1823 1825 branch_merge = True
1824 1826
1825 1827 xp1 = hex(p1)
1826 1828 xp2 = hex(p2)
1827 1829 if p2 == nullid: xxp2 = ''
1828 1830 else: xxp2 = xp2
1829 1831
1830 1832 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1831 1833
1832 1834 # get the files we don't need to change
1833 1835 files = get.keys()
1834 1836 files.sort()
1835 1837 for f in files:
1836 1838 if f[0] == "/":
1837 1839 continue
1838 1840 self.ui.note(_("getting %s\n") % f)
1839 1841 t = self.file(f).read(get[f])
1840 1842 self.wwrite(f, t)
1841 1843 util.set_exec(self.wjoin(f), mf2[f])
1842 1844 if moddirstate:
1843 1845 if branch_merge:
1844 1846 self.dirstate.update([f], 'n', st_mtime=-1)
1845 1847 else:
1846 1848 self.dirstate.update([f], 'n')
1847 1849
1848 1850 # merge the tricky bits
1849 1851 failedmerge = []
1850 1852 files = merge.keys()
1851 1853 files.sort()
1852 1854 for f in files:
1853 1855 self.ui.status(_("merging %s\n") % f)
1854 1856 my, other, flag = merge[f]
1855 1857 ret = self.merge3(f, my, other, xp1, xp2)
1856 1858 if ret:
1857 1859 err = True
1858 1860 failedmerge.append(f)
1859 1861 util.set_exec(self.wjoin(f), flag)
1860 1862 if moddirstate:
1861 1863 if branch_merge:
1862 1864 # We've done a branch merge, mark this file as merged
1863 1865 # so that we properly record the merger later
1864 1866 self.dirstate.update([f], 'm')
1865 1867 else:
1866 1868 # We've update-merged a locally modified file, so
1867 1869 # we set the dirstate to emulate a normal checkout
1868 1870 # of that file some time in the past. Thus our
1869 1871 # merge will appear as a normal local file
1870 1872 # modification.
1871 1873 f_len = len(self.file(f).read(other))
1872 1874 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1873 1875
1874 1876 remove.sort()
1875 1877 for f in remove:
1876 1878 self.ui.note(_("removing %s\n") % f)
1877 1879 util.audit_path(f)
1878 1880 try:
1879 1881 util.unlink(self.wjoin(f))
1880 1882 except OSError, inst:
1881 1883 if inst.errno != errno.ENOENT:
1882 1884 self.ui.warn(_("update failed to remove %s: %s!\n") %
1883 1885 (f, inst.strerror))
1884 1886 if moddirstate:
1885 1887 if branch_merge:
1886 1888 self.dirstate.update(remove, 'r')
1887 1889 else:
1888 1890 self.dirstate.forget(remove)
1889 1891
1890 1892 if moddirstate:
1891 1893 self.dirstate.setparents(p1, p2)
1892 1894
1893 1895 if show_stats:
1894 1896 stats = ((len(get), _("updated")),
1895 1897 (len(merge) - len(failedmerge), _("merged")),
1896 1898 (len(remove), _("removed")),
1897 1899 (len(failedmerge), _("unresolved")))
1898 1900 note = ", ".join([_("%d files %s") % s for s in stats])
1899 1901 self.ui.status("%s\n" % note)
1900 1902 if moddirstate:
1901 1903 if branch_merge:
1902 1904 if failedmerge:
1903 1905 self.ui.status(_("There are unresolved merges,"
1904 1906 " you can redo the full merge using:\n"
1905 1907 " hg update -C %s\n"
1906 1908 " hg merge %s\n"
1907 1909 % (self.changelog.rev(p1),
1908 1910 self.changelog.rev(p2))))
1909 1911 else:
1910 1912 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1911 1913 elif failedmerge:
1912 1914 self.ui.status(_("There are unresolved merges with"
1913 1915 " locally modified files.\n"))
1914 1916
1915 1917 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1916 1918 return err
1917 1919
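The permission handling in update() above boils down to mode = ((a^b) | (a^c)) ^ a, where a is the ancestor's exec bit, b the working copy's and c the remote's: the result differs from a exactly when either side changed it ("if we changed or they changed, change in merge"). Spelled out for all eight combinations:

    for a in (0, 1):                  # ancestor's exec bit
        for b in (0, 1):              # working copy's exec bit
            for c in (0, 1):          # remote's exec bit
                mode = ((a ^ b) | (a ^ c)) ^ a
                # if only we changed it, mode == b; if only they did, mode == c;
                # if neither changed it, mode == a
                print("%d %d %d -> %d" % (a, b, c, mode))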
1918 1920 def merge3(self, fn, my, other, p1, p2):
1919 1921 """perform a 3-way merge in the working directory"""
1920 1922
1921 1923 def temp(prefix, node):
1922 1924 pre = "%s~%s." % (os.path.basename(fn), prefix)
1923 1925 (fd, name) = tempfile.mkstemp(prefix=pre)
1924 1926 f = os.fdopen(fd, "wb")
1925 1927 self.wwrite(fn, fl.read(node), f)
1926 1928 f.close()
1927 1929 return name
1928 1930
1929 1931 fl = self.file(fn)
1930 1932 base = fl.ancestor(my, other)
1931 1933 a = self.wjoin(fn)
1932 1934 b = temp("base", base)
1933 1935 c = temp("other", other)
1934 1936
1935 1937 self.ui.note(_("resolving %s\n") % fn)
1936 1938 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1937 1939 (fn, short(my), short(other), short(base)))
1938 1940
1939 1941 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1940 1942 or "hgmerge")
1941 1943 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1942 1944 environ={'HG_FILE': fn,
1943 1945 'HG_MY_NODE': p1,
1944 1946 'HG_OTHER_NODE': p2,
1945 1947 'HG_FILE_MY_NODE': hex(my),
1946 1948 'HG_FILE_OTHER_NODE': hex(other),
1947 1949 'HG_FILE_BASE_NODE': hex(base)})
1948 1950 if r:
1949 1951 self.ui.warn(_("merging %s failed!\n") % fn)
1950 1952
1951 1953 os.unlink(b)
1952 1954 os.unlink(c)
1953 1955 return r
1954 1956
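The external program above is run as '<cmd> <local> <base> <other>', must leave the result in the local file, and should return non-zero on failure; HGMERGE, then ui.merge, then the literal "hgmerge" are consulted in that order. As it happens, RCS merge(1) uses the same calling convention and exit status, so a minimal (conflict-marker-only) configuration could look like this sketch:

    [ui]
    # run as: merge <local> <base> <other>; the result must be left in <local>
    # and a non-zero exit makes merge3() report the merge as failed
    merge = merge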
1955 1957 def verify(self):
1956 1958 filelinkrevs = {}
1957 1959 filenodes = {}
1958 1960 changesets = revisions = files = 0
1959 1961 errors = [0]
1960 1962 warnings = [0]
1961 1963 neededmanifests = {}
1962 1964
1963 1965 def err(msg):
1964 1966 self.ui.warn(msg + "\n")
1965 1967 errors[0] += 1
1966 1968
1967 1969 def warn(msg):
1968 1970 self.ui.warn(msg + "\n")
1969 1971 warnings[0] += 1
1970 1972
1971 1973 def checksize(obj, name):
1972 1974 d = obj.checksize()
1973 1975 if d[0]:
1974 1976 err(_("%s data length off by %d bytes") % (name, d[0]))
1975 1977 if d[1]:
1976 1978 err(_("%s index contains %d extra bytes") % (name, d[1]))
1977 1979
1978 1980 def checkversion(obj, name):
1979 1981 if obj.version != revlog.REVLOGV0:
1980 1982 if not revlogv1:
1981 1983 warn(_("warning: `%s' uses revlog format 1") % name)
1982 1984 elif revlogv1:
1983 1985 warn(_("warning: `%s' uses revlog format 0") % name)
1984 1986
1985 1987 revlogv1 = self.revlogversion != revlog.REVLOGV0
1986 1988 if self.ui.verbose or revlogv1 != self.revlogv1:
1987 1989 self.ui.status(_("repository uses revlog format %d\n") %
1988 1990 (revlogv1 and 1 or 0))
1989 1991
1990 1992 seen = {}
1991 1993 self.ui.status(_("checking changesets\n"))
1992 1994 checksize(self.changelog, "changelog")
1993 1995
1994 1996 for i in range(self.changelog.count()):
1995 1997 changesets += 1
1996 1998 n = self.changelog.node(i)
1997 1999 l = self.changelog.linkrev(n)
1998 2000 if l != i:
1999 2001 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2000 2002 if n in seen:
2001 2003 err(_("duplicate changeset at revision %d") % i)
2002 2004 seen[n] = 1
2003 2005
2004 2006 for p in self.changelog.parents(n):
2005 2007 if p not in self.changelog.nodemap:
2006 2008 err(_("changeset %s has unknown parent %s") %
2007 2009 (short(n), short(p)))
2008 2010 try:
2009 2011 changes = self.changelog.read(n)
2010 2012 except KeyboardInterrupt:
2011 2013 self.ui.warn(_("interrupted"))
2012 2014 raise
2013 2015 except Exception, inst:
2014 2016 err(_("unpacking changeset %s: %s") % (short(n), inst))
2015 2017 continue
2016 2018
2017 2019 neededmanifests[changes[0]] = n
2018 2020
2019 2021 for f in changes[3]:
2020 2022 filelinkrevs.setdefault(f, []).append(i)
2021 2023
2022 2024 seen = {}
2023 2025 self.ui.status(_("checking manifests\n"))
2024 2026 checkversion(self.manifest, "manifest")
2025 2027 checksize(self.manifest, "manifest")
2026 2028
2027 2029 for i in range(self.manifest.count()):
2028 2030 n = self.manifest.node(i)
2029 2031 l = self.manifest.linkrev(n)
2030 2032
2031 2033 if l < 0 or l >= self.changelog.count():
2032 2034 err(_("bad manifest link (%d) at revision %d") % (l, i))
2033 2035
2034 2036 if n in neededmanifests:
2035 2037 del neededmanifests[n]
2036 2038
2037 2039 if n in seen:
2038 2040 err(_("duplicate manifest at revision %d") % i)
2039 2041
2040 2042 seen[n] = 1
2041 2043
2042 2044 for p in self.manifest.parents(n):
2043 2045 if p not in self.manifest.nodemap:
2044 2046 err(_("manifest %s has unknown parent %s") %
2045 2047 (short(n), short(p)))
2046 2048
2047 2049 try:
2048 2050 delta = mdiff.patchtext(self.manifest.delta(n))
2049 2051 except KeyboardInterrupt:
2050 2052 self.ui.warn(_("interrupted"))
2051 2053 raise
2052 2054 except Exception, inst:
2053 2055 err(_("unpacking manifest %s: %s") % (short(n), inst))
2054 2056 continue
2055 2057
2056 2058 try:
2057 2059 ff = [ l.split('\0') for l in delta.splitlines() ]
2058 2060 for f, fn in ff:
2059 2061 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2060 2062 except (ValueError, TypeError), inst:
2061 2063 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2062 2064
2063 2065 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2064 2066
2065 2067 for m, c in neededmanifests.items():
2066 2068 err(_("Changeset %s refers to unknown manifest %s") %
2067 2069 (short(m), short(c)))
2068 2070 del neededmanifests
2069 2071
2070 2072 for f in filenodes:
2071 2073 if f not in filelinkrevs:
2072 2074 err(_("file %s in manifest but not in changesets") % f)
2073 2075
2074 2076 for f in filelinkrevs:
2075 2077 if f not in filenodes:
2076 2078 err(_("file %s in changeset but not in manifest") % f)
2077 2079
2078 2080 self.ui.status(_("checking files\n"))
2079 2081 ff = filenodes.keys()
2080 2082 ff.sort()
2081 2083 for f in ff:
2082 2084 if f == "/dev/null":
2083 2085 continue
2084 2086 files += 1
2085 2087 if not f:
2086 2088 err(_("file without name in manifest %s") % short(n))
2087 2089 continue
2088 2090 fl = self.file(f)
2089 2091 checkversion(fl, f)
2090 2092 checksize(fl, f)
2091 2093
2092 2094 nodes = {nullid: 1}
2093 2095 seen = {}
2094 2096 for i in range(fl.count()):
2095 2097 revisions += 1
2096 2098 n = fl.node(i)
2097 2099
2098 2100 if n in seen:
2099 2101 err(_("%s: duplicate revision %d") % (f, i))
2100 2102 if n not in filenodes[f]:
2101 2103 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2102 2104 else:
2103 2105 del filenodes[f][n]
2104 2106
2105 2107 flr = fl.linkrev(n)
2106 2108 if flr not in filelinkrevs.get(f, []):
2107 2109 err(_("%s:%s points to unexpected changeset %d")
2108 2110 % (f, short(n), flr))
2109 2111 else:
2110 2112 filelinkrevs[f].remove(flr)
2111 2113
2112 2114 # verify contents
2113 2115 try:
2114 2116 t = fl.read(n)
2115 2117 except KeyboardInterrupt:
2116 2118 self.ui.warn(_("interrupted"))
2117 2119 raise
2118 2120 except Exception, inst:
2119 2121 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2120 2122
2121 2123 # verify parents
2122 2124 (p1, p2) = fl.parents(n)
2123 2125 if p1 not in nodes:
2124 2126 err(_("file %s:%s unknown parent 1 %s") %
2125 2127 (f, short(n), short(p1)))
2126 2128 if p2 not in nodes:
2127 2129 err(_("file %s:%s unknown parent 2 %s") %
2128 2130 (f, short(n), short(p2)))
2129 2131 nodes[n] = 1
2130 2132
2131 2133 # cross-check
2132 2134 for node in filenodes[f]:
2133 2135 err(_("node %s in manifests not in %s") % (hex(node), f))
2134 2136
2135 2137 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2136 2138 (files, changesets, revisions))
2137 2139
2138 2140 if warnings[0]:
2139 2141 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2140 2142 if errors[0]:
2141 2143 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2142 2144 return 1
2143 2145
2144 2146 # used to avoid circular references so destructors work
2145 2147 def aftertrans(base):
2146 2148 p = base
2147 2149 def a():
2148 2150 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2149 2151 util.rename(os.path.join(p, "journal.dirstate"),
2150 2152 os.path.join(p, "undo.dirstate"))
2151 2153 return a
2152 2154
@@ -1,181 +1,200 b''
1 1 # sshrepo.py - ssh repository proxy class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from remoterepo import *
10 10 from i18n import gettext as _
11 11 from demandload import *
12 12 demandload(globals(), "hg os re stat util")
13 13
14 14 class sshrepository(remoterepository):
15 def __init__(self, ui, path):
15 def __init__(self, ui, path, create=0):
16 16 self.url = path
17 17 self.ui = ui
18 18
19 19 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
20 20 if not m:
21 21 raise hg.RepoError(_("couldn't parse location %s") % path)
22 22
23 23 self.user = m.group(2)
24 24 self.host = m.group(3)
25 25 self.port = m.group(5)
26 26 self.path = m.group(7) or "."
27 27
28 28 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
29 29 args = self.port and ("%s -p %s") % (args, self.port) or args
30 30
31 31 sshcmd = self.ui.config("ui", "ssh", "ssh")
32 32 remotecmd = self.ui.config("ui", "remotecmd", "hg")
33
34 if create:
35 try:
36 self.validate_repo(ui, sshcmd, args, remotecmd)
37 return # the repo is good, nothing more to do
38 except hg.RepoError:
39 pass
40
41 cmd = '%s %s "%s init %s"'
42 cmd = cmd % (sshcmd, args, remotecmd, self.path)
43
44 ui.note('running %s\n' % cmd)
45 res = os.system(cmd)
46 if res != 0:
47 raise hg.RepoError(_("could not create remote repo"))
48
49 self.validate_repo(ui, sshcmd, args, remotecmd)
50
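The new create path above is what allows targeting a repository that does not yet exist on the remote host: the constructor first probes with validate_repo(), and only if that fails runs '<remotecmd> init <path>' over ssh before probing again. Assuming hg.repository() forwards a create flag to this constructor (as the new signature suggests), creating the remote side could look like the sketch below; the URL is purely illustrative:

    from mercurial import hg, ui

    # runs "hg init newrepo" on example.com, then validates the result
    remote = hg.repository(ui.ui(), "ssh://user@example.com/newrepo", create=1)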
51 def validate_repo(self, ui, sshcmd, args, remotecmd):
33 52 cmd = '%s %s "%s -R %s serve --stdio"'
34 53 cmd = cmd % (sshcmd, args, remotecmd, self.path)
35 54
36 55 ui.note('running %s\n' % cmd)
37 56 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
38 57
39 58 # skip any noise generated by remote shell
40 59 self.do_cmd("hello")
41 60 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
42 61 lines = ["", "dummy"]
43 62 max_noise = 500
44 63 while lines[-1] and max_noise:
45 64 l = r.readline()
46 65 self.readerr()
47 66 if lines[-1] == "1\n" and l == "\n":
48 67 break
49 68 if l:
50 69 ui.debug(_("remote: "), l)
51 70 lines.append(l)
52 71 max_noise -= 1
53 72 else:
54 73 raise hg.RepoError(_("no response from remote hg"))
55 74
56 75 self.capabilities = ()
57 76 lines.reverse()
58 77 for l in lines:
59 78 if l.startswith("capabilities:"):
60 79 self.capabilities = l[:-1].split(":")[1].split()
61 80 break
62 81
63 82 def readerr(self):
64 83 while 1:
65 84 size = util.fstat(self.pipee).st_size
66 85 if size == 0: break
67 86 l = self.pipee.readline()
68 87 if not l: break
69 88 self.ui.status(_("remote: "), l)
70 89
71 90 def __del__(self):
72 91 try:
73 92 self.pipeo.close()
74 93 self.pipei.close()
75 94 # read the error descriptor until EOF
76 95 for l in self.pipee:
77 96 self.ui.status(_("remote: "), l)
78 97 self.pipee.close()
79 98 except:
80 99 pass
81 100
82 101 def do_cmd(self, cmd, **args):
83 102 self.ui.debug(_("sending %s command\n") % cmd)
84 103 self.pipeo.write("%s\n" % cmd)
85 104 for k, v in args.items():
86 105 self.pipeo.write("%s %d\n" % (k, len(v)))
87 106 self.pipeo.write(v)
88 107 self.pipeo.flush()
89 108
90 109 return self.pipei
91 110
92 111 def call(self, cmd, **args):
93 112 r = self.do_cmd(cmd, **args)
94 113 l = r.readline()
95 114 self.readerr()
96 115 try:
97 116 l = int(l)
98 117 except:
99 118 raise hg.RepoError(_("unexpected response '%s'") % l)
100 119 return r.read(l)
101 120
102 121 def lock(self):
103 122 self.call("lock")
104 123 return remotelock(self)
105 124
106 125 def unlock(self):
107 126 self.call("unlock")
108 127
109 128 def heads(self):
110 129 d = self.call("heads")
111 130 try:
112 131 return map(bin, d[:-1].split(" "))
113 132 except:
114 133 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
115 134
116 135 def branches(self, nodes):
117 136 n = " ".join(map(hex, nodes))
118 137 d = self.call("branches", nodes=n)
119 138 try:
120 139 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
121 140 return br
122 141 except:
123 142 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
124 143
125 144 def between(self, pairs):
126 145 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
127 146 d = self.call("between", pairs=n)
128 147 try:
129 148 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
130 149 return p
131 150 except:
132 151 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
133 152
134 153 def changegroup(self, nodes, kind):
135 154 n = " ".join(map(hex, nodes))
136 155 return self.do_cmd("changegroup", roots=n)
137 156
138 157 def unbundle(self, cg, heads, source):
139 158 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
140 159 if d:
141 160 raise hg.RepoError(_("push refused: %s") % d)
142 161
143 162 while 1:
144 163 d = cg.read(4096)
145 164 if not d: break
146 165 self.pipeo.write(str(len(d)) + '\n')
147 166 self.pipeo.write(d)
148 167 self.readerr()
149 168
150 169 self.pipeo.write('0\n')
151 170 self.pipeo.flush()
152 171
153 172 self.readerr()
154 173 d = self.pipei.readline()
155 174 if d != '\n':
156 175 return 1
157 176
158 177 l = int(self.pipei.readline())
159 178 r = self.pipei.read(l)
160 179 if not r:
161 180 return 1
162 181 return int(r)
163 182
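unbundle() streams the changegroup as length-prefixed blocks: an ASCII decimal byte count, a newline, that many bytes of payload, repeated, with a bare '0\n' marking the end; the server then answers with an empty line on success followed by a length-prefixed result code. A standalone sketch of just that framing, with hypothetical helper names and any file-like objects:

    def write_framed(out, cg):
        # copy a changegroup stream using the unbundle wire framing
        while True:
            data = cg.read(4096)
            if not data:
                break
            out.write("%d\n" % len(data))
            out.write(data)
        out.write("0\n")

    def read_framed(inp):
        # inverse of write_framed: reassemble the payload
        chunks = []
        while True:
            size = int(inp.readline())
            if not size:
                break
            chunks.append(inp.read(size))
        return "".join(chunks)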
164 183 def addchangegroup(self, cg, source):
165 184 d = self.call("addchangegroup")
166 185 if d:
167 186 raise hg.RepoError(_("push refused: %s") % d)
168 187 while 1:
169 188 d = cg.read(4096)
170 189 if not d: break
171 190 self.pipeo.write(d)
172 191 self.readerr()
173 192
174 193 self.pipeo.flush()
175 194
176 195 self.readerr()
177 196 l = int(self.pipei.readline())
178 197 r = self.pipei.read(l)
179 198 if not r:
180 199 return 1
181 200 return int(r)