do not check sys.argv from localrepo when running hooks....
Vadim Gelfer
r2166:d0c02b4d default
@@ -1,3568 +1,3568 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival changegroup")
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted, unknown = repo.changes()
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def filterfiles(filters, files):
28 28 l = [x for x in files if x in filters]
29 29
30 30 for t in filters:
31 31 if t and t[-1] != "/":
32 32 t += "/"
33 33 l += [x for x in files if x.startswith(t)]
34 34 return l
35 35
36 36 def relpath(repo, args):
37 37 cwd = repo.getcwd()
38 38 if cwd:
39 39 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 40 return args
41 41
42 42 def matchpats(repo, pats=[], opts={}, head=''):
43 43 cwd = repo.getcwd()
44 44 if not pats and cwd:
45 45 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
46 46 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
47 47 cwd = ''
48 48 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
49 49 opts.get('exclude'), head)
50 50
51 51 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
52 52 files, matchfn, anypats = matchpats(repo, pats, opts, head)
53 53 exact = dict(zip(files, files))
54 54 def walk():
55 55 for src, fn in repo.walk(node=node, files=files, match=matchfn,
56 56 badmatch=badmatch):
57 57 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
58 58 return files, matchfn, walk()
59 59
60 60 def walk(repo, pats, opts, node=None, head='', badmatch=None):
61 61 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
62 62 for r in results:
63 63 yield r
64 64
65 65 def walkchangerevs(ui, repo, pats, opts):
66 66 '''Iterate over files and the revs they changed in.
67 67
68 68 Callers most commonly need to iterate backwards over the history
69 69 they are interested in. Doing so has awful (quadratic-looking)
70 70 performance, so we use iterators in a "windowed" way.
71 71
72 72 We walk a window of revisions in the desired order. Within the
73 73 window, we first walk forwards to gather data, then in the desired
74 74 order (usually backwards) to display it.
75 75
76 76 This function returns an (iterator, getchange, matchfn) tuple. The
77 77 getchange function returns the changelog entry for a numeric
78 78 revision. The iterator yields 3-tuples. They will be of one of
79 79 the following forms:
80 80
81 81 "window", incrementing, lastrev: stepping through a window,
82 82 positive if walking forwards through revs, last rev in the
83 83 sequence iterated over - use to reset state for the current window
84 84
85 85 "add", rev, fns: out-of-order traversal of the given file names
86 86 fns, which changed during revision rev - use to gather data for
87 87 possible display
88 88
89 89 "iter", rev, None: in-order traversal of the revs earlier iterated
90 90 over with "add" - use to display data'''
91 91
92 92 def increasing_windows(start, end, windowsize=8, sizelimit=512):
93 93 if start < end:
94 94 while start < end:
95 95 yield start, min(windowsize, end-start)
96 96 start += windowsize
97 97 if windowsize < sizelimit:
98 98 windowsize *= 2
99 99 else:
100 100 while start > end:
101 101 yield start, min(windowsize, start-end-1)
102 102 start -= windowsize
103 103 if windowsize < sizelimit:
104 104 windowsize *= 2
105 105
106 106
107 107 files, matchfn, anypats = matchpats(repo, pats, opts)
108 108
109 109 if repo.changelog.count() == 0:
110 110 return [], False, matchfn
111 111
112 112 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
113 113 wanted = {}
114 114 slowpath = anypats
115 115 fncache = {}
116 116
117 117 chcache = {}
118 118 def getchange(rev):
119 119 ch = chcache.get(rev)
120 120 if ch is None:
121 121 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
122 122 return ch
123 123
124 124 if not slowpath and not files:
125 125 # No files, no patterns. Display all revs.
126 126 wanted = dict(zip(revs, revs))
127 127 if not slowpath:
128 128 # Only files, no patterns. Check the history of each file.
129 129 def filerevgen(filelog):
130 130 for i, window in increasing_windows(filelog.count()-1, -1):
131 131 revs = []
132 132 for j in xrange(i - window, i + 1):
133 133 revs.append(filelog.linkrev(filelog.node(j)))
134 134 revs.reverse()
135 135 for rev in revs:
136 136 yield rev
137 137
138 138 minrev, maxrev = min(revs), max(revs)
139 139 for file_ in files:
140 140 filelog = repo.file(file_)
141 141 # A zero count may be a directory or deleted file, so
142 142 # try to find matching entries on the slow path.
143 143 if filelog.count() == 0:
144 144 slowpath = True
145 145 break
146 146 for rev in filerevgen(filelog):
147 147 if rev <= maxrev:
148 148 if rev < minrev:
149 149 break
150 150 fncache.setdefault(rev, [])
151 151 fncache[rev].append(file_)
152 152 wanted[rev] = 1
153 153 if slowpath:
154 154 # The slow path checks files modified in every changeset.
155 155 def changerevgen():
156 156 for i, window in increasing_windows(repo.changelog.count()-1, -1):
157 157 for j in xrange(i - window, i + 1):
158 158 yield j, getchange(j)[3]
159 159
160 160 for rev, changefiles in changerevgen():
161 161 matches = filter(matchfn, changefiles)
162 162 if matches:
163 163 fncache[rev] = matches
164 164 wanted[rev] = 1
165 165
166 166 def iterate():
167 167 for i, window in increasing_windows(0, len(revs)):
168 168 yield 'window', revs[0] < revs[-1], revs[-1]
169 169 nrevs = [rev for rev in revs[i:i+window]
170 170 if rev in wanted]
171 171 srevs = list(nrevs)
172 172 srevs.sort()
173 173 for rev in srevs:
174 174 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
175 175 yield 'add', rev, fns
176 176 for rev in nrevs:
177 177 yield 'iter', rev, None
178 178 return iterate(), getchange, matchfn
179 179
180 180 revrangesep = ':'
181 181
182 182 def revrange(ui, repo, revs, revlog=None):
183 183 """Yield revision as strings from a list of revision specifications."""
184 184 if revlog is None:
185 185 revlog = repo.changelog
186 186 revcount = revlog.count()
187 187 def fix(val, defval):
188 188 if not val:
189 189 return defval
190 190 try:
191 191 num = int(val)
192 192 if str(num) != val:
193 193 raise ValueError
194 194 if num < 0:
195 195 num += revcount
196 196 if num < 0:
197 197 num = 0
198 198 elif num >= revcount:
199 199 raise ValueError
200 200 except ValueError:
201 201 try:
202 202 num = repo.changelog.rev(repo.lookup(val))
203 203 except KeyError:
204 204 try:
205 205 num = revlog.rev(revlog.lookup(val))
206 206 except KeyError:
207 207 raise util.Abort(_('invalid revision identifier %s'), val)
208 208 return num
209 209 seen = {}
210 210 for spec in revs:
211 211 if spec.find(revrangesep) >= 0:
212 212 start, end = spec.split(revrangesep, 1)
213 213 start = fix(start, 0)
214 214 end = fix(end, revcount - 1)
215 215 step = start > end and -1 or 1
216 216 for rev in xrange(start, end+step, step):
217 217 if rev in seen:
218 218 continue
219 219 seen[rev] = 1
220 220 yield str(rev)
221 221 else:
222 222 rev = fix(spec, None)
223 223 if rev in seen:
224 224 continue
225 225 seen[rev] = 1
226 226 yield str(rev)
227 227
228 228 def make_filename(repo, r, pat, node=None,
229 229 total=None, seqno=None, revwidth=None, pathname=None):
230 230 node_expander = {
231 231 'H': lambda: hex(node),
232 232 'R': lambda: str(r.rev(node)),
233 233 'h': lambda: short(node),
234 234 }
235 235 expander = {
236 236 '%': lambda: '%',
237 237 'b': lambda: os.path.basename(repo.root),
238 238 }
239 239
240 240 try:
241 241 if node:
242 242 expander.update(node_expander)
243 243 if node and revwidth is not None:
244 244 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
245 245 if total is not None:
246 246 expander['N'] = lambda: str(total)
247 247 if seqno is not None:
248 248 expander['n'] = lambda: str(seqno)
249 249 if total is not None and seqno is not None:
250 250 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
251 251 if pathname is not None:
252 252 expander['s'] = lambda: os.path.basename(pathname)
253 253 expander['d'] = lambda: os.path.dirname(pathname) or '.'
254 254 expander['p'] = lambda: pathname
255 255
256 256 newname = []
257 257 patlen = len(pat)
258 258 i = 0
259 259 while i < patlen:
260 260 c = pat[i]
261 261 if c == '%':
262 262 i += 1
263 263 c = pat[i]
264 264 c = expander[c]()
265 265 newname.append(c)
266 266 i += 1
267 267 return ''.join(newname)
268 268 except KeyError, inst:
269 269 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
270 270 inst.args[0])
271 271
272 272 def make_file(repo, r, pat, node=None,
273 273 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
274 274 if not pat or pat == '-':
275 275 return 'w' in mode and sys.stdout or sys.stdin
276 276 if hasattr(pat, 'write') and 'w' in mode:
277 277 return pat
278 278 if hasattr(pat, 'read') and 'r' in mode:
279 279 return pat
280 280 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
281 281 pathname),
282 282 mode)
283 283
284 284 def write_bundle(cg, filename=None, compress=True):
285 285 """Write a bundle file and return its filename.
286 286
287 287 Existing files will not be overwritten.
288 288 If no filename is specified, a temporary file is created.
289 289 bz2 compression can be turned off.
290 290 The bundle file will be deleted in case of errors.
291 291 """
292 292 class nocompress(object):
293 293 def compress(self, x):
294 294 return x
295 295 def flush(self):
296 296 return ""
297 297
298 298 fh = None
299 299 cleanup = None
300 300 try:
301 301 if filename:
302 302 if os.path.exists(filename):
303 303 raise util.Abort(_("file '%s' already exists"), filename)
304 304 fh = open(filename, "wb")
305 305 else:
306 306 fd, filename = tempfile.mkstemp(suffix=".hg", prefix="hg-bundle-")
307 307 fh = os.fdopen(fd, "wb")
308 308 cleanup = filename
309 309
310 310 if compress:
311 311 fh.write("HG10")
312 312 z = bz2.BZ2Compressor(9)
313 313 else:
314 314 fh.write("HG10UN")
315 315 z = nocompress()
316 316 # parse the changegroup data, otherwise we will block
317 317 # in case of sshrepo because we don't know the end of the stream
318 318
319 319 # an empty chunkiter is the end of the changegroup
320 320 empty = False
321 321 while not empty:
322 322 empty = True
323 323 for chunk in changegroup.chunkiter(cg):
324 324 empty = False
325 325 fh.write(z.compress(changegroup.genchunk(chunk)))
326 326 fh.write(z.compress(changegroup.closechunk()))
327 327 fh.write(z.flush())
328 328 cleanup = None
329 329 return filename
330 330 finally:
331 331 if fh is not None:
332 332 fh.close()
333 333 if cleanup is not None:
334 334 os.unlink(cleanup)
335 335
336 336 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
337 337 changes=None, text=False, opts={}):
338 338 if not node1:
339 339 node1 = repo.dirstate.parents()[0]
340 340 # reading the data for node1 early allows it to play nicely
341 341 # with repo.changes and the revlog cache.
342 342 change = repo.changelog.read(node1)
343 343 mmap = repo.manifest.read(change[0])
344 344 date1 = util.datestr(change[2])
345 345
346 346 if not changes:
347 347 changes = repo.changes(node1, node2, files, match=match)
348 348 modified, added, removed, deleted, unknown = changes
349 349 if files:
350 350 modified, added, removed = map(lambda x: filterfiles(files, x),
351 351 (modified, added, removed))
352 352
353 353 if not modified and not added and not removed:
354 354 return
355 355
356 356 if node2:
357 357 change = repo.changelog.read(node2)
358 358 mmap2 = repo.manifest.read(change[0])
359 359 date2 = util.datestr(change[2])
360 360 def read(f):
361 361 return repo.file(f).read(mmap2[f])
362 362 else:
363 363 date2 = util.datestr()
364 364 def read(f):
365 365 return repo.wread(f)
366 366
367 367 if ui.quiet:
368 368 r = None
369 369 else:
370 370 hexfunc = ui.verbose and hex or short
371 371 r = [hexfunc(node) for node in [node1, node2] if node]
372 372
373 373 diffopts = ui.diffopts()
374 374 showfunc = opts.get('show_function') or diffopts['showfunc']
375 375 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
376 376 for f in modified:
377 377 to = None
378 378 if f in mmap:
379 379 to = repo.file(f).read(mmap[f])
380 380 tn = read(f)
381 381 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
382 382 showfunc=showfunc, ignorews=ignorews))
383 383 for f in added:
384 384 to = None
385 385 tn = read(f)
386 386 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
387 387 showfunc=showfunc, ignorews=ignorews))
388 388 for f in removed:
389 389 to = repo.file(f).read(mmap[f])
390 390 tn = None
391 391 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
392 392 showfunc=showfunc, ignorews=ignorews))
393 393
394 394 def trimuser(ui, name, rev, revcache):
395 395 """trim the name of the user who committed a change"""
396 396 user = revcache.get(rev)
397 397 if user is None:
398 398 user = revcache[rev] = ui.shortuser(name)
399 399 return user
400 400
401 401 class changeset_templater(object):
402 402 '''use templater module to format changeset information.'''
403 403
404 404 def __init__(self, ui, repo, mapfile):
405 405 self.t = templater.templater(mapfile, templater.common_filters,
406 406 cache={'parent': '{rev}:{node|short} ',
407 407 'manifest': '{rev}:{node|short}'})
408 408 self.ui = ui
409 409 self.repo = repo
410 410
411 411 def use_template(self, t):
412 412 '''set template string to use'''
413 413 self.t.cache['changeset'] = t
414 414
415 415 def write(self, thing, header=False):
416 416 '''write expanded template.
417 417 uses in-order recursive traversal of iterators.'''
418 418 for t in thing:
419 419 if hasattr(t, '__iter__'):
420 420 self.write(t, header=header)
421 421 elif header:
422 422 self.ui.write_header(t)
423 423 else:
424 424 self.ui.write(t)
425 425
426 426 def write_header(self, thing):
427 427 self.write(thing, header=True)
428 428
429 429 def show(self, rev=0, changenode=None, brinfo=None):
430 430 '''show a single changeset or file revision'''
431 431 log = self.repo.changelog
432 432 if changenode is None:
433 433 changenode = log.node(rev)
434 434 elif not rev:
435 435 rev = log.rev(changenode)
436 436
437 437 changes = log.read(changenode)
438 438
439 439 def showlist(name, values, plural=None, **args):
440 440 '''expand set of values.
441 441 name is name of key in template map.
442 442 values is list of strings or dicts.
443 443 plural is plural of name, if not simply name + 's'.
444 444
445 445 expansion works like this, given name 'foo'.
446 446
447 447 if values is empty, expand 'no_foos'.
448 448
449 449 if 'foo' not in template map, return values as a string,
450 450 joined by space.
451 451
452 452 expand 'start_foos'.
453 453
454 454 for each value, expand 'foo'. if 'last_foo' in template
455 455 map, expand it instead of 'foo' for last key.
456 456
457 457 expand 'end_foos'.
458 458 '''
459 459 if plural: names = plural
460 460 else: names = name + 's'
461 461 if not values:
462 462 noname = 'no_' + names
463 463 if noname in self.t:
464 464 yield self.t(noname, **args)
465 465 return
466 466 if name not in self.t:
467 467 if isinstance(values[0], str):
468 468 yield ' '.join(values)
469 469 else:
470 470 for v in values:
471 471 yield dict(v, **args)
472 472 return
473 473 startname = 'start_' + names
474 474 if startname in self.t:
475 475 yield self.t(startname, **args)
476 476 vargs = args.copy()
477 477 def one(v, tag=name):
478 478 try:
479 479 vargs.update(v)
480 480 except (AttributeError, ValueError):
481 481 try:
482 482 for a, b in v:
483 483 vargs[a] = b
484 484 except ValueError:
485 485 vargs[name] = v
486 486 return self.t(tag, **vargs)
487 487 lastname = 'last_' + name
488 488 if lastname in self.t:
489 489 last = values.pop()
490 490 else:
491 491 last = None
492 492 for v in values:
493 493 yield one(v)
494 494 if last is not None:
495 495 yield one(last, tag=lastname)
496 496 endname = 'end_' + names
497 497 if endname in self.t:
498 498 yield self.t(endname, **args)
499 499
500 500 if brinfo:
501 501 def showbranches(**args):
502 502 if changenode in brinfo:
503 503 for x in showlist('branch', brinfo[changenode],
504 504 plural='branches', **args):
505 505 yield x
506 506 else:
507 507 showbranches = ''
508 508
509 509 if self.ui.debugflag:
510 510 def showmanifest(**args):
511 511 args = args.copy()
512 512 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
513 513 node=hex(changes[0])))
514 514 yield self.t('manifest', **args)
515 515 else:
516 516 showmanifest = ''
517 517
518 518 def showparents(**args):
519 519 parents = [[('rev', log.rev(p)), ('node', hex(p))]
520 520 for p in log.parents(changenode)
521 521 if self.ui.debugflag or p != nullid]
522 522 if (not self.ui.debugflag and len(parents) == 1 and
523 523 parents[0][0][1] == rev - 1):
524 524 return
525 525 for x in showlist('parent', parents, **args):
526 526 yield x
527 527
528 528 def showtags(**args):
529 529 for x in showlist('tag', self.repo.nodetags(changenode), **args):
530 530 yield x
531 531
532 532 if self.ui.debugflag:
533 533 files = self.repo.changes(log.parents(changenode)[0], changenode)
534 534 def showfiles(**args):
535 535 for x in showlist('file', files[0], **args): yield x
536 536 def showadds(**args):
537 537 for x in showlist('file_add', files[1], **args): yield x
538 538 def showdels(**args):
539 539 for x in showlist('file_del', files[2], **args): yield x
540 540 else:
541 541 def showfiles(**args):
542 542 for x in showlist('file', changes[3], **args): yield x
543 543 showadds = ''
544 544 showdels = ''
545 545
546 546 props = {
547 547 'author': changes[1],
548 548 'branches': showbranches,
549 549 'date': changes[2],
550 550 'desc': changes[4],
551 551 'file_adds': showadds,
552 552 'file_dels': showdels,
553 553 'files': showfiles,
554 554 'manifest': showmanifest,
555 555 'node': hex(changenode),
556 556 'parents': showparents,
557 557 'rev': rev,
558 558 'tags': showtags,
559 559 }
560 560
561 561 try:
562 562 if self.ui.debugflag and 'header_debug' in self.t:
563 563 key = 'header_debug'
564 564 elif self.ui.quiet and 'header_quiet' in self.t:
565 565 key = 'header_quiet'
566 566 elif self.ui.verbose and 'header_verbose' in self.t:
567 567 key = 'header_verbose'
568 568 elif 'header' in self.t:
569 569 key = 'header'
570 570 else:
571 571 key = ''
572 572 if key:
573 573 self.write_header(self.t(key, **props))
574 574 if self.ui.debugflag and 'changeset_debug' in self.t:
575 575 key = 'changeset_debug'
576 576 elif self.ui.quiet and 'changeset_quiet' in self.t:
577 577 key = 'changeset_quiet'
578 578 elif self.ui.verbose and 'changeset_verbose' in self.t:
579 579 key = 'changeset_verbose'
580 580 else:
581 581 key = 'changeset'
582 582 self.write(self.t(key, **props))
583 583 except KeyError, inst:
584 584 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
585 585 inst.args[0]))
586 586 except SyntaxError, inst:
587 587 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
588 588
589 589 class changeset_printer(object):
590 590 '''show changeset information when templating is not requested.'''
591 591
592 592 def __init__(self, ui, repo):
593 593 self.ui = ui
594 594 self.repo = repo
595 595
596 596 def show(self, rev=0, changenode=None, brinfo=None):
597 597 '''show a single changeset or file revision'''
598 598 log = self.repo.changelog
599 599 if changenode is None:
600 600 changenode = log.node(rev)
601 601 elif not rev:
602 602 rev = log.rev(changenode)
603 603
604 604 if self.ui.quiet:
605 605 self.ui.write("%d:%s\n" % (rev, short(changenode)))
606 606 return
607 607
608 608 changes = log.read(changenode)
609 609 date = util.datestr(changes[2])
610 610
611 611 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
612 612 for p in log.parents(changenode)
613 613 if self.ui.debugflag or p != nullid]
614 614 if (not self.ui.debugflag and len(parents) == 1 and
615 615 parents[0][0] == rev-1):
616 616 parents = []
617 617
618 618 if self.ui.verbose:
619 619 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
620 620 else:
621 621 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
622 622
623 623 for tag in self.repo.nodetags(changenode):
624 624 self.ui.status(_("tag: %s\n") % tag)
625 625 for parent in parents:
626 626 self.ui.write(_("parent: %d:%s\n") % parent)
627 627
628 628 if brinfo and changenode in brinfo:
629 629 br = brinfo[changenode]
630 630 self.ui.write(_("branch: %s\n") % " ".join(br))
631 631
632 632 self.ui.debug(_("manifest: %d:%s\n") %
633 633 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
634 634 self.ui.status(_("user: %s\n") % changes[1])
635 635 self.ui.status(_("date: %s\n") % date)
636 636
637 637 if self.ui.debugflag:
638 638 files = self.repo.changes(log.parents(changenode)[0], changenode)
639 639 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
640 640 files):
641 641 if value:
642 642 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
643 643 else:
644 644 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
645 645
646 646 description = changes[4].strip()
647 647 if description:
648 648 if self.ui.verbose:
649 649 self.ui.status(_("description:\n"))
650 650 self.ui.status(description)
651 651 self.ui.status("\n\n")
652 652 else:
653 653 self.ui.status(_("summary: %s\n") %
654 654 description.splitlines()[0])
655 655 self.ui.status("\n")
656 656
657 657 def show_changeset(ui, repo, opts):
658 658 '''show one changeset. uses template or regular display. caller
659 659 can pass in 'style' and 'template' options in opts.'''
660 660
661 661 tmpl = opts.get('template')
662 662 if tmpl:
663 663 tmpl = templater.parsestring(tmpl, quoted=False)
664 664 else:
665 665 tmpl = ui.config('ui', 'logtemplate')
666 666 if tmpl: tmpl = templater.parsestring(tmpl)
667 667 mapfile = opts.get('style') or ui.config('ui', 'style')
668 668 if tmpl or mapfile:
669 669 if mapfile:
670 670 if not os.path.isfile(mapfile):
671 671 mapname = templater.templatepath('map-cmdline.' + mapfile)
672 672 if not mapname: mapname = templater.templatepath(mapfile)
673 673 if mapname: mapfile = mapname
674 674 try:
675 675 t = changeset_templater(ui, repo, mapfile)
676 676 except SyntaxError, inst:
677 677 raise util.Abort(inst.args[0])
678 678 if tmpl: t.use_template(tmpl)
679 679 return t
680 680 return changeset_printer(ui, repo)
681 681
682 682 def show_version(ui):
683 683 """output version and copyright information"""
684 684 ui.write(_("Mercurial Distributed SCM (version %s)\n")
685 685 % version.get_version())
686 686 ui.status(_(
687 687 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
688 688 "This is free software; see the source for copying conditions. "
689 689 "There is NO\nwarranty; "
690 690 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
691 691 ))
692 692
693 693 def help_(ui, cmd=None, with_version=False):
694 694 """show help for a given command or all commands"""
695 695 option_lists = []
696 696 if cmd and cmd != 'shortlist':
697 697 if with_version:
698 698 show_version(ui)
699 699 ui.write('\n')
700 700 aliases, i = find(cmd)
701 701 # synopsis
702 702 ui.write("%s\n\n" % i[2])
703 703
704 704 # description
705 705 doc = i[0].__doc__
706 706 if not doc:
707 707 doc = _("(No help text available)")
708 708 if ui.quiet:
709 709 doc = doc.splitlines(0)[0]
710 710 ui.write("%s\n" % doc.rstrip())
711 711
712 712 if not ui.quiet:
713 713 # aliases
714 714 if len(aliases) > 1:
715 715 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
716 716
717 717 # options
718 718 if i[1]:
719 719 option_lists.append(("options", i[1]))
720 720
721 721 else:
722 722 # program name
723 723 if ui.verbose or with_version:
724 724 show_version(ui)
725 725 else:
726 726 ui.status(_("Mercurial Distributed SCM\n"))
727 727 ui.status('\n')
728 728
729 729 # list of commands
730 730 if cmd == "shortlist":
731 731 ui.status(_('basic commands (use "hg help" '
732 732 'for the full list or option "-v" for details):\n\n'))
733 733 elif ui.verbose:
734 734 ui.status(_('list of commands:\n\n'))
735 735 else:
736 736 ui.status(_('list of commands (use "hg help -v" '
737 737 'to show aliases and global options):\n\n'))
738 738
739 739 h = {}
740 740 cmds = {}
741 741 for c, e in table.items():
742 742 f = c.split("|")[0]
743 743 if cmd == "shortlist" and not f.startswith("^"):
744 744 continue
745 745 f = f.lstrip("^")
746 746 if not ui.debugflag and f.startswith("debug"):
747 747 continue
748 748 doc = e[0].__doc__
749 749 if not doc:
750 750 doc = _("(No help text available)")
751 751 h[f] = doc.splitlines(0)[0].rstrip()
752 752 cmds[f] = c.lstrip("^")
753 753
754 754 fns = h.keys()
755 755 fns.sort()
756 756 m = max(map(len, fns))
757 757 for f in fns:
758 758 if ui.verbose:
759 759 commands = cmds[f].replace("|",", ")
760 760 ui.write(" %s:\n %s\n"%(commands, h[f]))
761 761 else:
762 762 ui.write(' %-*s %s\n' % (m, f, h[f]))
763 763
764 764 # global options
765 765 if ui.verbose:
766 766 option_lists.append(("global options", globalopts))
767 767
768 768 # list all option lists
769 769 opt_output = []
770 770 for title, options in option_lists:
771 771 opt_output.append(("\n%s:\n" % title, None))
772 772 for shortopt, longopt, default, desc in options:
773 773 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
774 774 longopt and " --%s" % longopt),
775 775 "%s%s" % (desc,
776 776 default
777 777 and _(" (default: %s)") % default
778 778 or "")))
779 779
780 780 if opt_output:
781 781 opts_len = max([len(line[0]) for line in opt_output if line[1]])
782 782 for first, second in opt_output:
783 783 if second:
784 784 ui.write(" %-*s %s\n" % (opts_len, first, second))
785 785 else:
786 786 ui.write("%s\n" % first)
787 787
788 788 # Commands start here, listed alphabetically
789 789
790 790 def add(ui, repo, *pats, **opts):
791 791 """add the specified files on the next commit
792 792
793 793 Schedule files to be version controlled and added to the repository.
794 794
795 795 The files will be added to the repository at the next commit.
796 796
797 797 If no names are given, add all files in the repository.
798 798 """
799 799
800 800 names = []
801 801 for src, abs, rel, exact in walk(repo, pats, opts):
802 802 if exact:
803 803 if ui.verbose:
804 804 ui.status(_('adding %s\n') % rel)
805 805 names.append(abs)
806 806 elif repo.dirstate.state(abs) == '?':
807 807 ui.status(_('adding %s\n') % rel)
808 808 names.append(abs)
809 809 repo.add(names)
810 810
811 811 def addremove(ui, repo, *pats, **opts):
812 812 """add all new files, delete all missing files
813 813
814 814 Add all new files and remove all missing files from the repository.
815 815
816 816 New files are ignored if they match any of the patterns in .hgignore. As
817 817 with add, these changes take effect at the next commit.
818 818 """
819 819 return addremove_lock(ui, repo, pats, opts)
820 820
821 821 def addremove_lock(ui, repo, pats, opts, wlock=None):
822 822 add, remove = [], []
823 823 for src, abs, rel, exact in walk(repo, pats, opts):
824 824 if src == 'f' and repo.dirstate.state(abs) == '?':
825 825 add.append(abs)
826 826 if ui.verbose or not exact:
827 827 ui.status(_('adding %s\n') % ((pats and rel) or abs))
828 828 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
829 829 remove.append(abs)
830 830 if ui.verbose or not exact:
831 831 ui.status(_('removing %s\n') % ((pats and rel) or abs))
832 832 repo.add(add, wlock=wlock)
833 833 repo.remove(remove, wlock=wlock)
834 834
835 835 def annotate(ui, repo, *pats, **opts):
836 836 """show changeset information per file line
837 837
838 838 List changes in files, showing the revision id responsible for each line.
839 839
840 840 This command is useful to discover who did a change or when a change took
841 841 place.
842 842
843 843 Without the -a option, annotate will avoid processing files it
844 844 detects as binary. With -a, annotate will generate an annotation
845 845 anyway, probably with undesirable results.
846 846 """
847 847 def getnode(rev):
848 848 return short(repo.changelog.node(rev))
849 849
850 850 ucache = {}
851 851 def getname(rev):
852 852 cl = repo.changelog.read(repo.changelog.node(rev))
853 853 return trimuser(ui, cl[1], rev, ucache)
854 854
855 855 dcache = {}
856 856 def getdate(rev):
857 857 datestr = dcache.get(rev)
858 858 if datestr is None:
859 859 cl = repo.changelog.read(repo.changelog.node(rev))
860 860 datestr = dcache[rev] = util.datestr(cl[2])
861 861 return datestr
862 862
863 863 if not pats:
864 864 raise util.Abort(_('at least one file name or pattern required'))
865 865
866 866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
867 867 ['date', getdate]]
868 868 if not opts['user'] and not opts['changeset'] and not opts['date']:
869 869 opts['number'] = 1
870 870
871 871 if opts['rev']:
872 872 node = repo.changelog.lookup(opts['rev'])
873 873 else:
874 874 node = repo.dirstate.parents()[0]
875 875 change = repo.changelog.read(node)
876 876 mmap = repo.manifest.read(change[0])
877 877
878 878 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
879 879 f = repo.file(abs)
880 880 if not opts['text'] and util.binary(f.read(mmap[abs])):
881 881 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
882 882 continue
883 883
884 884 lines = f.annotate(mmap[abs])
885 885 pieces = []
886 886
887 887 for o, f in opmap:
888 888 if opts[o]:
889 889 l = [f(n) for n, dummy in lines]
890 890 if l:
891 891 m = max(map(len, l))
892 892 pieces.append(["%*s" % (m, x) for x in l])
893 893
894 894 if pieces:
895 895 for p, l in zip(zip(*pieces), lines):
896 896 ui.write("%s: %s" % (" ".join(p), l[1]))
897 897
898 898 def archive(ui, repo, dest, **opts):
899 899 '''create unversioned archive of a repository revision
900 900
901 901 By default, the revision used is the parent of the working
902 902 directory; use "-r" to specify a different revision.
903 903
904 904 To specify the type of archive to create, use "-t". Valid
905 905 types are:
906 906
907 907 "files" (default): a directory full of files
908 908 "tar": tar archive, uncompressed
909 909 "tbz2": tar archive, compressed using bzip2
910 910 "tgz": tar archive, compressed using gzip
911 911 "uzip": zip archive, uncompressed
912 912 "zip": zip archive, compressed using deflate
913 913
914 914 The exact name of the destination archive or directory is given
915 915 using a format string; see "hg help export" for details.
916 916
917 917 Each member added to an archive file has a directory prefix
918 918 prepended. Use "-p" to specify a format string for the prefix.
919 919 The default is the basename of the archive, with suffixes removed.
920 920 '''
921 921
922 922 if opts['rev']:
923 923 node = repo.lookup(opts['rev'])
924 924 else:
925 925 node, p2 = repo.dirstate.parents()
926 926 if p2 != nullid:
927 927 raise util.Abort(_('uncommitted merge - please provide a '
928 928 'specific revision'))
929 929
930 930 dest = make_filename(repo, repo.changelog, dest, node)
931 931 prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
932 932 if os.path.realpath(dest) == repo.root:
933 933 raise util.Abort(_('repository root cannot be destination'))
934 934 dummy, matchfn, dummy = matchpats(repo, [], opts)
935 935 archival.archive(repo, dest, node, opts.get('type') or 'files',
936 936 not opts['no_decode'], matchfn, prefix)
937 937
938 938 def backout(ui, repo, rev, **opts):
939 939 '''reverse effect of earlier changeset
940 940
941 941 Commit the backed out changes as a new changeset.
942 942
943 943 If you back out a changeset other than the tip, a new head is
944 944 created. The --merge option remembers the parent of the working
945 945 directory before starting the backout, then merges the new head
946 946 with it afterwards, to save you from doing this by hand. The
947 947 result of this merge is not committed, as for a normal merge.'''
948 948
949 949 bail_if_changed(repo)
950 950 op1, op2 = repo.dirstate.parents()
951 951 if op2 != nullid:
952 952 raise util.Abort(_('outstanding uncommitted merge'))
953 953 node = repo.lookup(rev)
954 954 parent, p2 = repo.changelog.parents(node)
955 955 if parent == nullid:
956 956 raise util.Abort(_('cannot back out a change with no parents'))
957 957 if p2 != nullid:
958 958 raise util.Abort(_('cannot back out a merge'))
959 959 repo.update(node, force=True)
960 960 revert_opts = opts.copy()
961 961 revert_opts['rev'] = hex(parent)
962 962 revert(ui, repo, **revert_opts)
963 963 commit_opts = opts.copy()
964 964 commit_opts['addremove'] = False
965 965 if not commit_opts['message'] and not commit_opts['logfile']:
966 966 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
967 967 commit(ui, repo, **commit_opts)
968 968 def nice(node):
969 969 return '%d:%s' % (repo.changelog.rev(node), short(node))
970 970 ui.status(_('changeset %s backs out changeset %s\n') %
971 971 (nice(repo.changelog.tip()), nice(node)))
972 972 if opts['merge'] and op1 != node:
973 973 ui.status(_('merging with changeset %s\n') % nice(op1))
974 974 update(ui, repo, hex(op1), **opts)
975 975
976 976 def bundle(ui, repo, fname, dest="default-push", **opts):
977 977 """create a changegroup file
978 978
979 979 Generate a compressed changegroup file collecting all changesets
980 980 not found in the other repository.
981 981
982 982 This file can then be transferred using conventional means and
983 983 applied to another repository with the unbundle command. This is
984 984 useful when native push and pull are not available or when
985 985 exporting an entire repository is undesirable. The standard file
986 986 extension is ".hg".
987 987
988 988 Unlike import/export, this exactly preserves all changeset
989 989 contents including permissions, rename data, and revision history.
990 990 """
991 991 dest = ui.expandpath(dest)
992 992 other = hg.repository(ui, dest)
993 993 o = repo.findoutgoing(other, force=opts['force'])
994 994 cg = repo.changegroup(o, 'bundle')
995 995 write_bundle(cg, fname)
996 996
997 997 def cat(ui, repo, file1, *pats, **opts):
998 998 """output the latest or given revisions of files
999 999
1000 1000 Print the specified files as they were at the given revision.
1001 1001 If no revision is given then the tip is used.
1002 1002
1003 1003 Output may be to a file, in which case the name of the file is
1004 1004 given using a format string. The formatting rules are the same as
1005 1005 for the export command, with the following additions:
1006 1006
1007 1007 %s basename of file being printed
1008 1008 %d dirname of file being printed, or '.' if in repo root
1009 1009 %p root-relative path name of file being printed
1010 1010 """
1011 1011 mf = {}
1012 1012 rev = opts['rev']
1013 1013 if rev:
1014 1014 node = repo.lookup(rev)
1015 1015 else:
1016 1016 node = repo.changelog.tip()
1017 1017 change = repo.changelog.read(node)
1018 1018 mf = repo.manifest.read(change[0])
1019 1019 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
1020 1020 r = repo.file(abs)
1021 1021 n = mf[abs]
1022 1022 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
1023 1023 fp.write(r.read(n))
1024 1024
1025 1025 def clone(ui, source, dest=None, **opts):
1026 1026 """make a copy of an existing repository
1027 1027
1028 1028 Create a copy of an existing repository in a new directory.
1029 1029
1030 1030 If no destination directory name is specified, it defaults to the
1031 1031 basename of the source.
1032 1032
1033 1033 The location of the source is added to the new repository's
1034 1034 .hg/hgrc file, as the default to be used for future pulls.
1035 1035
1036 1036 For efficiency, hardlinks are used for cloning whenever the source
1037 1037 and destination are on the same filesystem. Some filesystems,
1038 1038 such as AFS, implement hardlinking incorrectly, but do not report
1039 1039 errors. In these cases, use the --pull option to avoid
1040 1040 hardlinking.
1041 1041
1042 1042 See pull for valid source format details.
1043 1043 """
1044 1044 if dest is None:
1045 1045 dest = os.path.basename(os.path.normpath(source))
1046 1046
1047 1047 if os.path.exists(dest):
1048 1048 raise util.Abort(_("destination '%s' already exists"), dest)
1049 1049
1050 1050 dest = os.path.realpath(dest)
1051 1051
1052 1052 class Dircleanup(object):
1053 1053 def __init__(self, dir_):
1054 1054 self.rmtree = shutil.rmtree
1055 1055 self.dir_ = dir_
1056 1056 os.mkdir(dir_)
1057 1057 def close(self):
1058 1058 self.dir_ = None
1059 1059 def __del__(self):
1060 1060 if self.dir_:
1061 1061 self.rmtree(self.dir_, True)
1062 1062
1063 1063 if opts['ssh']:
1064 1064 ui.setconfig("ui", "ssh", opts['ssh'])
1065 1065 if opts['remotecmd']:
1066 1066 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1067 1067
1068 1068 source = ui.expandpath(source)
1069 1069
1070 1070 d = Dircleanup(dest)
1071 1071 abspath = source
1072 1072 other = hg.repository(ui, source)
1073 1073
1074 1074 copy = False
1075 1075 if other.dev() != -1:
1076 1076 abspath = os.path.abspath(source)
1077 1077 if not opts['pull'] and not opts['rev']:
1078 1078 copy = True
1079 1079
1080 1080 if copy:
1081 1081 try:
1082 1082 # we use a lock here because if we race with commit, we
1083 1083 # can end up with extra data in the cloned revlogs that's
1084 1084 # not pointed to by changesets, thus causing verify to
1085 1085 # fail
1086 1086 l1 = other.lock()
1087 1087 except lock.LockException:
1088 1088 copy = False
1089 1089
1090 1090 if copy:
1091 1091 # we lock here to avoid premature writing to the target
1092 1092 os.mkdir(os.path.join(dest, ".hg"))
1093 1093 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
1094 1094
1095 1095 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
1096 1096 for f in files.split():
1097 1097 src = os.path.join(source, ".hg", f)
1098 1098 dst = os.path.join(dest, ".hg", f)
1099 1099 try:
1100 1100 util.copyfiles(src, dst)
1101 1101 except OSError, inst:
1102 1102 if inst.errno != errno.ENOENT:
1103 1103 raise
1104 1104
1105 1105 repo = hg.repository(ui, dest)
1106 1106
1107 1107 else:
1108 1108 revs = None
1109 1109 if opts['rev']:
1110 1110 if not other.local():
1111 1111 error = _("clone -r not supported yet for remote repositories.")
1112 1112 raise util.Abort(error)
1113 1113 else:
1114 1114 revs = [other.lookup(rev) for rev in opts['rev']]
1115 1115 repo = hg.repository(ui, dest, create=1)
1116 1116 repo.pull(other, heads = revs)
1117 1117
1118 1118 f = repo.opener("hgrc", "w", text=True)
1119 1119 f.write("[paths]\n")
1120 1120 f.write("default = %s\n" % abspath)
1121 1121 f.close()
1122 1122
1123 1123 if not opts['noupdate']:
1124 1124 update(repo.ui, repo)
1125 1125
1126 1126 d.close()
1127 1127
1128 1128 def commit(ui, repo, *pats, **opts):
1129 1129 """commit the specified files or all outstanding changes
1130 1130
1131 1131 Commit changes to the given files into the repository.
1132 1132
1133 1133 If a list of files is omitted, all changes reported by "hg status"
1134 1134 will be committed.
1135 1135
1136 1136 If no commit message is specified, the editor configured in your hgrc
1137 1137 or in the EDITOR environment variable is started to enter a message.
1138 1138 """
1139 1139 message = opts['message']
1140 1140 logfile = opts['logfile']
1141 1141
1142 1142 if message and logfile:
1143 1143 raise util.Abort(_('options --message and --logfile are mutually '
1144 1144 'exclusive'))
1145 1145 if not message and logfile:
1146 1146 try:
1147 1147 if logfile == '-':
1148 1148 message = sys.stdin.read()
1149 1149 else:
1150 1150 message = open(logfile).read()
1151 1151 except IOError, inst:
1152 1152 raise util.Abort(_("can't read commit message '%s': %s") %
1153 1153 (logfile, inst.strerror))
1154 1154
1155 1155 if opts['addremove']:
1156 1156 addremove(ui, repo, *pats, **opts)
1157 1157 fns, match, anypats = matchpats(repo, pats, opts)
1158 1158 if pats:
1159 1159 modified, added, removed, deleted, unknown = (
1160 1160 repo.changes(files=fns, match=match))
1161 1161 files = modified + added + removed
1162 1162 else:
1163 1163 files = []
1164 1164 try:
1165 1165 repo.commit(files, message, opts['user'], opts['date'], match)
1166 1166 except ValueError, inst:
1167 1167 raise util.Abort(str(inst))
1168 1168
1169 1169 def docopy(ui, repo, pats, opts, wlock):
1170 1170 # called with the repo lock held
1171 1171 cwd = repo.getcwd()
1172 1172 errors = 0
1173 1173 copied = []
1174 1174 targets = {}
1175 1175
1176 1176 def okaytocopy(abs, rel, exact):
1177 1177 reasons = {'?': _('is not managed'),
1178 1178 'a': _('has been marked for add'),
1179 1179 'r': _('has been marked for remove')}
1180 1180 state = repo.dirstate.state(abs)
1181 1181 reason = reasons.get(state)
1182 1182 if reason:
1183 1183 if state == 'a':
1184 1184 origsrc = repo.dirstate.copied(abs)
1185 1185 if origsrc is not None:
1186 1186 return origsrc
1187 1187 if exact:
1188 1188 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1189 1189 else:
1190 1190 return abs
1191 1191
1192 1192 def copy(origsrc, abssrc, relsrc, target, exact):
1193 1193 abstarget = util.canonpath(repo.root, cwd, target)
1194 1194 reltarget = util.pathto(cwd, abstarget)
1195 1195 prevsrc = targets.get(abstarget)
1196 1196 if prevsrc is not None:
1197 1197 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1198 1198 (reltarget, abssrc, prevsrc))
1199 1199 return
1200 1200 if (not opts['after'] and os.path.exists(reltarget) or
1201 1201 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1202 1202 if not opts['force']:
1203 1203 ui.warn(_('%s: not overwriting - file exists\n') %
1204 1204 reltarget)
1205 1205 return
1206 1206 if not opts['after']:
1207 1207 os.unlink(reltarget)
1208 1208 if opts['after']:
1209 1209 if not os.path.exists(reltarget):
1210 1210 return
1211 1211 else:
1212 1212 targetdir = os.path.dirname(reltarget) or '.'
1213 1213 if not os.path.isdir(targetdir):
1214 1214 os.makedirs(targetdir)
1215 1215 try:
1216 1216 restore = repo.dirstate.state(abstarget) == 'r'
1217 1217 if restore:
1218 1218 repo.undelete([abstarget], wlock)
1219 1219 try:
1220 1220 shutil.copyfile(relsrc, reltarget)
1221 1221 shutil.copymode(relsrc, reltarget)
1222 1222 restore = False
1223 1223 finally:
1224 1224 if restore:
1225 1225 repo.remove([abstarget], wlock)
1226 1226 except shutil.Error, inst:
1227 1227 raise util.Abort(str(inst))
1228 1228 except IOError, inst:
1229 1229 if inst.errno == errno.ENOENT:
1230 1230 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1231 1231 else:
1232 1232 ui.warn(_('%s: cannot copy - %s\n') %
1233 1233 (relsrc, inst.strerror))
1234 1234 errors += 1
1235 1235 return
1236 1236 if ui.verbose or not exact:
1237 1237 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1238 1238 targets[abstarget] = abssrc
1239 1239 if abstarget != origsrc:
1240 1240 repo.copy(origsrc, abstarget, wlock)
1241 1241 copied.append((abssrc, relsrc, exact))
1242 1242
1243 1243 def targetpathfn(pat, dest, srcs):
1244 1244 if os.path.isdir(pat):
1245 1245 abspfx = util.canonpath(repo.root, cwd, pat)
1246 1246 if destdirexists:
1247 1247 striplen = len(os.path.split(abspfx)[0])
1248 1248 else:
1249 1249 striplen = len(abspfx)
1250 1250 if striplen:
1251 1251 striplen += len(os.sep)
1252 1252 res = lambda p: os.path.join(dest, p[striplen:])
1253 1253 elif destdirexists:
1254 1254 res = lambda p: os.path.join(dest, os.path.basename(p))
1255 1255 else:
1256 1256 res = lambda p: dest
1257 1257 return res
1258 1258
1259 1259 def targetpathafterfn(pat, dest, srcs):
1260 1260 if util.patkind(pat, None)[0]:
1261 1261 # a mercurial pattern
1262 1262 res = lambda p: os.path.join(dest, os.path.basename(p))
1263 1263 else:
1264 1264 abspfx = util.canonpath(repo.root, cwd, pat)
1265 1265 if len(abspfx) < len(srcs[0][0]):
1266 1266 # A directory. Either the target path contains the last
1267 1267 # component of the source path or it does not.
1268 1268 def evalpath(striplen):
1269 1269 score = 0
1270 1270 for s in srcs:
1271 1271 t = os.path.join(dest, s[0][striplen:])
1272 1272 if os.path.exists(t):
1273 1273 score += 1
1274 1274 return score
1275 1275
1276 1276 striplen = len(abspfx)
1277 1277 if striplen:
1278 1278 striplen += len(os.sep)
1279 1279 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1280 1280 score = evalpath(striplen)
1281 1281 striplen1 = len(os.path.split(abspfx)[0])
1282 1282 if striplen1:
1283 1283 striplen1 += len(os.sep)
1284 1284 if evalpath(striplen1) > score:
1285 1285 striplen = striplen1
1286 1286 res = lambda p: os.path.join(dest, p[striplen:])
1287 1287 else:
1288 1288 # a file
1289 1289 if destdirexists:
1290 1290 res = lambda p: os.path.join(dest, os.path.basename(p))
1291 1291 else:
1292 1292 res = lambda p: dest
1293 1293 return res
1294 1294
1295 1295
1296 1296 pats = list(pats)
1297 1297 if not pats:
1298 1298 raise util.Abort(_('no source or destination specified'))
1299 1299 if len(pats) == 1:
1300 1300 raise util.Abort(_('no destination specified'))
1301 1301 dest = pats.pop()
1302 1302 destdirexists = os.path.isdir(dest)
1303 1303 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1304 1304 raise util.Abort(_('with multiple sources, destination must be an '
1305 1305 'existing directory'))
1306 1306 if opts['after']:
1307 1307 tfn = targetpathafterfn
1308 1308 else:
1309 1309 tfn = targetpathfn
1310 1310 copylist = []
1311 1311 for pat in pats:
1312 1312 srcs = []
1313 1313 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1314 1314 origsrc = okaytocopy(abssrc, relsrc, exact)
1315 1315 if origsrc:
1316 1316 srcs.append((origsrc, abssrc, relsrc, exact))
1317 1317 if not srcs:
1318 1318 continue
1319 1319 copylist.append((tfn(pat, dest, srcs), srcs))
1320 1320 if not copylist:
1321 1321 raise util.Abort(_('no files to copy'))
1322 1322
1323 1323 for targetpath, srcs in copylist:
1324 1324 for origsrc, abssrc, relsrc, exact in srcs:
1325 1325 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1326 1326
1327 1327 if errors:
1328 1328 ui.warn(_('(consider using --after)\n'))
1329 1329 return errors, copied
1330 1330
1331 1331 def copy(ui, repo, *pats, **opts):
1332 1332 """mark files as copied for the next commit
1333 1333
1334 1334 Mark dest as having copies of source files. If dest is a
1335 1335 directory, copies are put in that directory. If dest is a file,
1336 1336 there can only be one source.
1337 1337
1338 1338 By default, this command copies the contents of files as they
1339 1339 stand in the working directory. If invoked with --after, the
1340 1340 operation is recorded, but no copying is performed.
1341 1341
1342 1342 This command takes effect in the next commit.
1343 1343
1344 1344 NOTE: This command should be treated as experimental. While it
1345 1345 should properly record copied files, this information is not yet
1346 1346 fully used by merge, nor fully reported by log.
1347 1347 """
1348 1348 wlock = repo.wlock(0)
1349 1349 errs, copied = docopy(ui, repo, pats, opts, wlock)
1350 1350 return errs
1351 1351
1352 1352 def debugancestor(ui, index, rev1, rev2):
1353 1353 """find the ancestor revision of two revisions in a given index"""
1354 1354 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1355 1355 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1356 1356 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1357 1357
1358 1358 def debugcomplete(ui, cmd='', **opts):
1359 1359 """returns the completion list associated with the given command"""
1360 1360
1361 1361 if opts['options']:
1362 1362 options = []
1363 1363 otables = [globalopts]
1364 1364 if cmd:
1365 1365 aliases, entry = find(cmd)
1366 1366 otables.append(entry[1])
1367 1367 for t in otables:
1368 1368 for o in t:
1369 1369 if o[0]:
1370 1370 options.append('-%s' % o[0])
1371 1371 options.append('--%s' % o[1])
1372 1372 ui.write("%s\n" % "\n".join(options))
1373 1373 return
1374 1374
1375 1375 clist = findpossible(cmd).keys()
1376 1376 clist.sort()
1377 1377 ui.write("%s\n" % "\n".join(clist))
1378 1378
1379 1379 def debugrebuildstate(ui, repo, rev=None):
1380 1380 """rebuild the dirstate as it would look like for the given revision"""
1381 1381 if not rev:
1382 1382 rev = repo.changelog.tip()
1383 1383 else:
1384 1384 rev = repo.lookup(rev)
1385 1385 change = repo.changelog.read(rev)
1386 1386 n = change[0]
1387 1387 files = repo.manifest.readflags(n)
1388 1388 wlock = repo.wlock()
1389 1389 repo.dirstate.rebuild(rev, files.iteritems())
1390 1390
1391 1391 def debugcheckstate(ui, repo):
1392 1392 """validate the correctness of the current dirstate"""
1393 1393 parent1, parent2 = repo.dirstate.parents()
1394 1394 repo.dirstate.read()
1395 1395 dc = repo.dirstate.map
1396 1396 keys = dc.keys()
1397 1397 keys.sort()
1398 1398 m1n = repo.changelog.read(parent1)[0]
1399 1399 m2n = repo.changelog.read(parent2)[0]
1400 1400 m1 = repo.manifest.read(m1n)
1401 1401 m2 = repo.manifest.read(m2n)
1402 1402 errors = 0
1403 1403 for f in dc:
1404 1404 state = repo.dirstate.state(f)
1405 1405 if state in "nr" and f not in m1:
1406 1406 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1407 1407 errors += 1
1408 1408 if state in "a" and f in m1:
1409 1409 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1410 1410 errors += 1
1411 1411 if state in "m" and f not in m1 and f not in m2:
1412 1412 ui.warn(_("%s in state %s, but not in either manifest\n") %
1413 1413 (f, state))
1414 1414 errors += 1
1415 1415 for f in m1:
1416 1416 state = repo.dirstate.state(f)
1417 1417 if state not in "nrm":
1418 1418 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1419 1419 errors += 1
1420 1420 if errors:
1421 1421 error = _(".hg/dirstate inconsistent with current parent's manifest")
1422 1422 raise util.Abort(error)
1423 1423
1424 1424 def debugconfig(ui, repo):
1425 1425 """show combined config settings from all hgrc files"""
1426 1426 for section, name, value in ui.walkconfig():
1427 1427 ui.write('%s.%s=%s\n' % (section, name, value))
1428 1428
1429 1429 def debugsetparents(ui, repo, rev1, rev2=None):
1430 1430 """manually set the parents of the current working directory
1431 1431
1432 1432 This is useful for writing repository conversion tools, but should
1433 1433 be used with care.
1434 1434 """
1435 1435
1436 1436 if not rev2:
1437 1437 rev2 = hex(nullid)
1438 1438
1439 1439 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1440 1440
1441 1441 def debugstate(ui, repo):
1442 1442 """show the contents of the current dirstate"""
1443 1443 repo.dirstate.read()
1444 1444 dc = repo.dirstate.map
1445 1445 keys = dc.keys()
1446 1446 keys.sort()
1447 1447 for file_ in keys:
1448 1448 ui.write("%c %3o %10d %s %s\n"
1449 1449 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1450 1450 time.strftime("%x %X",
1451 1451 time.localtime(dc[file_][3])), file_))
1452 1452 for f in repo.dirstate.copies:
1453 1453 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1454 1454
1455 1455 def debugdata(ui, file_, rev):
1456 1456 """dump the contents of an data file revision"""
1457 1457 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1458 1458 file_[:-2] + ".i", file_, 0)
1459 1459 try:
1460 1460 ui.write(r.revision(r.lookup(rev)))
1461 1461 except KeyError:
1462 1462 raise util.Abort(_('invalid revision identifier %s'), rev)
1463 1463
1464 1464 def debugindex(ui, file_):
1465 1465 """dump the contents of an index file"""
1466 1466 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1467 1467 ui.write(" rev offset length base linkrev" +
1468 1468 " nodeid p1 p2\n")
1469 1469 for i in range(r.count()):
1470 1470 node = r.node(i)
1471 1471 pp = r.parents(node)
1472 1472 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1473 1473 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1474 1474 short(node), short(pp[0]), short(pp[1])))
1475 1475
1476 1476 def debugindexdot(ui, file_):
1477 1477 """dump an index DAG as a .dot file"""
1478 1478 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1479 1479 ui.write("digraph G {\n")
1480 1480 for i in range(r.count()):
1481 1481 e = r.index[i]
1482 1482 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1483 1483 if e[5] != nullid:
1484 1484 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1485 1485 ui.write("}\n")
1486 1486
1487 1487 def debugrename(ui, repo, file, rev=None):
1488 1488 """dump rename information"""
1489 1489 r = repo.file(relpath(repo, [file])[0])
1490 1490 if rev:
1491 1491 try:
1492 1492 # assume all revision numbers are for changesets
1493 1493 n = repo.lookup(rev)
1494 1494 change = repo.changelog.read(n)
1495 1495 m = repo.manifest.read(change[0])
1496 1496 n = m[relpath(repo, [file])[0]]
1497 1497 except (hg.RepoError, KeyError):
1498 1498 n = r.lookup(rev)
1499 1499 else:
1500 1500 n = r.tip()
1501 1501 m = r.renamed(n)
1502 1502 if m:
1503 1503 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1504 1504 else:
1505 1505 ui.write(_("not renamed\n"))
1506 1506
1507 1507 def debugwalk(ui, repo, *pats, **opts):
1508 1508 """show how files match on given patterns"""
1509 1509 items = list(walk(repo, pats, opts))
1510 1510 if not items:
1511 1511 return
1512 1512 fmt = '%%s %%-%ds %%-%ds %%s' % (
1513 1513 max([len(abs) for (src, abs, rel, exact) in items]),
1514 1514 max([len(rel) for (src, abs, rel, exact) in items]))
1515 1515 for src, abs, rel, exact in items:
1516 1516 line = fmt % (src, abs, rel, exact and 'exact' or '')
1517 1517 ui.write("%s\n" % line.rstrip())
1518 1518
1519 1519 def diff(ui, repo, *pats, **opts):
1520 1520 """diff repository (or selected files)
1521 1521
1522 1522 Show differences between revisions for the specified files.
1523 1523
1524 1524 Differences between files are shown using the unified diff format.
1525 1525
1526 1526 When two revision arguments are given, then changes are shown
1527 1527 between those revisions. If only one revision is specified then
1528 1528 that revision is compared to the working directory, and, when no
1529 1529 revisions are specified, the working directory files are compared
1530 1530 to its parent.
1531 1531
1532 1532 Without the -a option, diff will avoid generating diffs of files
1533 1533 it detects as binary. With -a, diff will generate a diff anyway,
1534 1534 probably with undesirable results.
1535 1535 """
1536 1536 node1, node2 = None, None
1537 1537 revs = [repo.lookup(x) for x in opts['rev']]
1538 1538
1539 1539 if len(revs) > 0:
1540 1540 node1 = revs[0]
1541 1541 if len(revs) > 1:
1542 1542 node2 = revs[1]
1543 1543 if len(revs) > 2:
1544 1544 raise util.Abort(_("too many revisions to diff"))
1545 1545
1546 1546 fns, matchfn, anypats = matchpats(repo, pats, opts)
1547 1547
1548 1548 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1549 1549 text=opts['text'], opts=opts)
1550 1550
1551 1551 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1552 1552 node = repo.lookup(changeset)
1553 1553 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1554 1554 if opts['switch_parent']:
1555 1555 parents.reverse()
1556 1556 prev = (parents and parents[0]) or nullid
1557 1557 change = repo.changelog.read(node)
1558 1558
1559 1559 fp = make_file(repo, repo.changelog, opts['output'],
1560 1560 node=node, total=total, seqno=seqno,
1561 1561 revwidth=revwidth)
1562 1562 if fp != sys.stdout:
1563 1563 ui.note("%s\n" % fp.name)
1564 1564
1565 1565 fp.write("# HG changeset patch\n")
1566 1566 fp.write("# User %s\n" % change[1])
1567 1567 fp.write("# Node ID %s\n" % hex(node))
1568 1568 fp.write("# Parent %s\n" % hex(prev))
1569 1569 if len(parents) > 1:
1570 1570 fp.write("# Parent %s\n" % hex(parents[1]))
1571 1571 fp.write(change[4].rstrip())
1572 1572 fp.write("\n\n")
1573 1573
1574 1574 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1575 1575 if fp != sys.stdout:
1576 1576 fp.close()
1577 1577
1578 1578 def export(ui, repo, *changesets, **opts):
1579 1579 """dump the header and diffs for one or more changesets
1580 1580
1581 1581 Print the changeset header and diffs for one or more revisions.
1582 1582
1583 1583 The information shown in the changeset header is: author,
1584 1584 changeset hash, parent and commit comment.
1585 1585
1586 1586 Output may be to a file, in which case the name of the file is
1587 1587 given using a format string. The formatting rules are as follows:
1588 1588
1589 1589 %% literal "%" character
1590 1590 %H changeset hash (40 bytes of hexadecimal)
1591 1591 %N number of patches being generated
1592 1592 %R changeset revision number
1593 1593 %b basename of the exporting repository
1594 1594 %h short-form changeset hash (12 bytes of hexadecimal)
1595 1595 %n zero-padded sequence number, starting at 1
1596 1596 %r zero-padded changeset revision number
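
    For example (hypothetical values), "-o %R-%h.patch" writes each
    changeset to a file named like 2310-a1b2c3d4e5f6.patch.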
1597 1597
1598 1598 Without the -a option, export will avoid generating diffs of files
1599 1599 it detects as binary. With -a, export will generate a diff anyway,
1600 1600 probably with undesirable results.
1601 1601
1602 1602 With the --switch-parent option, the diff will be against the second
1603 1603 parent. It can be useful to review a merge.
1604 1604 """
1605 1605 if not changesets:
1606 1606 raise util.Abort(_("export requires at least one changeset"))
1607 1607 seqno = 0
1608 1608 revs = list(revrange(ui, repo, changesets))
1609 1609 total = len(revs)
1610 1610 revwidth = max(map(len, revs))
1611 1611 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1612 1612 ui.note(msg)
1613 1613 for cset in revs:
1614 1614 seqno += 1
1615 1615 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1616 1616
1617 1617 def forget(ui, repo, *pats, **opts):
1618 1618 """don't add the specified files on the next commit (DEPRECATED)
1619 1619
1620 1620 (DEPRECATED)
1621 1621 Undo an 'hg add' scheduled for the next commit.
1622 1622
1623 1623 This command is now deprecated and will be removed in a future
1624 1624 release. Please use revert instead.
1625 1625 """
1626 1626 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1627 1627 forget = []
1628 1628 for src, abs, rel, exact in walk(repo, pats, opts):
1629 1629 if repo.dirstate.state(abs) == 'a':
1630 1630 forget.append(abs)
1631 1631 if ui.verbose or not exact:
1632 1632 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1633 1633 repo.forget(forget)
1634 1634
1635 1635 def grep(ui, repo, pattern, *pats, **opts):
1636 1636 """search for a pattern in specified files and revisions
1637 1637
1638 1638 Search revisions of files for a regular expression.
1639 1639
1640 1640 This command behaves differently than Unix grep. It only accepts
1641 1641 Python/Perl regexps. It searches repository history, not the
1642 1642 working directory. It always prints the revision number in which
1643 1643 a match appears.
1644 1644
1645 1645 By default, grep only prints output for the first revision of a
1646 1646 file in which it finds a match. To get it to print every revision
1647 1647 that contains a change in match status ("-" for a match that
1648 1648 becomes a non-match, or "+" for a non-match that becomes a match),
1649 1649 use the --all flag.
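
    For example (hypothetical output), with --all a line that matched in
    revision 3 and stopped matching in revision 7 is reported twice:
    once as "file:7:-:..." and once as "file:3:+:...".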
1650 1650 """
1651 1651 reflags = 0
1652 1652 if opts['ignore_case']:
1653 1653 reflags |= re.I
1654 1654 regexp = re.compile(pattern, reflags)
1655 1655 sep, eol = ':', '\n'
1656 1656 if opts['print0']:
1657 1657 sep = eol = '\0'
1658 1658
1659 1659 fcache = {}
1660 1660 def getfile(fn):
1661 1661 if fn not in fcache:
1662 1662 fcache[fn] = repo.file(fn)
1663 1663 return fcache[fn]
1664 1664
1665 1665 def matchlines(body):
1666 1666 begin = 0
1667 1667 linenum = 0
1668 1668 while True:
1669 1669 match = regexp.search(body, begin)
1670 1670 if not match:
1671 1671 break
1672 1672 mstart, mend = match.span()
1673 1673 linenum += body.count('\n', begin, mstart) + 1
1674 1674 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1675 1675 lend = body.find('\n', mend)
1676 1676 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1677 1677 begin = lend + 1
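
    # Illustration (hypothetical input): for body = "foo\nbar foo\n" and
    # pattern "foo", matchlines yields (1, 0, 3, 'foo') and then
    # (2, 4, 7, 'bar foo'): the line number, the match's start and end
    # columns within that line, and the text of the matched line.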
1678 1678
1679 1679 class linestate(object):
1680 1680 def __init__(self, line, linenum, colstart, colend):
1681 1681 self.line = line
1682 1682 self.linenum = linenum
1683 1683 self.colstart = colstart
1684 1684 self.colend = colend
1685 1685 def __eq__(self, other):
1686 1686 return self.line == other.line
1687 1687 def __hash__(self):
1688 1688 return hash(self.line)
1689 1689
1690 1690 matches = {}
1691 1691 def grepbody(fn, rev, body):
1692 1692 matches[rev].setdefault(fn, {})
1693 1693 m = matches[rev][fn]
1694 1694 for lnum, cstart, cend, line in matchlines(body):
1695 1695 s = linestate(line, lnum, cstart, cend)
1696 1696 m[s] = s
1697 1697
1698 1698 # FIXME: prev isn't used, why?
1699 1699 prev = {}
1700 1700 ucache = {}
1701 1701 def display(fn, rev, states, prevstates):
1702 1702 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1703 1703 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1704 1704 counts = {'-': 0, '+': 0}
1705 1705 filerevmatches = {}
1706 1706 for l in diff:
1707 1707 if incrementing or not opts['all']:
1708 1708 change = ((l in prevstates) and '-') or '+'
1709 1709 r = rev
1710 1710 else:
1711 1711 change = ((l in states) and '-') or '+'
1712 1712 r = prev[fn]
1713 1713 cols = [fn, str(rev)]
1714 1714 if opts['line_number']:
1715 1715 cols.append(str(l.linenum))
1716 1716 if opts['all']:
1717 1717 cols.append(change)
1718 1718 if opts['user']:
1719 1719 cols.append(trimuser(ui, getchange(rev)[1], rev,
1720 1720 ucache))
1721 1721 if opts['files_with_matches']:
1722 1722 c = (fn, rev)
1723 1723 if c in filerevmatches:
1724 1724 continue
1725 1725 filerevmatches[c] = 1
1726 1726 else:
1727 1727 cols.append(l.line)
1728 1728 ui.write(sep.join(cols), eol)
1729 1729 counts[change] += 1
1730 1730 return counts['+'], counts['-']
1731 1731
1732 1732 fstate = {}
1733 1733 skip = {}
1734 1734 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1735 1735 count = 0
1736 1736 incrementing = False
1737 1737 for st, rev, fns in changeiter:
1738 1738 if st == 'window':
1739 1739 incrementing = rev
1740 1740 matches.clear()
1741 1741 elif st == 'add':
1742 1742 change = repo.changelog.read(repo.lookup(str(rev)))
1743 1743 mf = repo.manifest.read(change[0])
1744 1744 matches[rev] = {}
1745 1745 for fn in fns:
1746 1746 if fn in skip:
1747 1747 continue
1748 1748 fstate.setdefault(fn, {})
1749 1749 try:
1750 1750 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1751 1751 except KeyError:
1752 1752 pass
1753 1753 elif st == 'iter':
1754 1754 states = matches[rev].items()
1755 1755 states.sort()
1756 1756 for fn, m in states:
1757 1757 if fn in skip:
1758 1758 continue
1759 1759 if incrementing or not opts['all'] or fstate[fn]:
1760 1760 pos, neg = display(fn, rev, m, fstate[fn])
1761 1761 count += pos + neg
1762 1762 if pos and not opts['all']:
1763 1763 skip[fn] = True
1764 1764 fstate[fn] = m
1765 1765 prev[fn] = rev
1766 1766
1767 1767 if not incrementing:
1768 1768 fstate = fstate.items()
1769 1769 fstate.sort()
1770 1770 for fn, state in fstate:
1771 1771 if fn in skip:
1772 1772 continue
1773 1773 display(fn, rev, {}, state)
1774 1774 return (count == 0 and 1) or 0
1775 1775
1776 1776 def heads(ui, repo, **opts):
1777 1777 """show current repository heads
1778 1778
1779 1779 Show all repository head changesets.
1780 1780
1781 1781 Repository "heads" are changesets that don't have child
1782 1782 changesets. They are where development generally takes place and
1783 1783 are the usual targets for update and merge operations.
1784 1784 """
1785 1785 if opts['rev']:
1786 1786 heads = repo.heads(repo.lookup(opts['rev']))
1787 1787 else:
1788 1788 heads = repo.heads()
1789 1789 br = None
1790 1790 if opts['branches']:
1791 1791 br = repo.branchlookup(heads)
1792 1792 displayer = show_changeset(ui, repo, opts)
1793 1793 for n in heads:
1794 1794 displayer.show(changenode=n, brinfo=br)
1795 1795
1796 1796 def identify(ui, repo):
1797 1797 """print information about the working copy
1798 1798
1799 1799 Print a short summary of the current state of the repo.
1800 1800
1801 1801 This summary identifies the repository state using one or two parent
1802 1802 hash identifiers, followed by a "+" if there are uncommitted changes
1803 1803 in the working directory, followed by a list of tags for this revision.
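
    For example (hypothetical output), "a1b2c3d4e5f6+ tip" means the
    working directory is based on changeset a1b2c3d4e5f6, contains
    uncommitted changes ("+"), and that changeset is tagged "tip".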
1804 1804 """
1805 1805 parents = [p for p in repo.dirstate.parents() if p != nullid]
1806 1806 if not parents:
1807 1807 ui.write(_("unknown\n"))
1808 1808 return
1809 1809
1810 1810 hexfunc = ui.verbose and hex or short
1811 1811 modified, added, removed, deleted, unknown = repo.changes()
1812 1812 output = ["%s%s" %
1813 1813 ('+'.join([hexfunc(parent) for parent in parents]),
1814 1814 (modified or added or removed or deleted) and "+" or "")]
1815 1815
1816 1816 if not ui.quiet:
1817 1817 # multiple tags for a single parent separated by '/'
1818 1818 parenttags = ['/'.join(tags)
1819 1819 for tags in map(repo.nodetags, parents) if tags]
1820 1820 # tags for multiple parents separated by ' + '
1821 1821 if parenttags:
1822 1822 output.append(' + '.join(parenttags))
1823 1823
1824 1824 ui.write("%s\n" % ' '.join(output))
1825 1825
1826 1826 def import_(ui, repo, patch1, *patches, **opts):
1827 1827 """import an ordered set of patches
1828 1828
1829 1829 Import a list of patches and commit them individually.
1830 1830
1831 1831 If there are outstanding changes in the working directory, import
1832 1832 will abort unless given the -f flag.
1833 1833
1834 1834 If a patch looks like a mail message (its first line starts with
1835 1835 "From " or looks like an RFC822 header), it will not be applied
1836 1836 unless the -f option is used. The importer neither parses nor
1837 1837 discards mail headers, so use -f only to override the "mailness"
1838 1838 safety check, not to import a real mail message.
1839 1839 """
1840 1840 patches = (patch1,) + patches
1841 1841
1842 1842 if not opts['force']:
1843 1843 bail_if_changed(repo)
1844 1844
1845 1845 d = opts["base"]
1846 1846 strip = opts["strip"]
1847 1847
1848 1848 mailre = re.compile(r'(?:From |[\w-]+:)')
1849 1849
1850 1850 # attempt to detect the start of a patch
1851 1851 # (this heuristic is borrowed from quilt)
1852 1852 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1853 1853 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1854 1854 '(---|\*\*\*)[ \t])')
1855 1855
1856 1856 for patch in patches:
1857 1857 ui.status(_("applying %s\n") % patch)
1858 1858 pf = os.path.join(d, patch)
1859 1859
1860 1860 message = []
1861 1861 user = None
1862 1862 hgpatch = False
1863 1863 for line in file(pf):
1864 1864 line = line.rstrip()
1865 1865 if (not message and not hgpatch and
1866 1866 mailre.match(line) and not opts['force']):
1867 1867 if len(line) > 35:
1868 1868 line = line[:32] + '...'
1869 1869 raise util.Abort(_('first line looks like a '
1870 1870 'mail header: ') + line)
1871 1871 if diffre.match(line):
1872 1872 break
1873 1873 elif hgpatch:
1874 1874 # parse values when importing the result of an hg export
1875 1875 if line.startswith("# User "):
1876 1876 user = line[7:]
1877 1877 ui.debug(_('User: %s\n') % user)
1878 1878 elif not line.startswith("# ") and line:
1879 1879 message.append(line)
1880 1880 hgpatch = False
1881 1881 elif line == '# HG changeset patch':
1882 1882 hgpatch = True
1883 1883 message = [] # We may have collected garbage
1884 1884 else:
1885 1885 message.append(line)
1886 1886
1887 1887 # make sure message isn't empty
1888 1888 if not message:
1889 1889 message = _("imported patch %s\n") % patch
1890 1890 else:
1891 1891 message = "%s\n" % '\n'.join(message)
1892 1892 ui.debug(_('message:\n%s\n') % message)
1893 1893
1894 1894 files = util.patch(strip, pf, ui)
1895 1895
1896 1896 if len(files) > 0:
1897 1897 addremove(ui, repo, *files)
1898 1898 repo.commit(files, message, user)
1899 1899
1900 1900 def incoming(ui, repo, source="default", **opts):
1901 1901 """show new changesets found in source
1902 1902
1903 1903 Show new changesets found in the specified path/URL or the default
1904 1904 pull location. These are the changesets that would be pulled if a pull
1905 1905 was requested.
1906 1906
1907 1907 For a remote repository, using --bundle avoids downloading the changesets
1908 1908 twice if the incoming command is followed by a pull.
1909 1909
1910 1910 See pull for valid source format details.
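
    For example (hypothetical URL):

      hg incoming --bundle incoming.hg http://example.com/repo
      hg unbundle incoming.hg

    downloads the changesets only once.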
1911 1911 """
1912 1912 source = ui.expandpath(source)
1913 1913 if opts['ssh']:
1914 1914 ui.setconfig("ui", "ssh", opts['ssh'])
1915 1915 if opts['remotecmd']:
1916 1916 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1917 1917
1918 1918 other = hg.repository(ui, source)
1919 1919 incoming = repo.findincoming(other, force=opts["force"])
1920 1920 if not incoming:
1921 1921 ui.status(_("no changes found\n"))
1922 1922 return
1923 1923
1924 1924 cleanup = None
1925 1925 try:
1926 1926 fname = opts["bundle"]
1927 1927 if fname or not other.local():
1928 1928 # create a bundle (uncompressed if other repo is not local)
1929 1929 cg = other.changegroup(incoming, "incoming")
1930 1930 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1931 1931 # keep written bundle?
1932 1932 if opts["bundle"]:
1933 1933 cleanup = None
1934 1934 if not other.local():
1935 1935 # use the created uncompressed bundlerepo
1936 1936 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1937 1937
1938 1938 o = other.changelog.nodesbetween(incoming)[0]
1939 1939 if opts['newest_first']:
1940 1940 o.reverse()
1941 1941 displayer = show_changeset(ui, other, opts)
1942 1942 for n in o:
1943 1943 parents = [p for p in other.changelog.parents(n) if p != nullid]
1944 1944 if opts['no_merges'] and len(parents) == 2:
1945 1945 continue
1946 1946 displayer.show(changenode=n)
1947 1947 if opts['patch']:
1948 1948 prev = (parents and parents[0]) or nullid
1949 1949 dodiff(ui, ui, other, prev, n)
1950 1950 ui.write("\n")
1951 1951 finally:
1952 1952 if hasattr(other, 'close'):
1953 1953 other.close()
1954 1954 if cleanup:
1955 1955 os.unlink(cleanup)
1956 1956
1957 1957 def init(ui, dest="."):
1958 1958 """create a new repository in the given directory
1959 1959
1960 1960 Initialize a new repository in the given directory. If the given
1961 1961 directory does not exist, it is created.
1962 1962
1963 1963 If no directory is given, the current directory is used.
1964 1964 """
1965 1965 if not os.path.exists(dest):
1966 1966 os.mkdir(dest)
1967 1967 hg.repository(ui, dest, create=1)
1968 1968
1969 1969 def locate(ui, repo, *pats, **opts):
1970 1970 """locate files matching specific patterns
1971 1971
1972 1972 Print all files under Mercurial control whose names match the
1973 1973 given patterns.
1974 1974
1975 1975 This command searches the current directory and its
1976 1976 subdirectories. To search an entire repository, move to the root
1977 1977 of the repository.
1978 1978
1979 1979 If no patterns are given to match, this command prints all file
1980 1980 names.
1981 1981
1982 1982 If you want to feed the output of this command into the "xargs"
1983 1983 command, use the "-0" option to both this command and "xargs".
1984 1984 This will avoid the problem of "xargs" treating single filenames
1985 1985 that contain white space as multiple filenames.
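
    For example:

      hg locate -0 | xargs -0 wc -l

    counts the lines of every tracked file, even if a file name contains
    whitespace.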
1986 1986 """
1987 1987 end = opts['print0'] and '\0' or '\n'
1988 1988 rev = opts['rev']
1989 1989 if rev:
1990 1990 node = repo.lookup(rev)
1991 1991 else:
1992 1992 node = None
1993 1993
1994 1994 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1995 1995 head='(?:.*/|)'):
1996 1996 if not node and repo.dirstate.state(abs) == '?':
1997 1997 continue
1998 1998 if opts['fullpath']:
1999 1999 ui.write(os.path.join(repo.root, abs), end)
2000 2000 else:
2001 2001 ui.write(((pats and rel) or abs), end)
2002 2002
2003 2003 def log(ui, repo, *pats, **opts):
2004 2004 """show revision history of entire repository or files
2005 2005
2006 2006 Print the revision history of the specified files or the entire project.
2007 2007
2008 2008 By default this command outputs: changeset id and hash, tags,
2009 2009 non-trivial parents, user, date and time, and a summary for each
2010 2010 commit. When the -v/--verbose switch is used, the list of changed
2011 2011 files and the full commit message are shown.
2012 2012 """
2013 2013 class dui(object):
2014 2014 # Implement and delegate some ui protocol. Save hunks of
2015 2015 # output for later display in the desired order.
2016 2016 def __init__(self, ui):
2017 2017 self.ui = ui
2018 2018 self.hunk = {}
2019 2019 self.header = {}
2020 2020 def bump(self, rev):
2021 2021 self.rev = rev
2022 2022 self.hunk[rev] = []
2023 2023 self.header[rev] = []
2024 2024 def note(self, *args):
2025 2025 if self.verbose:
2026 2026 self.write(*args)
2027 2027 def status(self, *args):
2028 2028 if not self.quiet:
2029 2029 self.write(*args)
2030 2030 def write(self, *args):
2031 2031 self.hunk[self.rev].append(args)
2032 2032 def write_header(self, *args):
2033 2033 self.header[self.rev].append(args)
2034 2034 def debug(self, *args):
2035 2035 if self.debugflag:
2036 2036 self.write(*args)
2037 2037 def __getattr__(self, key):
2038 2038 return getattr(self.ui, key)
2039 2039
2040 2040 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2041 2041
2042 2042 if opts['limit']:
2043 2043 try:
2044 2044 limit = int(opts['limit'])
2045 2045 except ValueError:
2046 2046 raise util.Abort(_('limit must be a positive integer'))
2047 2047 if limit <= 0: raise util.Abort(_('limit must be positive'))
2048 2048 else:
2049 2049 limit = sys.maxint
2050 2050 count = 0
2051 2051
2052 2052 displayer = show_changeset(ui, repo, opts)
2053 2053 for st, rev, fns in changeiter:
2054 2054 if st == 'window':
2055 2055 du = dui(ui)
2056 2056 displayer.ui = du
2057 2057 elif st == 'add':
2058 2058 du.bump(rev)
2059 2059 changenode = repo.changelog.node(rev)
2060 2060 parents = [p for p in repo.changelog.parents(changenode)
2061 2061 if p != nullid]
2062 2062 if opts['no_merges'] and len(parents) == 2:
2063 2063 continue
2064 2064 if opts['only_merges'] and len(parents) != 2:
2065 2065 continue
2066 2066
2067 2067 if opts['keyword']:
2068 2068 changes = getchange(rev)
2069 2069 miss = 0
2070 2070 for k in [kw.lower() for kw in opts['keyword']]:
2071 2071 if not (k in changes[1].lower() or
2072 2072 k in changes[4].lower() or
2073 2073 k in " ".join(changes[3][:20]).lower()):
2074 2074 miss = 1
2075 2075 break
2076 2076 if miss:
2077 2077 continue
2078 2078
2079 2079 br = None
2080 2080 if opts['branches']:
2081 2081 br = repo.branchlookup([repo.changelog.node(rev)])
2082 2082
2083 2083 displayer.show(rev, brinfo=br)
2084 2084 if opts['patch']:
2085 2085 prev = (parents and parents[0]) or nullid
2086 2086 dodiff(du, du, repo, prev, changenode, match=matchfn)
2087 2087 du.write("\n\n")
2088 2088 elif st == 'iter':
2089 2089 if count == limit: break
2090 2090 if du.header[rev]:
2091 2091 for args in du.header[rev]:
2092 2092 ui.write_header(*args)
2093 2093 if du.hunk[rev]:
2094 2094 count += 1
2095 2095 for args in du.hunk[rev]:
2096 2096 ui.write(*args)
2097 2097
2098 2098 def manifest(ui, repo, rev=None):
2099 2099 """output the latest or given revision of the project manifest
2100 2100
2101 2101 Print a list of version controlled files for the given revision.
2102 2102
2103 2103 The manifest is the list of files being version controlled. If no revision
2104 2104 is given then the tip is used.
2105 2105 """
2106 2106 if rev:
2107 2107 try:
2108 2108 # assume all revision numbers are for changesets
2109 2109 n = repo.lookup(rev)
2110 2110 change = repo.changelog.read(n)
2111 2111 n = change[0]
2112 2112 except hg.RepoError:
2113 2113 n = repo.manifest.lookup(rev)
2114 2114 else:
2115 2115 n = repo.manifest.tip()
2116 2116 m = repo.manifest.read(n)
2117 2117 mf = repo.manifest.readflags(n)
2118 2118 files = m.keys()
2119 2119 files.sort()
2120 2120
2121 2121 for f in files:
2122 2122 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2123 2123
2124 2124 def merge(ui, repo, node=None, **opts):
2125 2125 """Merge working directory with another revision
2126 2126
2127 2127 Merge the contents of the current working directory and the
2128 2128 requested revision. Files that changed between either parent are
2129 2129 marked as changed for the next commit and a commit must be
2130 2130 performed before any further updates are allowed.
2131 2131 """
2132 2132 return update(ui, repo, node=node, merge=True, **opts)
2133 2133
2134 2134 def outgoing(ui, repo, dest="default-push", **opts):
2135 2135 """show changesets not found in destination
2136 2136
2137 2137 Show changesets not found in the specified destination repository or
2138 2138 the default push location. These are the changesets that would be pushed
2139 2139 if a push was requested.
2140 2140
2141 2141 See pull for valid destination format details.
2142 2142 """
2143 2143 dest = ui.expandpath(dest)
2144 2144 if opts['ssh']:
2145 2145 ui.setconfig("ui", "ssh", opts['ssh'])
2146 2146 if opts['remotecmd']:
2147 2147 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2148 2148
2149 2149 other = hg.repository(ui, dest)
2150 2150 o = repo.findoutgoing(other, force=opts['force'])
2151 2151 if not o:
2152 2152 ui.status(_("no changes found\n"))
2153 2153 return
2154 2154 o = repo.changelog.nodesbetween(o)[0]
2155 2155 if opts['newest_first']:
2156 2156 o.reverse()
2157 2157 displayer = show_changeset(ui, repo, opts)
2158 2158 for n in o:
2159 2159 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2160 2160 if opts['no_merges'] and len(parents) == 2:
2161 2161 continue
2162 2162 displayer.show(changenode=n)
2163 2163 if opts['patch']:
2164 2164 prev = (parents and parents[0]) or nullid
2165 2165 dodiff(ui, ui, repo, prev, n)
2166 2166 ui.write("\n")
2167 2167
2168 2168 def parents(ui, repo, rev=None, branches=None, **opts):
2169 2169 """show the parents of the working dir or revision
2170 2170
2171 2171 Print the working directory's parent revisions.
2172 2172 """
2173 2173 if rev:
2174 2174 p = repo.changelog.parents(repo.lookup(rev))
2175 2175 else:
2176 2176 p = repo.dirstate.parents()
2177 2177
2178 2178 br = None
2179 2179 if branches is not None:
2180 2180 br = repo.branchlookup(p)
2181 2181 displayer = show_changeset(ui, repo, opts)
2182 2182 for n in p:
2183 2183 if n != nullid:
2184 2184 displayer.show(changenode=n, brinfo=br)
2185 2185
2186 2186 def paths(ui, repo, search=None):
2187 2187 """show definition of symbolic path names
2188 2188
2189 2189 Show definition of symbolic path name NAME. If no name is given, show
2190 2190 definition of available names.
2191 2191
2192 2192 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2193 2193 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2194 2194 """
2195 2195 if search:
2196 2196 for name, path in ui.configitems("paths"):
2197 2197 if name == search:
2198 2198 ui.write("%s\n" % path)
2199 2199 return
2200 2200 ui.warn(_("not found!\n"))
2201 2201 return 1
2202 2202 else:
2203 2203 for name, path in ui.configitems("paths"):
2204 2204 ui.write("%s = %s\n" % (name, path))
2205 2205
2206 2206 def postincoming(ui, repo, modheads, optupdate):
2207 2207 if modheads == 0:
2208 2208 return
2209 2209 if optupdate:
2210 2210 if modheads == 1:
2211 2211 return update(ui, repo)
2212 2212 else:
2213 2213 ui.status(_("not updating, since new heads added\n"))
2214 2214 if modheads > 1:
2215 2215 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2216 2216 else:
2217 2217 ui.status(_("(run 'hg update' to get a working copy)\n"))
2218 2218
2219 2219 def pull(ui, repo, source="default", **opts):
2220 2220 """pull changes from the specified source
2221 2221
2222 2222 Pull changes from a remote repository to a local one.
2223 2223
2224 2224 This finds all changes from the repository at the specified path
2225 2225 or URL and adds them to the local repository. By default, this
2226 2226 does not update the copy of the project in the working directory.
2227 2227
2228 2228 Valid URLs are of the form:
2229 2229
2230 2230 local/filesystem/path
2231 2231 http://[user@]host[:port][/path]
2232 2232 https://[user@]host[:port][/path]
2233 2233 ssh://[user@]host[:port][/path]
2234 2234
2235 2235 Some notes about using SSH with Mercurial:
2236 2236 - SSH requires an accessible shell account on the destination machine
2237 2237 and a copy of hg in the remote path, or one specified with --remotecmd.
2238 2238 - /path is relative to the remote user's home directory by default.
2239 2239 Use two slashes at the start of a path to specify an absolute path.
2240 2240 - Mercurial doesn't use its own compression via SSH; the right thing
2241 2241 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2242 2242 Host *.mylocalnetwork.example.com
2243 2243 Compression off
2244 2244 Host *
2245 2245 Compression on
2246 2246 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2247 2247 with the --ssh command line option.
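
    For example (hypothetical host and path):

      hg pull ssh://user@example.com//absolute/path/to/repo

    pulls over ssh from an absolute path on the remote machine (note the
    double slash after the host).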
2248 2248 """
2249 2249 source = ui.expandpath(source)
2250 2250 ui.status(_('pulling from %s\n') % (source))
2251 2251
2252 2252 if opts['ssh']:
2253 2253 ui.setconfig("ui", "ssh", opts['ssh'])
2254 2254 if opts['remotecmd']:
2255 2255 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2256 2256
2257 2257 other = hg.repository(ui, source)
2258 2258 revs = None
2259 2259 if opts['rev'] and not other.local():
2260 2260 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2261 2261 elif opts['rev']:
2262 2262 revs = [other.lookup(rev) for rev in opts['rev']]
2263 2263 modheads = repo.pull(other, heads=revs, force=opts['force'])
2264 2264 return postincoming(ui, repo, modheads, opts['update'])
2265 2265
2266 2266 def push(ui, repo, dest="default-push", **opts):
2267 2267 """push changes to the specified destination
2268 2268
2269 2269 Push changes from the local repository to the given destination.
2270 2270
2271 2271 This is the symmetrical operation for pull. It helps to move
2272 2272 changes from the current repository to a different one. If the
2273 2273 destination is local this is identical to a pull in that directory
2274 2274 from the current one.
2275 2275
2276 2276 By default, push will refuse to run if it detects the result would
2277 2277 increase the number of remote heads. This generally indicates that
2278 2278 the client has forgotten to sync and merge before pushing.
2279 2279
2280 2280 Valid URLs are of the form:
2281 2281
2282 2282 local/filesystem/path
2283 2283 ssh://[user@]host[:port][/path]
2284 2284
2285 2285 Look at the help text for the pull command for important details
2286 2286 about ssh:// URLs.
2287 2287 """
2288 2288 dest = ui.expandpath(dest)
2289 2289 ui.status(_('pushing to %s\n') % dest)
2290 2290
2291 2291 if opts['ssh']:
2292 2292 ui.setconfig("ui", "ssh", opts['ssh'])
2293 2293 if opts['remotecmd']:
2294 2294 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2295 2295
2296 2296 other = hg.repository(ui, dest)
2297 2297 revs = None
2298 2298 if opts['rev']:
2299 2299 revs = [repo.lookup(rev) for rev in opts['rev']]
2300 2300 r = repo.push(other, opts['force'], revs=revs)
2301 2301 return r == 0
2302 2302
2303 2303 def rawcommit(ui, repo, *flist, **rc):
2304 2304 """raw commit interface (DEPRECATED)
2305 2305
2306 2306 (DEPRECATED)
2307 2307 Lowlevel commit, for use in helper scripts.
2308 2308
2309 2309 This command is not intended to be used by normal users, as it is
2310 2310 primarily useful for importing from other SCMs.
2311 2311
2312 2312 This command is now deprecated and will be removed in a future
2313 2313 release; please use debugsetparents and commit instead.
2314 2314 """
2315 2315
2316 2316 ui.warn(_("(the rawcommit command is deprecated)\n"))
2317 2317
2318 2318 message = rc['message']
2319 2319 if not message and rc['logfile']:
2320 2320 try:
2321 2321 message = open(rc['logfile']).read()
2322 2322 except IOError:
2323 2323 pass
2324 2324 if not message and not rc['logfile']:
2325 2325 raise util.Abort(_("missing commit message"))
2326 2326
2327 2327 files = relpath(repo, list(flist))
2328 2328 if rc['files']:
2329 2329 files += open(rc['files']).read().splitlines()
2330 2330
2331 2331 rc['parent'] = map(repo.lookup, rc['parent'])
2332 2332
2333 2333 try:
2334 2334 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2335 2335 except ValueError, inst:
2336 2336 raise util.Abort(str(inst))
2337 2337
2338 2338 def recover(ui, repo):
2339 2339 """roll back an interrupted transaction
2340 2340
2341 2341 Recover from an interrupted commit or pull.
2342 2342
2343 2343 This command tries to fix the repository status after an interrupted
2344 2344 operation. It should only be necessary when Mercurial suggests it.
2345 2345 """
2346 2346 if repo.recover():
2347 2347 return repo.verify()
2348 2348 return 1
2349 2349
2350 2350 def remove(ui, repo, pat, *pats, **opts):
2351 2351 """remove the specified files on the next commit
2352 2352
2353 2353 Schedule the indicated files for removal from the repository.
2354 2354
2355 2355 This command schedules the files to be removed at the next commit.
2356 2356 This only removes files from the current branch, not from the
2357 2357 entire project history. If the files still exist in the working
2358 2358 directory, they will be deleted from it.
2359 2359 """
2360 2360 names = []
2361 2361 def okaytoremove(abs, rel, exact):
2362 2362 modified, added, removed, deleted, unknown = repo.changes(files=[abs])
2363 2363 reason = None
2364 2364 if modified and not opts['force']:
2365 2365 reason = _('is modified')
2366 2366 elif added:
2367 2367 reason = _('has been marked for add')
2368 2368 elif unknown:
2369 2369 reason = _('is not managed')
2370 2370 if reason:
2371 2371 if exact:
2372 2372 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2373 2373 else:
2374 2374 return True
2375 2375 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
2376 2376 if okaytoremove(abs, rel, exact):
2377 2377 if ui.verbose or not exact:
2378 2378 ui.status(_('removing %s\n') % rel)
2379 2379 names.append(abs)
2380 2380 repo.remove(names, unlink=True)
2381 2381
2382 2382 def rename(ui, repo, *pats, **opts):
2383 2383 """rename files; equivalent of copy + remove
2384 2384
2385 2385 Mark dest as copies of sources; mark sources for deletion. If
2386 2386 dest is a directory, copies are put in that directory. If dest is
2387 2387 a file, there can only be one source.
2388 2388
2389 2389 By default, this command copies the contents of files as they
2390 2390 stand in the working directory. If invoked with --after, the
2391 2391 operation is recorded, but no copying is performed.
2392 2392
2393 2393 This command takes effect in the next commit.
2394 2394
2395 2395 NOTE: This command should be treated as experimental. While it
2396 2396 should properly record renamed files, this information is not yet
2397 2397 fully used by merge, nor fully reported by log.
2398 2398 """
2399 2399 wlock = repo.wlock(0)
2400 2400 errs, copied = docopy(ui, repo, pats, opts, wlock)
2401 2401 names = []
2402 2402 for abs, rel, exact in copied:
2403 2403 if ui.verbose or not exact:
2404 2404 ui.status(_('removing %s\n') % rel)
2405 2405 names.append(abs)
2406 2406 repo.remove(names, True, wlock)
2407 2407 return errs
2408 2408
2409 2409 def revert(ui, repo, *pats, **opts):
2410 2410 """revert modified files or dirs back to their unmodified states
2411 2411
2412 2412 In its default mode, it reverts any uncommitted modifications made
2413 2413 to the named files or directories. This restores the contents of
2414 2414 the affected files to an unmodified state.
2415 2415
2416 2416 Modified files are saved with a .orig suffix before reverting.
2417 2417 To disable these backups, use --no-backup.
2418 2418
2419 2419 Using the -r option, it reverts the given files or directories to
2420 2420 their state as of an earlier revision. This can be helpful to "roll
2421 2421 back" some or all of a change that should not have been committed.
2422 2422
2423 2423 Revert modifies the working directory. It does not commit any
2424 2424 changes, or change the parent of the current working directory.
2425 2425
2426 2426 If a file has been deleted, it is recreated. If the executable
2427 2427 mode of a file was changed, it is reset.
2428 2428
2429 2429 If names are given, all files matching the names are reverted.
2430 2430
2431 2431 If no arguments are given, all files in the repository are reverted.
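
    For example (hypothetical file name), "hg revert foo.c" discards any
    uncommitted changes to foo.c, and "hg revert -r 10 foo.c" restores it
    to its contents as of revision 10, saving the current version as
    foo.c.orig unless --no-backup is given.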
2432 2432 """
2433 2433 parent = repo.dirstate.parents()[0]
2434 2434 node = opts['rev'] and repo.lookup(opts['rev']) or parent
2435 2435 mf = repo.manifest.read(repo.changelog.read(node)[0])
2436 2436
2437 2437 wlock = repo.wlock()
2438 2438
2439 2439 # need all matching names in dirstate and manifest of target rev,
2440 2440 # so have to walk both. do not print errors if files exist in one
2441 2441 # but not other.
2442 2442
2443 2443 names = {}
2444 2444 target_only = {}
2445 2445
2446 2446 # walk dirstate.
2447 2447
2448 2448 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2449 2449 names[abs] = (rel, exact)
2450 2450 if src == 'b':
2451 2451 target_only[abs] = True
2452 2452
2453 2453 # walk target manifest.
2454 2454
2455 2455 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2456 2456 badmatch=names.has_key):
2457 2457 if abs in names: continue
2458 2458 names[abs] = (rel, exact)
2459 2459 target_only[abs] = True
2460 2460
2461 2461 changes = repo.changes(match=names.has_key, wlock=wlock)
2462 2462 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2463 2463
2464 2464 revert = ([], _('reverting %s\n'))
2465 2465 add = ([], _('adding %s\n'))
2466 2466 remove = ([], _('removing %s\n'))
2467 2467 forget = ([], _('forgetting %s\n'))
2468 2468 undelete = ([], _('undeleting %s\n'))
2469 2469 update = {}
2470 2470
2471 2471 disptable = (
2472 2472 # dispatch table:
2473 2473 # file state
2474 2474 # action if in target manifest
2475 2475 # action if not in target manifest
2476 2476 # make backup if in target manifest
2477 2477 # make backup if not in target manifest
2478 2478 (modified, revert, remove, True, True),
2479 2479 (added, revert, forget, True, False),
2480 2480 (removed, undelete, None, False, False),
2481 2481 (deleted, revert, remove, False, False),
2482 2482 (unknown, add, None, True, False),
2483 2483 (target_only, add, None, False, False),
2484 2484 )
2485 2485
2486 2486 entries = names.items()
2487 2487 entries.sort()
2488 2488
2489 2489 for abs, (rel, exact) in entries:
2490 2490 in_mf = abs in mf
2491 2491 def handle(xlist, dobackup):
2492 2492 xlist[0].append(abs)
2493 2493 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2494 2494 bakname = "%s.orig" % rel
2495 2495 ui.note(_('saving current version of %s as %s\n') %
2496 2496 (rel, bakname))
2497 2497 shutil.copyfile(rel, bakname)
2498 2498 shutil.copymode(rel, bakname)
2499 2499 if ui.verbose or not exact:
2500 2500 ui.status(xlist[1] % rel)
2501 2501 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2502 2502 if abs not in table: continue
2503 2503 # file has changed in dirstate
2504 2504 if in_mf:
2505 2505 handle(hitlist, backuphit)
2506 2506 elif misslist is not None:
2507 2507 handle(misslist, backupmiss)
2508 2508 else:
2509 2509 if exact: ui.warn(_('file not managed: %s\n') % rel)
2510 2510 break
2511 2511 else:
2512 2512 # file has not changed in dirstate
2513 2513 if node == parent:
2514 2514 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2515 2515 continue
2516 2516 if not in_mf:
2517 2517 handle(remove, False)
2518 2518 update[abs] = True
2519 2519
2520 2520 repo.dirstate.forget(forget[0])
2521 2521 r = repo.update(node, False, True, update.has_key, False, wlock=wlock)
2522 2522 repo.dirstate.update(add[0], 'a')
2523 2523 repo.dirstate.update(undelete[0], 'n')
2524 2524 repo.dirstate.update(remove[0], 'r')
2525 2525 return r
2526 2526
2527 2527 def root(ui, repo):
2528 2528 """print the root (top) of the current working dir
2529 2529
2530 2530 Print the root directory of the current repository.
2531 2531 """
2532 2532 ui.write(repo.root + "\n")
2533 2533
2534 2534 def serve(ui, repo, **opts):
2535 2535 """export the repository via HTTP
2536 2536
2537 2537 Start a local HTTP repository browser and pull server.
2538 2538
2539 2539 By default, the server logs accesses to stdout and errors to
2540 2540 stderr. Use the "-A" and "-E" options to log to files.
2541 2541 """
2542 2542
2543 2543 if opts["stdio"]:
2544 2544 if repo is None:
2545 2545 raise hg.RepoError(_('no repo found'))
2546 2546 fin, fout = sys.stdin, sys.stdout
2547 2547 sys.stdout = sys.stderr
2548 2548
2549 2549 # Prevent insertion/deletion of CRs
2550 2550 util.set_binary(fin)
2551 2551 util.set_binary(fout)
2552 2552
2553 2553 def getarg():
2554 2554 argline = fin.readline()[:-1]
2555 2555 arg, l = argline.split()
2556 2556 val = fin.read(int(l))
2557 2557 return arg, val
2558 2558 def respond(v):
2559 2559 fout.write("%d\n" % len(v))
2560 2560 fout.write(v)
2561 2561 fout.flush()
2562 2562
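
        # Wire format sketch: each request argument arrives as a line
        # "<name> <length>" followed by <length> bytes of value (getarg),
        # and each reply is sent as the value's length on its own line
        # followed by the value itself (respond).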
2563 2563 lock = None
2564 2564
2565 2565 while 1:
2566 2566 cmd = fin.readline()[:-1]
2567 2567 if cmd == '':
2568 2568 return
2569 2569 if cmd == "heads":
2570 2570 h = repo.heads()
2571 2571 respond(" ".join(map(hex, h)) + "\n")
2572 2572 if cmd == "lock":
2573 2573 lock = repo.lock()
2574 2574 respond("")
2575 2575 if cmd == "unlock":
2576 2576 if lock:
2577 2577 lock.release()
2578 2578 lock = None
2579 2579 respond("")
2580 2580 elif cmd == "branches":
2581 2581 arg, nodes = getarg()
2582 2582 nodes = map(bin, nodes.split(" "))
2583 2583 r = []
2584 2584 for b in repo.branches(nodes):
2585 2585 r.append(" ".join(map(hex, b)) + "\n")
2586 2586 respond("".join(r))
2587 2587 elif cmd == "between":
2588 2588 arg, pairs = getarg()
2589 2589 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2590 2590 r = []
2591 2591 for b in repo.between(pairs):
2592 2592 r.append(" ".join(map(hex, b)) + "\n")
2593 2593 respond("".join(r))
2594 2594 elif cmd == "changegroup":
2595 2595 nodes = []
2596 2596 arg, roots = getarg()
2597 2597 nodes = map(bin, roots.split(" "))
2598 2598
2599 2599 cg = repo.changegroup(nodes, 'serve')
2600 2600 while 1:
2601 2601 d = cg.read(4096)
2602 2602 if not d:
2603 2603 break
2604 2604 fout.write(d)
2605 2605
2606 2606 fout.flush()
2607 2607
2608 2608 elif cmd == "addchangegroup":
2609 2609 if not lock:
2610 2610 respond("not locked")
2611 2611 continue
2612 2612 respond("")
2613 2613
2614 2614 r = repo.addchangegroup(fin)
2615 2615 respond(str(r))
2616 2616
2617 2617 optlist = ("name templates style address port ipv6"
2618 2618 " accesslog errorlog webdir_conf")
2619 2619 for o in optlist.split():
2620 2620 if opts[o]:
2621 2621 ui.setconfig("web", o, opts[o])
2622 2622
2623 2623 if repo is None and not ui.config("web", "webdir_conf"):
2624 2624 raise hg.RepoError(_('no repo found'))
2625 2625
2626 2626 if opts['daemon'] and not opts['daemon_pipefds']:
2627 2627 rfd, wfd = os.pipe()
2628 2628 args = sys.argv[:]
2629 2629 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2630 2630 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2631 2631 args[0], args)
2632 2632 os.close(wfd)
2633 2633 os.read(rfd, 1)
2634 2634 os._exit(0)
2635 2635
2636 2636 try:
2637 2637 httpd = hgweb.create_server(ui, repo)
2638 2638 except socket.error, inst:
2639 2639 raise util.Abort(_('cannot start server: ') + inst.args[1])
2640 2640
2641 2641 if ui.verbose:
2642 2642 addr, port = httpd.socket.getsockname()
2643 2643 if addr == '0.0.0.0':
2644 2644 addr = socket.gethostname()
2645 2645 else:
2646 2646 try:
2647 2647 addr = socket.gethostbyaddr(addr)[0]
2648 2648 except socket.error:
2649 2649 pass
2650 2650 if port != 80:
2651 2651 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2652 2652 else:
2653 2653 ui.status(_('listening at http://%s/\n') % addr)
2654 2654
2655 2655 if opts['pid_file']:
2656 2656 fp = open(opts['pid_file'], 'w')
2657 2657 fp.write(str(os.getpid()))
2658 2658 fp.close()
2659 2659
2660 2660 if opts['daemon_pipefds']:
2661 2661 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2662 2662 os.close(rfd)
2663 2663 os.write(wfd, 'y')
2664 2664 os.close(wfd)
2665 2665 sys.stdout.flush()
2666 2666 sys.stderr.flush()
2667 2667 fd = os.open(util.nulldev, os.O_RDWR)
2668 2668 if fd != 0: os.dup2(fd, 0)
2669 2669 if fd != 1: os.dup2(fd, 1)
2670 2670 if fd != 2: os.dup2(fd, 2)
2671 2671 if fd not in (0, 1, 2): os.close(fd)
2672 2672
2673 2673 httpd.serve_forever()
2674 2674
2675 2675 def status(ui, repo, *pats, **opts):
2676 2676 """show changed files in the working directory
2677 2677
2678 2678 Show changed files in the repository. If names are
2679 2679 given, only files that match are shown.
2680 2680
2681 2681 The codes used to show the status of files are:
2682 2682 M = modified
2683 2683 A = added
2684 2684 R = removed
2685 2685 ! = deleted, but still tracked
2686 2686 ? = not tracked
2687 2687 I = ignored (not shown by default)
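
    For example (hypothetical output):
      M src/main.c
      ? notes.txt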
2688 2688 """
2689 2689
2690 2690 show_ignored = opts['ignored'] and True or False
2691 2691 files, matchfn, anypats = matchpats(repo, pats, opts)
2692 2692 cwd = (pats and repo.getcwd()) or ''
2693 2693 modified, added, removed, deleted, unknown, ignored = [
2694 2694 [util.pathto(cwd, x) for x in n]
2695 2695 for n in repo.changes(files=files, match=matchfn,
2696 2696 show_ignored=show_ignored)]
2697 2697
2698 2698 changetypes = [('modified', 'M', modified),
2699 2699 ('added', 'A', added),
2700 2700 ('removed', 'R', removed),
2701 2701 ('deleted', '!', deleted),
2702 2702 ('unknown', '?', unknown),
2703 2703 ('ignored', 'I', ignored)]
2704 2704
2705 2705 end = opts['print0'] and '\0' or '\n'
2706 2706
2707 2707 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2708 2708 or changetypes):
2709 2709 if opts['no_status']:
2710 2710 format = "%%s%s" % end
2711 2711 else:
2712 2712 format = "%s %%s%s" % (char, end)
2713 2713
2714 2714 for f in changes:
2715 2715 ui.write(format % f)
2716 2716
2717 2717 def tag(ui, repo, name, rev_=None, **opts):
2718 2718 """add a tag for the current tip or a given revision
2719 2719
2720 2720 Name a particular revision using <name>.
2721 2721
2722 2722 Tags are used to name particular revisions of the repository and are
2723 2723 very useful to compare different revisions, to go back to significant
2724 2724 earlier versions or to mark branch points as releases, etc.
2725 2725
2726 2726 If no revision is given, the tip is used.
2727 2727
2728 2728 To facilitate version control, distribution, and merging of tags,
2729 2729 they are stored as a file named ".hgtags" which is managed
2730 2730 similarly to other project files and can be hand-edited if
2731 2731 necessary. The file '.hg/localtags' is used for local tags (not
2732 2732 shared among repositories).
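
    For example (hypothetical tag names), "hg tag -r 3 v1.0" records the
    tag v1.0 for revision 3 in .hgtags, while "hg tag -l snapshot" creates
    a local-only tag in .hg/localtags.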
2733 2733 """
2734 2734 if name == "tip":
2735 2735 raise util.Abort(_("the name 'tip' is reserved"))
2736 2736 if rev_ is not None:
2737 2737 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2738 2738 "please use 'hg tag [-r REV] NAME' instead\n"))
2739 2739 if opts['rev']:
2740 2740 raise util.Abort(_("use only one form to specify the revision"))
2741 2741 if opts['rev']:
2742 2742 rev_ = opts['rev']
2743 2743 if rev_:
2744 2744 r = hex(repo.lookup(rev_))
2745 2745 else:
2746 2746 r = hex(repo.changelog.tip())
2747 2747
2748 2748 disallowed = (revrangesep, '\r', '\n')
2749 2749 for c in disallowed:
2750 2750 if name.find(c) >= 0:
2751 2751 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2752 2752
2753 2753 repo.hook('pretag', throw=True, node=r, tag=name,
2754 2754 local=int(not not opts['local']))
2755 2755
2756 2756 if opts['local']:
2757 2757 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2758 2758 repo.hook('tag', node=r, tag=name, local=1)
2759 2759 return
2760 2760
2761 2761 for x in repo.changes():
2762 2762 if ".hgtags" in x:
2763 2763 raise util.Abort(_("working copy of .hgtags is changed "
2764 2764 "(please commit .hgtags manually)"))
2765 2765
2766 2766 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2767 2767 if repo.dirstate.state(".hgtags") == '?':
2768 2768 repo.add([".hgtags"])
2769 2769
2770 2770 message = (opts['message'] or
2771 2771 _("Added tag %s for changeset %s") % (name, r))
2772 2772 try:
2773 2773 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2774 2774 repo.hook('tag', node=r, tag=name, local=0)
2775 2775 except ValueError, inst:
2776 2776 raise util.Abort(str(inst))
2777 2777
2778 2778 def tags(ui, repo):
2779 2779 """list repository tags
2780 2780
2781 2781 List the repository tags.
2782 2782
2783 2783 This lists both regular and local tags.
2784 2784 """
2785 2785
2786 2786 l = repo.tagslist()
2787 2787 l.reverse()
2788 2788 for t, n in l:
2789 2789 try:
2790 2790 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2791 2791 except KeyError:
2792 2792 r = " ?:?"
2793 2793 if ui.quiet:
2794 2794 ui.write("%s\n" % t)
2795 2795 else:
2796 2796 ui.write("%-30s %s\n" % (t, r))
2797 2797
2798 2798 def tip(ui, repo, **opts):
2799 2799 """show the tip revision
2800 2800
2801 2801 Show the tip revision.
2802 2802 """
2803 2803 n = repo.changelog.tip()
2804 2804 br = None
2805 2805 if opts['branches']:
2806 2806 br = repo.branchlookup([n])
2807 2807 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2808 2808 if opts['patch']:
2809 2809 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2810 2810
2811 2811 def unbundle(ui, repo, fname, **opts):
2812 2812 """apply a changegroup file
2813 2813
2814 2814 Apply a compressed changegroup file generated by the bundle
2815 2815 command.
2816 2816 """
2817 2817 f = urllib.urlopen(fname)
2818 2818
2819 2819 header = f.read(6)
2820 2820 if not header.startswith("HG"):
2821 2821 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2822 2822 elif not header.startswith("HG10"):
2823 2823 raise util.Abort(_("%s: unknown bundle version") % fname)
2824 2824 elif header == "HG10BZ":
2825 2825 def generator(f):
2826 2826 zd = bz2.BZ2Decompressor()
2827 2827 zd.decompress("BZ")
2828 2828 for chunk in f:
2829 2829 yield zd.decompress(chunk)
2830 2830 elif header == "HG10UN":
2831 2831 def generator(f):
2832 2832 for chunk in f:
2833 2833 yield chunk
2834 2834 else:
2835 2835 raise util.Abort(_("%s: unknown bundle compression type")
2836 2836 % fname)
2837 2837 gen = generator(util.filechunkiter(f, 4096))
2838 2838 modheads = repo.addchangegroup(util.chunkbuffer(gen))
2839 2839 return postincoming(ui, repo, modheads, opts['update'])
2840 2840
2841 2841 def undo(ui, repo):
2842 2842 """undo the last commit or pull
2843 2843
2844 2844 Roll back the last pull or commit transaction on the
2845 2845 repository, restoring the project to its earlier state.
2846 2846
2847 2847 This command should be used with care. There is only one level of
2848 2848 undo and there is no redo.
2849 2849
2850 2850 This command is not intended for use on public repositories. Once
2851 2851 a change is visible for pull by other users, undoing it locally is
2852 2852 ineffective. Furthermore, a race is possible with readers of the
2853 2853 repository; for example, an ongoing pull from the repository may
2854 2854 fail and roll back.
2855 2855 """
2856 2856 repo.undo()
2857 2857
2858 2858 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2859 2859 branch=None, **opts):
2860 2860 """update or merge working directory
2861 2861
2862 2862 Update the working directory to the specified revision.
2863 2863
2864 2864 If there are no outstanding changes in the working directory and
2865 2865 there is a linear relationship between the current version and the
2866 2866 requested version, the result is the requested version.
2867 2867
2868 2868 Otherwise the result is a merge between the contents of the
2869 2869 current working directory and the requested version. Files that
2870 2870 changed between either parent are marked as changed for the next
2871 2871 commit and a commit must be performed before any further updates
2872 2872 are allowed.
2873 2873
2874 2874 By default, update will refuse to run if doing so would require
2875 2875 merging or discarding local changes.
2876 2876 """
2877 2877 if branch:
2878 2878 br = repo.branchlookup(branch=branch)
2879 2879 found = []
2880 2880 for x in br:
2881 2881 if branch in br[x]:
2882 2882 found.append(x)
2883 2883 if len(found) > 1:
2884 2884 ui.warn(_("Found multiple heads for %s\n") % branch)
2885 2885 for x in found:
2886 2886 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2887 2887 return 1
2888 2888 if len(found) == 1:
2889 2889 node = found[0]
2890 2890 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2891 2891 else:
2892 2892 ui.warn(_("branch %s not found\n") % (branch))
2893 2893 return 1
2894 2894 else:
2895 2895 node = node and repo.lookup(node) or repo.changelog.tip()
2896 2896 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2897 2897
2898 2898 def verify(ui, repo):
2899 2899 """verify the integrity of the repository
2900 2900
2901 2901 Verify the integrity of the current repository.
2902 2902
2903 2903 This will perform an extensive check of the repository's
2904 2904 integrity, validating the hashes and checksums of each entry in
2905 2905 the changelog, manifest, and tracked files, as well as the
2906 2906 integrity of their crosslinks and indices.
2907 2907 """
2908 2908 return repo.verify()
2909 2909
2910 2910 # Command options and aliases are listed here, alphabetically
2911 2911
2912 2912 table = {
2913 2913 "^add":
2914 2914 (add,
2915 2915 [('I', 'include', [], _('include names matching the given patterns')),
2916 2916 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2917 2917 _('hg add [OPTION]... [FILE]...')),
2918 2918 "addremove":
2919 2919 (addremove,
2920 2920 [('I', 'include', [], _('include names matching the given patterns')),
2921 2921 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2922 2922 _('hg addremove [OPTION]... [FILE]...')),
2923 2923 "^annotate":
2924 2924 (annotate,
2925 2925 [('r', 'rev', '', _('annotate the specified revision')),
2926 2926 ('a', 'text', None, _('treat all files as text')),
2927 2927 ('u', 'user', None, _('list the author')),
2928 2928 ('d', 'date', None, _('list the date')),
2929 2929 ('n', 'number', None, _('list the revision number (default)')),
2930 2930 ('c', 'changeset', None, _('list the changeset')),
2931 2931 ('I', 'include', [], _('include names matching the given patterns')),
2932 2932 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2933 2933 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2934 2934 "archive":
2935 2935 (archive,
2936 2936 [('', 'no-decode', None, _('do not pass files through decoders')),
2937 2937 ('p', 'prefix', '', _('directory prefix for files in archive')),
2938 2938 ('r', 'rev', '', _('revision to distribute')),
2939 2939 ('t', 'type', '', _('type of distribution to create')),
2940 2940 ('I', 'include', [], _('include names matching the given patterns')),
2941 2941 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2942 2942 _('hg archive [OPTION]... DEST')),
2943 2943 "backout":
2944 2944 (backout,
2945 2945 [('', 'merge', None,
2946 2946 _('merge with old dirstate parent after backout')),
2947 2947 ('m', 'message', '', _('use <text> as commit message')),
2948 2948 ('l', 'logfile', '', _('read commit message from <file>')),
2949 2949 ('d', 'date', '', _('record datecode as commit date')),
2950 2950 ('u', 'user', '', _('record user as committer')),
2951 2951 ('I', 'include', [], _('include names matching the given patterns')),
2952 2952 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2953 2953 _('hg backout [OPTION]... REV')),
2954 2954 "bundle":
2955 2955 (bundle,
2956 2956 [('f', 'force', None,
2957 2957 _('run even when remote repository is unrelated'))],
2958 2958 _('hg bundle FILE DEST')),
2959 2959 "cat":
2960 2960 (cat,
2961 2961 [('o', 'output', '', _('print output to file with formatted name')),
2962 2962 ('r', 'rev', '', _('print the given revision')),
2963 2963 ('I', 'include', [], _('include names matching the given patterns')),
2964 2964 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2965 2965 _('hg cat [OPTION]... FILE...')),
2966 2966 "^clone":
2967 2967 (clone,
2968 2968 [('U', 'noupdate', None, _('do not update the new working directory')),
2969 2969 ('r', 'rev', [],
2970 2970 _('a changeset you would like to have after cloning')),
2971 2971 ('', 'pull', None, _('use pull protocol to copy metadata')),
2972 2972 ('e', 'ssh', '', _('specify ssh command to use')),
2973 2973 ('', 'remotecmd', '',
2974 2974 _('specify hg command to run on the remote side'))],
2975 2975 _('hg clone [OPTION]... SOURCE [DEST]')),
2976 2976 "^commit|ci":
2977 2977 (commit,
2978 2978 [('A', 'addremove', None, _('run addremove during commit')),
2979 2979 ('m', 'message', '', _('use <text> as commit message')),
2980 2980 ('l', 'logfile', '', _('read the commit message from <file>')),
2981 2981 ('d', 'date', '', _('record datecode as commit date')),
2982 2982 ('u', 'user', '', _('record user as committer')),
2983 2983 ('I', 'include', [], _('include names matching the given patterns')),
2984 2984 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2985 2985 _('hg commit [OPTION]... [FILE]...')),
2986 2986 "copy|cp":
2987 2987 (copy,
2988 2988 [('A', 'after', None, _('record a copy that has already occurred')),
2989 2989 ('f', 'force', None,
2990 2990 _('forcibly copy over an existing managed file')),
2991 2991 ('I', 'include', [], _('include names matching the given patterns')),
2992 2992 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2993 2993 _('hg copy [OPTION]... [SOURCE]... DEST')),
2994 2994 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2995 2995 "debugcomplete":
2996 2996 (debugcomplete,
2997 2997 [('o', 'options', None, _('show the command options'))],
2998 2998 _('debugcomplete [-o] CMD')),
2999 2999 "debugrebuildstate":
3000 3000 (debugrebuildstate,
3001 3001 [('r', 'rev', '', _('revision to rebuild to'))],
3002 3002 _('debugrebuildstate [-r REV] [REV]')),
3003 3003 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3004 3004 "debugconfig": (debugconfig, [], _('debugconfig')),
3005 3005 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3006 3006 "debugstate": (debugstate, [], _('debugstate')),
3007 3007 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3008 3008 "debugindex": (debugindex, [], _('debugindex FILE')),
3009 3009 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3010 3010 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3011 3011 "debugwalk":
3012 3012 (debugwalk,
3013 3013 [('I', 'include', [], _('include names matching the given patterns')),
3014 3014 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3015 3015 _('debugwalk [OPTION]... [FILE]...')),
3016 3016 "^diff":
3017 3017 (diff,
3018 3018 [('r', 'rev', [], _('revision')),
3019 3019 ('a', 'text', None, _('treat all files as text')),
3020 3020 ('p', 'show-function', None,
3021 3021 _('show which function each change is in')),
3022 3022 ('w', 'ignore-all-space', None,
3023 3023 _('ignore white space when comparing lines')),
3024 3024 ('I', 'include', [], _('include names matching the given patterns')),
3025 3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3026 3026 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3027 3027 "^export":
3028 3028 (export,
3029 3029 [('o', 'output', '', _('print output to file with formatted name')),
3030 3030 ('a', 'text', None, _('treat all files as text')),
3031 3031 ('', 'switch-parent', None, _('diff against the second parent'))],
3032 3032 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3033 3033 "debugforget|forget":
3034 3034 (forget,
3035 3035 [('I', 'include', [], _('include names matching the given patterns')),
3036 3036 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3037 3037 _('hg forget [OPTION]... FILE...')),
3038 3038 "grep":
3039 3039 (grep,
3040 3040 [('0', 'print0', None, _('end fields with NUL')),
3041 3041 ('', 'all', None, _('print all revisions that match')),
3042 3042 ('i', 'ignore-case', None, _('ignore case when matching')),
3043 3043 ('l', 'files-with-matches', None,
3044 3044 _('print only filenames and revs that match')),
3045 3045 ('n', 'line-number', None, _('print matching line numbers')),
3046 3046 ('r', 'rev', [], _('search in given revision range')),
3047 3047 ('u', 'user', None, _('print user who committed change')),
3048 3048 ('I', 'include', [], _('include names matching the given patterns')),
3049 3049 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3050 3050 _('hg grep [OPTION]... PATTERN [FILE]...')),
3051 3051 "heads":
3052 3052 (heads,
3053 3053 [('b', 'branches', None, _('show branches')),
3054 3054 ('', 'style', '', _('display using template map file')),
3055 3055 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3056 3056 ('', 'template', '', _('display with template'))],
3057 3057 _('hg heads [-b] [-r REV]')),
3058 3058 "help": (help_, [], _('hg help [COMMAND]')),
3059 3059 "identify|id": (identify, [], _('hg identify')),
3060 3060 "import|patch":
3061 3061 (import_,
3062 3062 [('p', 'strip', 1,
3063 3063 _('directory strip option for patch. This has the same\n'
3064 3064 'meaning as the corresponding patch option')),
3065 3065 ('b', 'base', '', _('base path')),
3066 3066 ('f', 'force', None,
3067 3067 _('skip check for outstanding uncommitted changes'))],
3068 3068 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
3069 3069 "incoming|in": (incoming,
3070 3070 [('M', 'no-merges', None, _('do not show merges')),
3071 3071 ('f', 'force', None,
3072 3072 _('run even when remote repository is unrelated')),
3073 3073 ('', 'style', '', _('display using template map file')),
3074 3074 ('n', 'newest-first', None, _('show newest record first')),
3075 3075 ('', 'bundle', '', _('file to store the bundles into')),
3076 3076 ('p', 'patch', None, _('show patch')),
3077 3077 ('', 'template', '', _('display with template')),
3078 3078 ('e', 'ssh', '', _('specify ssh command to use')),
3079 3079 ('', 'remotecmd', '',
3080 3080 _('specify hg command to run on the remote side'))],
3081 3081 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
3082 3082 "^init": (init, [], _('hg init [DEST]')),
3083 3083 "locate":
3084 3084 (locate,
3085 3085 [('r', 'rev', '', _('search the repository as it stood at rev')),
3086 3086 ('0', 'print0', None,
3087 3087 _('end filenames with NUL, for use with xargs')),
3088 3088 ('f', 'fullpath', None,
3089 3089 _('print complete paths from the filesystem root')),
3090 3090 ('I', 'include', [], _('include names matching the given patterns')),
3091 3091 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3092 3092 _('hg locate [OPTION]... [PATTERN]...')),
3093 3093 "^log|history":
3094 3094 (log,
3095 3095 [('b', 'branches', None, _('show branches')),
3096 3096 ('k', 'keyword', [], _('search for a keyword')),
3097 3097 ('l', 'limit', '', _('limit number of changes displayed')),
3098 3098 ('r', 'rev', [], _('show the specified revision or range')),
3099 3099 ('M', 'no-merges', None, _('do not show merges')),
3100 3100 ('', 'style', '', _('display using template map file')),
3101 3101 ('m', 'only-merges', None, _('show only merges')),
3102 3102 ('p', 'patch', None, _('show patch')),
3103 3103 ('', 'template', '', _('display with template')),
3104 3104 ('I', 'include', [], _('include names matching the given patterns')),
3105 3105 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3106 3106 _('hg log [OPTION]... [FILE]')),
3107 3107 "manifest": (manifest, [], _('hg manifest [REV]')),
3108 3108 "merge":
3109 3109 (merge,
3110 3110 [('b', 'branch', '', _('merge with head of a specific branch')),
3111 3111 ('f', 'force', None, _('force a merge with outstanding changes'))],
3112 3112 _('hg merge [-b TAG] [-f] [REV]')),
3113 3113 "outgoing|out": (outgoing,
3114 3114 [('M', 'no-merges', None, _('do not show merges')),
3115 3115 ('f', 'force', None,
3116 3116 _('run even when remote repository is unrelated')),
3117 3117 ('p', 'patch', None, _('show patch')),
3118 3118 ('', 'style', '', _('display using template map file')),
3119 3119 ('n', 'newest-first', None, _('show newest record first')),
3120 3120 ('', 'template', '', _('display with template')),
3121 3121 ('e', 'ssh', '', _('specify ssh command to use')),
3122 3122 ('', 'remotecmd', '',
3123 3123 _('specify hg command to run on the remote side'))],
3124 3124 _('hg outgoing [-M] [-p] [-n] [DEST]')),
3125 3125 "^parents":
3126 3126 (parents,
3127 3127 [('b', 'branches', None, _('show branches')),
3128 3128 ('', 'style', '', _('display using template map file')),
3129 3129 ('', 'template', '', _('display with template'))],
3130 3130 _('hg parents [-b] [REV]')),
3131 3131 "paths": (paths, [], _('hg paths [NAME]')),
3132 3132 "^pull":
3133 3133 (pull,
3134 3134 [('u', 'update', None,
3135 3135 _('update the working directory to tip after pull')),
3136 3136 ('e', 'ssh', '', _('specify ssh command to use')),
3137 3137 ('f', 'force', None,
3138 3138 _('run even when remote repository is unrelated')),
3139 3139 ('r', 'rev', [], _('a specific revision you would like to pull')),
3140 3140 ('', 'remotecmd', '',
3141 3141 _('specify hg command to run on the remote side'))],
3142 3142 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3143 3143 "^push":
3144 3144 (push,
3145 3145 [('f', 'force', None, _('force push')),
3146 3146 ('e', 'ssh', '', _('specify ssh command to use')),
3147 3147 ('r', 'rev', [], _('a specific revision you would like to push')),
3148 3148 ('', 'remotecmd', '',
3149 3149 _('specify hg command to run on the remote side'))],
3150 3150 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3151 3151 "debugrawcommit|rawcommit":
3152 3152 (rawcommit,
3153 3153 [('p', 'parent', [], _('parent')),
3154 3154 ('d', 'date', '', _('date code')),
3155 3155 ('u', 'user', '', _('user')),
3156 3156 ('F', 'files', '', _('file list')),
3157 3157 ('m', 'message', '', _('commit message')),
3158 3158 ('l', 'logfile', '', _('commit message file'))],
3159 3159 _('hg debugrawcommit [OPTION]... [FILE]...')),
3160 3160 "recover": (recover, [], _('hg recover')),
3161 3161 "^remove|rm":
3162 3162 (remove,
3163 3163 [('f', 'force', None, _('remove file even if modified')),
3164 3164 ('I', 'include', [], _('include names matching the given patterns')),
3165 3165 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3166 3166 _('hg remove [OPTION]... FILE...')),
3167 3167 "rename|mv":
3168 3168 (rename,
3169 3169 [('A', 'after', None, _('record a rename that has already occurred')),
3170 3170 ('f', 'force', None,
3171 3171 _('forcibly copy over an existing managed file')),
3172 3172 ('I', 'include', [], _('include names matching the given patterns')),
3173 3173 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3174 3174 _('hg rename [OPTION]... SOURCE... DEST')),
3175 3175 "^revert":
3176 3176 (revert,
3177 3177 [('r', 'rev', '', _('revision to revert to')),
3178 3178 ('', 'no-backup', None, _('do not save backup copies of files')),
3179 3179 ('I', 'include', [], _('include names matching given patterns')),
3180 3180 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3181 3181 _('hg revert [-r REV] [NAME]...')),
3182 3182 "root": (root, [], _('hg root')),
3183 3183 "^serve":
3184 3184 (serve,
3185 3185 [('A', 'accesslog', '', _('name of access log file to write to')),
3186 3186 ('d', 'daemon', None, _('run server in background')),
3187 3187 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3188 3188 ('E', 'errorlog', '', _('name of error log file to write to')),
3189 3189 ('p', 'port', 0, _('port to use (default: 8000)')),
3190 3190 ('a', 'address', '', _('address to use')),
3191 3191 ('n', 'name', '',
3192 3192 _('name to show in web pages (default: working dir)')),
3193 3193 ('', 'webdir-conf', '', _('name of the webdir config file'
3194 3194 ' (serve more than one repo)')),
3195 3195 ('', 'pid-file', '', _('name of file to write process ID to')),
3196 3196 ('', 'stdio', None, _('for remote clients')),
3197 3197 ('t', 'templates', '', _('web templates to use')),
3198 3198 ('', 'style', '', _('template style to use')),
3199 3199 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3200 3200 _('hg serve [OPTION]...')),
3201 3201 "^status|st":
3202 3202 (status,
3203 3203 [('m', 'modified', None, _('show only modified files')),
3204 3204 ('a', 'added', None, _('show only added files')),
3205 3205 ('r', 'removed', None, _('show only removed files')),
3206 3206 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3207 3207 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3208 3208 ('i', 'ignored', None, _('show ignored files')),
3209 3209 ('n', 'no-status', None, _('hide status prefix')),
3210 3210 ('0', 'print0', None,
3211 3211 _('end filenames with NUL, for use with xargs')),
3212 3212 ('I', 'include', [], _('include names matching the given patterns')),
3213 3213 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3214 3214 _('hg status [OPTION]... [FILE]...')),
3215 3215 "tag":
3216 3216 (tag,
3217 3217 [('l', 'local', None, _('make the tag local')),
3218 3218 ('m', 'message', '', _('message for tag commit log entry')),
3219 3219 ('d', 'date', '', _('record datecode as commit date')),
3220 3220 ('u', 'user', '', _('record user as committer')),
3221 3221 ('r', 'rev', '', _('revision to tag'))],
3222 3222 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3223 3223 "tags": (tags, [], _('hg tags')),
3224 3224 "tip":
3225 3225 (tip,
3226 3226 [('b', 'branches', None, _('show branches')),
3227 3227 ('', 'style', '', _('display using template map file')),
3228 3228 ('p', 'patch', None, _('show patch')),
3229 3229 ('', 'template', '', _('display with template'))],
3230 3230 _('hg tip [-b] [-p]')),
3231 3231 "unbundle":
3232 3232 (unbundle,
3233 3233 [('u', 'update', None,
3234 3234 _('update the working directory to tip after unbundle'))],
3235 3235 _('hg unbundle [-u] FILE')),
3236 3236 "undo": (undo, [], _('hg undo')),
3237 3237 "^update|up|checkout|co":
3238 3238 (update,
3239 3239 [('b', 'branch', '', _('checkout the head of a specific branch')),
3240 3240 ('m', 'merge', None, _('allow merging of branches')),
3241 3241 ('C', 'clean', None, _('overwrite locally modified files')),
3242 3242 ('f', 'force', None, _('force a merge with outstanding changes'))],
3243 3243 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3244 3244 "verify": (verify, [], _('hg verify')),
3245 3245 "version": (show_version, [], _('hg version')),
3246 3246 }
3247 3247
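# Illustration (not from the original source): each entry in the table above
# maps "name|alias1|alias2" to (function, option list, synopsis), where every
# option is (short flag, long name, default, help text) and the leading "^"
# appears to mark commands shown in the short help list, e.g.:
#
#   "^status|st":
#       (status,
#        [('m', 'modified', None, _('show only modified files')), ...],
#        _('hg status [OPTION]... [FILE]...')),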
3248 3248 globalopts = [
3249 3249 ('R', 'repository', '',
3250 3250 _('repository root directory or symbolic path name')),
3251 3251 ('', 'cwd', '', _('change working directory')),
3252 3252 ('y', 'noninteractive', None,
3253 3253 _('do not prompt, assume \'yes\' for any required answers')),
3254 3254 ('q', 'quiet', None, _('suppress output')),
3255 3255 ('v', 'verbose', None, _('enable additional output')),
3256 3256 ('', 'debug', None, _('enable debugging output')),
3257 3257 ('', 'debugger', None, _('start debugger')),
3258 3258 ('', 'traceback', None, _('print traceback on exception')),
3259 3259 ('', 'time', None, _('time how long the command takes')),
3260 3260 ('', 'profile', None, _('print command execution profile')),
3261 3261 ('', 'version', None, _('output version information and exit')),
3262 3262 ('h', 'help', None, _('display help and exit')),
3263 3263 ]
3264 3264
3265 3265 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3266 3266 " debugindex debugindexdot")
3267 3267 optionalrepo = ("paths serve debugconfig")
3268 3268
3269 3269 def findpossible(cmd):
3270 3270 """
3271 3271 Return cmd -> (aliases, command table entry)
3272 3272 for each matching command.
3273 3273 Return debug commands (or their aliases) only if no normal command matches.
3274 3274 """
3275 3275 choice = {}
3276 3276 debugchoice = {}
3277 3277 for e in table.keys():
3278 3278 aliases = e.lstrip("^").split("|")
3279 3279 found = None
3280 3280 if cmd in aliases:
3281 3281 found = cmd
3282 3282 else:
3283 3283 for a in aliases:
3284 3284 if a.startswith(cmd):
3285 3285 found = a
3286 3286 break
3287 3287 if found is not None:
3288 3288 if aliases[0].startswith("debug"):
3289 3289 debugchoice[found] = (aliases, table[e])
3290 3290 else:
3291 3291 choice[found] = (aliases, table[e])
3292 3292
3293 3293 if not choice and debugchoice:
3294 3294 choice = debugchoice
3295 3295
3296 3296 return choice
3297 3297
3298 3298 def find(cmd):
3299 3299 """Return (aliases, command table entry) for command string."""
3300 3300 choice = findpossible(cmd)
3301 3301
3302 3302 if choice.has_key(cmd):
3303 3303 return choice[cmd]
3304 3304
3305 3305 if len(choice) > 1:
3306 3306 clist = choice.keys()
3307 3307 clist.sort()
3308 3308 raise AmbiguousCommand(cmd, clist)
3309 3309
3310 3310 if choice:
3311 3311 return choice.values()[0]
3312 3312
3313 3313 raise UnknownCommand(cmd)
3314 3314
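# Illustration (hypothetical, not from the original source): find() and
# findpossible() resolve abbreviated command names roughly as follows:
#
#   find("st")          -> the status entry ("st" is an exact alias)
#   find("stat")        -> the status entry (unique prefix match)
#   find("debugco")     -> AmbiguousCommand, since both debugcomplete and
#                          debugconfig match the prefix
#   find("frobnicate")  -> UnknownCommand
#
# Debug commands are only considered when no regular command matches.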
3315 3315 def catchterm(*args):
3316 3316 raise util.SignalInterrupt
3317 3317
3318 3318 def run():
3319 3319 sys.exit(dispatch(sys.argv[1:]))
3320 3320
3321 3321 class ParseError(Exception):
3322 3322 """Exception raised on errors in parsing the command line."""
3323 3323
3324 3324 def parse(ui, args):
3325 3325 options = {}
3326 3326 cmdoptions = {}
3327 3327
3328 3328 try:
3329 3329 args = fancyopts.fancyopts(args, globalopts, options)
3330 3330 except fancyopts.getopt.GetoptError, inst:
3331 3331 raise ParseError(None, inst)
3332 3332
3333 3333 if args:
3334 3334 cmd, args = args[0], args[1:]
3335 3335 aliases, i = find(cmd)
3336 3336 cmd = aliases[0]
3337 3337 defaults = ui.config("defaults", cmd)
3338 3338 if defaults:
3339 3339 args = defaults.split() + args
3340 3340 c = list(i[1])
3341 3341 else:
3342 3342 cmd = None
3343 3343 c = []
3344 3344
3345 3345 # combine global options into local
3346 3346 for o in globalopts:
3347 3347 c.append((o[0], o[1], options[o[1]], o[3]))
3348 3348
3349 3349 try:
3350 3350 args = fancyopts.fancyopts(args, c, cmdoptions)
3351 3351 except fancyopts.getopt.GetoptError, inst:
3352 3352 raise ParseError(cmd, inst)
3353 3353
3354 3354 # separate global options back out
3355 3355 for o in globalopts:
3356 3356 n = o[1]
3357 3357 options[n] = cmdoptions[n]
3358 3358 del cmdoptions[n]
3359 3359
3360 3360 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3361 3361
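# Illustration (hypothetical, not from the original source): parse() runs
# fancyopts twice -- once for the global options, then again with the
# command's own options merged in -- and prepends any configured defaults.
# With a hypothetical hgrc section
#
#   [defaults]
#   log = -v -l 5
#
# "hg log src/foo.c" is parsed as if "hg log -v -l 5 src/foo.c" had been
# typed, because defaults.split() is prepended to the remaining args.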
3362 3362 def dispatch(args):
3363 3363 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3364 3364 num = getattr(signal, name, None)
3365 3365 if num: signal.signal(num, catchterm)
3366 3366
3367 3367 try:
3368 u = ui.ui()
3368 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3369 3369 except util.Abort, inst:
3370 3370 sys.stderr.write(_("abort: %s\n") % inst)
3371 3371 return -1
3372 3372
3373 3373 external = []
3374 3374 for x in u.extensions():
3375 3375 try:
3376 3376 if x[1]:
3377 3377 mod = imp.load_source(x[0], x[1])
3378 3378 else:
3379 3379 def importh(name):
3380 3380 mod = __import__(name)
3381 3381 components = name.split('.')
3382 3382 for comp in components[1:]:
3383 3383 mod = getattr(mod, comp)
3384 3384 return mod
3385 3385 try:
3386 3386 mod = importh("hgext." + x[0])
3387 3387 except ImportError:
3388 3388 mod = importh(x[0])
3389 3389 external.append(mod)
3390 3390 except Exception, inst:
3391 3391 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3392 if "--traceback" in sys.argv[1:]:
3392 if u.traceback:
3393 3393 traceback.print_exc()
3394 3394 return 1
3395 3395 continue
3396 3396
3397 3397 for x in external:
3398 3398 cmdtable = getattr(x, 'cmdtable', {})
3399 3399 for t in cmdtable:
3400 3400 if t in table:
3401 3401 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3402 3402 table.update(cmdtable)
3403 3403
3404 3404 try:
3405 3405 cmd, func, args, options, cmdoptions = parse(u, args)
3406 3406 if options["time"]:
3407 3407 def get_times():
3408 3408 t = os.times()
3409 3409 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3410 3410 t = (t[0], t[1], t[2], t[3], time.clock())
3411 3411 return t
3412 3412 s = get_times()
3413 3413 def print_time():
3414 3414 t = get_times()
3415 3415 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3416 3416 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3417 3417 atexit.register(print_time)
3418 3418
3419 3419 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3420 not options["noninteractive"])
3420 not options["noninteractive"], options["traceback"])
3421 3421
3422 3422 # enter the debugger before command execution
3423 3423 if options['debugger']:
3424 3424 pdb.set_trace()
3425 3425
3426 3426 try:
3427 3427 if options['cwd']:
3428 3428 try:
3429 3429 os.chdir(options['cwd'])
3430 3430 except OSError, inst:
3431 3431 raise util.Abort('%s: %s' %
3432 3432 (options['cwd'], inst.strerror))
3433 3433
3434 3434 path = u.expandpath(options["repository"]) or ""
3435 3435 repo = path and hg.repository(u, path=path) or None
3436 3436
3437 3437 if options['help']:
3438 3438 return help_(u, cmd, options['version'])
3439 3439 elif options['version']:
3440 3440 return show_version(u)
3441 3441 elif not cmd:
3442 3442 return help_(u, 'shortlist')
3443 3443
3444 3444 if cmd not in norepo.split():
3445 3445 try:
3446 3446 if not repo:
3447 3447 repo = hg.repository(u, path=path)
3448 3448 u = repo.ui
3449 3449 for x in external:
3450 3450 if hasattr(x, 'reposetup'):
3451 3451 x.reposetup(u, repo)
3452 3452 except hg.RepoError:
3453 3453 if cmd not in optionalrepo.split():
3454 3454 raise
3455 3455 d = lambda: func(u, repo, *args, **cmdoptions)
3456 3456 else:
3457 3457 d = lambda: func(u, *args, **cmdoptions)
3458 3458
3459 3459 try:
3460 3460 if options['profile']:
3461 3461 import hotshot, hotshot.stats
3462 3462 prof = hotshot.Profile("hg.prof")
3463 3463 try:
3464 3464 try:
3465 3465 return prof.runcall(d)
3466 3466 except:
3467 3467 try:
3468 3468 u.warn(_('exception raised - generating '
3469 3469 'profile anyway\n'))
3470 3470 except:
3471 3471 pass
3472 3472 raise
3473 3473 finally:
3474 3474 prof.close()
3475 3475 stats = hotshot.stats.load("hg.prof")
3476 3476 stats.strip_dirs()
3477 3477 stats.sort_stats('time', 'calls')
3478 3478 stats.print_stats(40)
3479 3479 else:
3480 3480 return d()
3481 3481 finally:
3482 3482 u.flush()
3483 3483 except:
3484 3484 # enter the debugger when we hit an exception
3485 3485 if options['debugger']:
3486 3486 pdb.post_mortem(sys.exc_info()[2])
3487 if options['traceback']:
3487 if u.traceback:
3488 3488 traceback.print_exc()
3489 3489 raise
3490 3490 except ParseError, inst:
3491 3491 if inst.args[0]:
3492 3492 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3493 3493 help_(u, inst.args[0])
3494 3494 else:
3495 3495 u.warn(_("hg: %s\n") % inst.args[1])
3496 3496 help_(u, 'shortlist')
3497 3497 except AmbiguousCommand, inst:
3498 3498 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3499 3499 (inst.args[0], " ".join(inst.args[1])))
3500 3500 except UnknownCommand, inst:
3501 3501 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3502 3502 help_(u, 'shortlist')
3503 3503 except hg.RepoError, inst:
3504 3504 u.warn(_("abort: %s!\n") % inst)
3505 3505 except lock.LockHeld, inst:
3506 3506 if inst.errno == errno.ETIMEDOUT:
3507 3507 reason = _('timed out waiting for lock held by %s') % inst.locker
3508 3508 else:
3509 3509 reason = _('lock held by %s') % inst.locker
3510 3510 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3511 3511 except lock.LockUnavailable, inst:
3512 3512 u.warn(_("abort: could not lock %s: %s\n") %
3513 3513 (inst.desc or inst.filename, inst.strerror))
3514 3514 except revlog.RevlogError, inst:
3515 3515 u.warn(_("abort: "), inst, "!\n")
3516 3516 except util.SignalInterrupt:
3517 3517 u.warn(_("killed!\n"))
3518 3518 except KeyboardInterrupt:
3519 3519 try:
3520 3520 u.warn(_("interrupted!\n"))
3521 3521 except IOError, inst:
3522 3522 if inst.errno == errno.EPIPE:
3523 3523 if u.debugflag:
3524 3524 u.warn(_("\nbroken pipe\n"))
3525 3525 else:
3526 3526 raise
3527 3527 except IOError, inst:
3528 3528 if hasattr(inst, "code"):
3529 3529 u.warn(_("abort: %s\n") % inst)
3530 3530 elif hasattr(inst, "reason"):
3531 3531 u.warn(_("abort: error: %s\n") % inst.reason[1])
3532 3532 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3533 3533 if u.debugflag:
3534 3534 u.warn(_("broken pipe\n"))
3535 3535 elif getattr(inst, "strerror", None):
3536 3536 if getattr(inst, "filename", None):
3537 3537 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3538 3538 else:
3539 3539 u.warn(_("abort: %s\n") % inst.strerror)
3540 3540 else:
3541 3541 raise
3542 3542 except OSError, inst:
3543 3543 if hasattr(inst, "filename"):
3544 3544 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3545 3545 else:
3546 3546 u.warn(_("abort: %s\n") % inst.strerror)
3547 3547 except util.Abort, inst:
3548 3548 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3549 3549 except TypeError, inst:
3550 3550 # was this an argument error?
3551 3551 tb = traceback.extract_tb(sys.exc_info()[2])
3552 3552 if len(tb) > 2: # no
3553 3553 raise
3554 3554 u.debug(inst, "\n")
3555 3555 u.warn(_("%s: invalid arguments\n") % cmd)
3556 3556 help_(u, cmd)
3557 3557 except SystemExit, inst:
3558 3558 # Commands shouldn't sys.exit directly, but give a return code.
3559 3559 # Just in case, catch this and pass the exit code to the caller.
3560 3560 return inst.code
3561 3561 except:
3562 3562 u.warn(_("** unknown exception encountered, details follow\n"))
3563 3563 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3564 3564 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3565 3565 % version.get_version())
3566 3566 raise
3567 3567
3568 3568 return -1
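# Editorial summary (not part of the source): with the changes above, the
# --traceback flag is recorded on the ui object itself, via
# ui.ui(traceback='--traceback' in sys.argv[1:]) and
# updateopts(..., options["traceback"]), so any code holding a ui --
# including the hook code in localrepo.py below -- can test u.traceback
# instead of rescanning sys.argv before calling traceback.print_exc().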
@@ -1,2056 +1,2056 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "appendfile changegroup")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog sys traceback")
15 demandload(globals(), "revlog traceback")
16 16
17 17 class localrepository(object):
18 18 def __del__(self):
19 19 self.transhandle = None
20 20 def __init__(self, parentui, path=None, create=0):
21 21 if not path:
22 22 p = os.getcwd()
23 23 while not os.path.isdir(os.path.join(p, ".hg")):
24 24 oldp = p
25 25 p = os.path.dirname(p)
26 26 if p == oldp:
27 27 raise repo.RepoError(_("no repo found"))
28 28 path = p
29 29 self.path = os.path.join(path, ".hg")
30 30
31 31 if not create and not os.path.isdir(self.path):
32 32 raise repo.RepoError(_("repository %s not found") % path)
33 33
34 34 self.root = os.path.abspath(path)
35 35 self.origroot = path
36 36 self.ui = ui.ui(parentui=parentui)
37 37 self.opener = util.opener(self.path)
38 38 self.wopener = util.opener(self.root)
39 39
40 40 try:
41 41 self.ui.readconfig(self.join("hgrc"), self.root)
42 42 except IOError:
43 43 pass
44 44
45 45 v = self.ui.revlogopts
46 46 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
47 47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 48 flags = 0
49 49 for x in v.get('flags', "").split():
50 50 flags |= revlog.flagstr(x)
51 51
52 52 v = self.revlogversion | flags
53 53 self.manifest = manifest.manifest(self.opener, v)
54 54 self.changelog = changelog.changelog(self.opener, v)
55 55
56 56 # the changelog might not have the inline index flag
57 57 # on. If the format of the changelog is the same as found in
58 58 # .hgrc, apply any flags found in the .hgrc as well.
59 59 # Otherwise, just version from the changelog
60 60 v = self.changelog.version
61 61 if v == self.revlogversion:
62 62 v |= flags
63 63 self.revlogversion = v
64 64
65 65 self.tagscache = None
66 66 self.nodetagscache = None
67 67 self.encodepats = None
68 68 self.decodepats = None
69 69 self.transhandle = None
70 70
71 71 if create:
72 72 os.mkdir(self.path)
73 73 os.mkdir(self.join("data"))
74 74
75 75 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
76 76
77 77 def hook(self, name, throw=False, **args):
78 78 def callhook(hname, funcname):
79 79 '''call python hook. hook is callable object, looked up as
80 80 name in python module. if callable returns "true", hook
81 81 passes, else fails. if hook raises exception, treated as
82 82 hook failure. exception propagates if throw is "true".'''
83 83
84 84 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
85 85 d = funcname.rfind('.')
86 86 if d == -1:
87 87 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
88 88 % (hname, funcname))
89 89 modname = funcname[:d]
90 90 try:
91 91 obj = __import__(modname)
92 92 except ImportError:
93 93 raise util.Abort(_('%s hook is invalid '
94 94 '(import of "%s" failed)') %
95 95 (hname, modname))
96 96 try:
97 97 for p in funcname.split('.')[1:]:
98 98 obj = getattr(obj, p)
99 99 except AttributeError, err:
100 100 raise util.Abort(_('%s hook is invalid '
101 101 '("%s" is not defined)') %
102 102 (hname, funcname))
103 103 if not callable(obj):
104 104 raise util.Abort(_('%s hook is invalid '
105 105 '("%s" is not callable)') %
106 106 (hname, funcname))
107 107 try:
108 108 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
109 109 except (KeyboardInterrupt, util.SignalInterrupt):
110 110 raise
111 111 except Exception, exc:
112 112 if isinstance(exc, util.Abort):
113 113 self.ui.warn(_('error: %s hook failed: %s\n') %
114 114 (hname, exc.args[0] % exc.args[1:]))
115 115 else:
116 116 self.ui.warn(_('error: %s hook raised an exception: '
117 117 '%s\n') % (hname, exc))
118 118 if throw:
119 119 raise
120 if "--traceback" in sys.argv[1:]:
120 if self.ui.traceback:
121 121 traceback.print_exc()
122 122 return False
123 123 if not r:
124 124 if throw:
125 125 raise util.Abort(_('%s hook failed') % hname)
126 126 self.ui.warn(_('error: %s hook failed\n') % hname)
127 127 return r
128 128
129 129 def runhook(name, cmd):
130 130 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
131 131 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
132 132 [(k.upper(), v) for k, v in args.iteritems()])
133 133 r = util.system(cmd, environ=env, cwd=self.root)
134 134 if r:
135 135 desc, r = util.explain_exit(r)
136 136 if throw:
137 137 raise util.Abort(_('%s hook %s') % (name, desc))
138 138 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
139 139 return False
140 140 return True
141 141
142 142 r = True
143 143 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
144 144 if hname.split(".", 1)[0] == name and cmd]
145 145 hooks.sort()
146 146 for hname, cmd in hooks:
147 147 if cmd.startswith('python:'):
148 148 r = callhook(hname, cmd[7:].strip()) and r
149 149 else:
150 150 r = runhook(hname, cmd) and r
151 151 return r
152 152
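# Illustration (hypothetical, not from the original source): hooks come from
# the [hooks] section of hgrc. A shell hook receives the keyword arguments as
# HG_* environment variables (runhook above); a value starting with "python:"
# names a module.function called with ui, repo, hooktype and the same
# keywords (callhook above). For example:
#
#   [hooks]
#   commit = echo "committed $HG_NODE"
#   pretxncommit.checksize = python:mychecks.maxsize
#
# "mychecks.maxsize" is a made-up function; it must return a true value for
# the hook to pass, and the pretxncommit hook is run with throw=True so a
# failure aborts the transaction. With the change above, a failing python
# hook prints a traceback when ui.traceback is set.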
153 153 def tags(self):
154 154 '''return a mapping of tag to node'''
155 155 if not self.tagscache:
156 156 self.tagscache = {}
157 157
158 158 def parsetag(line, context):
159 159 if not line:
160 160 return
161 161 s = line.split(" ", 1)
162 162 if len(s) != 2:
163 163 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
164 164 return
165 165 node, key = s
166 166 try:
167 167 bin_n = bin(node)
168 168 except TypeError:
169 169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
170 170 return
171 171 if bin_n not in self.changelog.nodemap:
172 172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
173 173 return
174 174 self.tagscache[key.strip()] = bin_n
175 175
176 176 # read each head of the tags file, ending with the tip
177 177 # and add each tag found to the map, with "newer" ones
178 178 # taking precedence
179 179 fl = self.file(".hgtags")
180 180 h = fl.heads()
181 181 h.reverse()
182 182 for r in h:
183 183 count = 0
184 184 for l in fl.read(r).splitlines():
185 185 count += 1
186 186 parsetag(l, ".hgtags:%d" % count)
187 187
188 188 try:
189 189 f = self.opener("localtags")
190 190 count = 0
191 191 for l in f:
192 192 count += 1
193 193 parsetag(l, "localtags:%d" % count)
194 194 except IOError:
195 195 pass
196 196
197 197 self.tagscache['tip'] = self.changelog.tip()
198 198
199 199 return self.tagscache
200 200
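# Illustration (hypothetical, not from the original source): parsetag()
# expects one "<hex node> <tag name>" pair per line, split on the first
# space, e.g.
#
#   0123456789abcdef0123456789abcdef01234567 my-release-tag
#
# (a made-up node and tag). Entries from .hgtags heads nearer the tip, and
# then from the uncommitted "localtags" file, override earlier ones, and
# 'tip' is always set last.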
201 201 def tagslist(self):
202 202 '''return a list of tags ordered by revision'''
203 203 l = []
204 204 for t, n in self.tags().items():
205 205 try:
206 206 r = self.changelog.rev(n)
207 207 except:
208 208 r = -2 # sort to the beginning of the list if unknown
209 209 l.append((r, t, n))
210 210 l.sort()
211 211 return [(t, n) for r, t, n in l]
212 212
213 213 def nodetags(self, node):
214 214 '''return the tags associated with a node'''
215 215 if not self.nodetagscache:
216 216 self.nodetagscache = {}
217 217 for t, n in self.tags().items():
218 218 self.nodetagscache.setdefault(n, []).append(t)
219 219 return self.nodetagscache.get(node, [])
220 220
221 221 def lookup(self, key):
222 222 try:
223 223 return self.tags()[key]
224 224 except KeyError:
225 225 try:
226 226 return self.changelog.lookup(key)
227 227 except:
228 228 raise repo.RepoError(_("unknown revision '%s'") % key)
229 229
230 230 def dev(self):
231 231 return os.stat(self.path).st_dev
232 232
233 233 def local(self):
234 234 return True
235 235
236 236 def join(self, f):
237 237 return os.path.join(self.path, f)
238 238
239 239 def wjoin(self, f):
240 240 return os.path.join(self.root, f)
241 241
242 242 def file(self, f):
243 243 if f[0] == '/':
244 244 f = f[1:]
245 245 return filelog.filelog(self.opener, f, self.revlogversion)
246 246
247 247 def getcwd(self):
248 248 return self.dirstate.getcwd()
249 249
250 250 def wfile(self, f, mode='r'):
251 251 return self.wopener(f, mode)
252 252
253 253 def wread(self, filename):
254 254 if self.encodepats == None:
255 255 l = []
256 256 for pat, cmd in self.ui.configitems("encode"):
257 257 mf = util.matcher(self.root, "", [pat], [], [])[1]
258 258 l.append((mf, cmd))
259 259 self.encodepats = l
260 260
261 261 data = self.wopener(filename, 'r').read()
262 262
263 263 for mf, cmd in self.encodepats:
264 264 if mf(filename):
265 265 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
266 266 data = util.filter(data, cmd)
267 267 break
268 268
269 269 return data
270 270
271 271 def wwrite(self, filename, data, fd=None):
272 272 if self.decodepats == None:
273 273 l = []
274 274 for pat, cmd in self.ui.configitems("decode"):
275 275 mf = util.matcher(self.root, "", [pat], [], [])[1]
276 276 l.append((mf, cmd))
277 277 self.decodepats = l
278 278
279 279 for mf, cmd in self.decodepats:
280 280 if mf(filename):
281 281 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
282 282 data = util.filter(data, cmd)
283 283 break
284 284
285 285 if fd:
286 286 return fd.write(data)
287 287 return self.wopener(filename, 'w').write(data)
288 288
289 289 def transaction(self):
290 290 tr = self.transhandle
291 291 if tr != None and tr.running():
292 292 return tr.nest()
293 293
294 294 # save dirstate for undo
295 295 try:
296 296 ds = self.opener("dirstate").read()
297 297 except IOError:
298 298 ds = ""
299 299 self.opener("journal.dirstate", "w").write(ds)
300 300
301 301 tr = transaction.transaction(self.ui.warn, self.opener,
302 302 self.join("journal"),
303 303 aftertrans(self.path))
304 304 self.transhandle = tr
305 305 return tr
306 306
307 307 def recover(self):
308 308 l = self.lock()
309 309 if os.path.exists(self.join("journal")):
310 310 self.ui.status(_("rolling back interrupted transaction\n"))
311 311 transaction.rollback(self.opener, self.join("journal"))
312 312 self.reload()
313 313 return True
314 314 else:
315 315 self.ui.warn(_("no interrupted transaction available\n"))
316 316 return False
317 317
318 318 def undo(self, wlock=None):
319 319 if not wlock:
320 320 wlock = self.wlock()
321 321 l = self.lock()
322 322 if os.path.exists(self.join("undo")):
323 323 self.ui.status(_("rolling back last transaction\n"))
324 324 transaction.rollback(self.opener, self.join("undo"))
325 325 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
326 326 self.reload()
327 327 self.wreload()
328 328 else:
329 329 self.ui.warn(_("no undo information available\n"))
330 330
331 331 def wreload(self):
332 332 self.dirstate.read()
333 333
334 334 def reload(self):
335 335 self.changelog.load()
336 336 self.manifest.load()
337 337 self.tagscache = None
338 338 self.nodetagscache = None
339 339
340 340 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
341 341 desc=None):
342 342 try:
343 343 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
344 344 except lock.LockHeld, inst:
345 345 if not wait:
346 346 raise
347 347 self.ui.warn(_("waiting for lock on %s held by %s\n") %
348 348 (desc, inst.args[0]))
349 349 # default to 600 seconds timeout
350 350 l = lock.lock(self.join(lockname),
351 351 int(self.ui.config("ui", "timeout") or 600),
352 352 releasefn, desc=desc)
353 353 if acquirefn:
354 354 acquirefn()
355 355 return l
356 356
357 357 def lock(self, wait=1):
358 358 return self.do_lock("lock", wait, acquirefn=self.reload,
359 359 desc=_('repository %s') % self.origroot)
360 360
361 361 def wlock(self, wait=1):
362 362 return self.do_lock("wlock", wait, self.dirstate.write,
363 363 self.wreload,
364 364 desc=_('working directory of %s') % self.origroot)
365 365
366 366 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
367 367 "determine whether a new filenode is needed"
368 368 fp1 = manifest1.get(filename, nullid)
369 369 fp2 = manifest2.get(filename, nullid)
370 370
371 371 if fp2 != nullid:
372 372 # is one parent an ancestor of the other?
373 373 fpa = filelog.ancestor(fp1, fp2)
374 374 if fpa == fp1:
375 375 fp1, fp2 = fp2, nullid
376 376 elif fpa == fp2:
377 377 fp2 = nullid
378 378
379 379 # is the file unmodified from the parent? report existing entry
380 380 if fp2 == nullid and text == filelog.read(fp1):
381 381 return (fp1, None, None)
382 382
383 383 return (None, fp1, fp2)
384 384
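# A rough sketch (not part of the original): checkfilemerge() returns
# (existing_node, None, None) when the text is unchanged relative to a single
# parent, letting the caller reuse that filelog entry, and (None, fp1, fp2)
# when a new filelog revision must be added with those parents.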
385 385 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
386 386 orig_parent = self.dirstate.parents()[0] or nullid
387 387 p1 = p1 or self.dirstate.parents()[0] or nullid
388 388 p2 = p2 or self.dirstate.parents()[1] or nullid
389 389 c1 = self.changelog.read(p1)
390 390 c2 = self.changelog.read(p2)
391 391 m1 = self.manifest.read(c1[0])
392 392 mf1 = self.manifest.readflags(c1[0])
393 393 m2 = self.manifest.read(c2[0])
394 394 changed = []
395 395
396 396 if orig_parent == p1:
397 397 update_dirstate = 1
398 398 else:
399 399 update_dirstate = 0
400 400
401 401 if not wlock:
402 402 wlock = self.wlock()
403 403 l = self.lock()
404 404 tr = self.transaction()
405 405 mm = m1.copy()
406 406 mfm = mf1.copy()
407 407 linkrev = self.changelog.count()
408 408 for f in files:
409 409 try:
410 410 t = self.wread(f)
411 411 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
412 412 r = self.file(f)
413 413 mfm[f] = tm
414 414
415 415 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
416 416 if entry:
417 417 mm[f] = entry
418 418 continue
419 419
420 420 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
421 421 changed.append(f)
422 422 if update_dirstate:
423 423 self.dirstate.update([f], "n")
424 424 except IOError:
425 425 try:
426 426 del mm[f]
427 427 del mfm[f]
428 428 if update_dirstate:
429 429 self.dirstate.forget([f])
430 430 except:
431 431 # deleted from p2?
432 432 pass
433 433
434 434 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
435 435 user = user or self.ui.username()
436 436 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
437 437 tr.close()
438 438 if update_dirstate:
439 439 self.dirstate.setparents(n, nullid)
440 440
441 441 def commit(self, files=None, text="", user=None, date=None,
442 442 match=util.always, force=False, lock=None, wlock=None):
443 443 commit = []
444 444 remove = []
445 445 changed = []
446 446
447 447 if files:
448 448 for f in files:
449 449 s = self.dirstate.state(f)
450 450 if s in 'nmai':
451 451 commit.append(f)
452 452 elif s == 'r':
453 453 remove.append(f)
454 454 else:
455 455 self.ui.warn(_("%s not tracked!\n") % f)
456 456 else:
457 457 modified, added, removed, deleted, unknown = self.changes(match=match)
458 458 commit = modified + added
459 459 remove = removed
460 460
461 461 p1, p2 = self.dirstate.parents()
462 462 c1 = self.changelog.read(p1)
463 463 c2 = self.changelog.read(p2)
464 464 m1 = self.manifest.read(c1[0])
465 465 mf1 = self.manifest.readflags(c1[0])
466 466 m2 = self.manifest.read(c2[0])
467 467
468 468 if not commit and not remove and not force and p2 == nullid:
469 469 self.ui.status(_("nothing changed\n"))
470 470 return None
471 471
472 472 xp1 = hex(p1)
473 473 if p2 == nullid: xp2 = ''
474 474 else: xp2 = hex(p2)
475 475
476 476 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
477 477
478 478 if not wlock:
479 479 wlock = self.wlock()
480 480 if not lock:
481 481 lock = self.lock()
482 482 tr = self.transaction()
483 483
484 484 # check in files
485 485 new = {}
486 486 linkrev = self.changelog.count()
487 487 commit.sort()
488 488 for f in commit:
489 489 self.ui.note(f + "\n")
490 490 try:
491 491 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
492 492 t = self.wread(f)
493 493 except IOError:
494 494 self.ui.warn(_("trouble committing %s!\n") % f)
495 495 raise
496 496
497 497 r = self.file(f)
498 498
499 499 meta = {}
500 500 cp = self.dirstate.copied(f)
501 501 if cp:
502 502 meta["copy"] = cp
503 503 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
504 504 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
505 505 fp1, fp2 = nullid, nullid
506 506 else:
507 507 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
508 508 if entry:
509 509 new[f] = entry
510 510 continue
511 511
512 512 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
513 513 # remember what we've added so that we can later calculate
514 514 # the files to pull from a set of changesets
515 515 changed.append(f)
516 516
517 517 # update manifest
518 518 m1 = m1.copy()
519 519 m1.update(new)
520 520 for f in remove:
521 521 if f in m1:
522 522 del m1[f]
523 523 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
524 524 (new, remove))
525 525
526 526 # add changeset
527 527 new = new.keys()
528 528 new.sort()
529 529
530 530 user = user or self.ui.username()
531 531 if not text:
532 532 edittext = [""]
533 533 if p2 != nullid:
534 534 edittext.append("HG: branch merge")
535 535 edittext.extend(["HG: changed %s" % f for f in changed])
536 536 edittext.extend(["HG: removed %s" % f for f in remove])
537 537 if not changed and not remove:
538 538 edittext.append("HG: no files changed")
539 539 edittext.append("")
540 540 # run editor in the repository root
541 541 olddir = os.getcwd()
542 542 os.chdir(self.root)
543 543 edittext = self.ui.edit("\n".join(edittext), user)
544 544 os.chdir(olddir)
545 545 if not edittext.rstrip():
546 546 return None
547 547 text = edittext
548 548
549 549 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
550 550 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
551 551 parent2=xp2)
552 552 tr.close()
553 553
554 554 self.dirstate.setparents(n)
555 555 self.dirstate.update(new, "n")
556 556 self.dirstate.forget(remove)
557 557
558 558 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
559 559 return n
560 560
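# A rough sketch (not part of the original): commit() proceeds roughly as:
# run the "precommit" hook, take wlock/lock and open a transaction, add a
# filelog revision for each changed file (reusing existing filenodes via
# checkfilemerge), add the manifest, prompt for a message if none was given,
# add the changelog entry, run "pretxncommit" (which may still abort the
# transaction), close the transaction, update the dirstate, and finally run
# the "commit" hook with the new node.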
561 561 def walk(self, node=None, files=[], match=util.always, badmatch=None):
562 562 if node:
563 563 fdict = dict.fromkeys(files)
564 564 for fn in self.manifest.read(self.changelog.read(node)[0]):
565 565 fdict.pop(fn, None)
566 566 if match(fn):
567 567 yield 'm', fn
568 568 for fn in fdict:
569 569 if badmatch and badmatch(fn):
570 570 if match(fn):
571 571 yield 'b', fn
572 572 else:
573 573 self.ui.warn(_('%s: No such file in rev %s\n') % (
574 574 util.pathto(self.getcwd(), fn), short(node)))
575 575 else:
576 576 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
577 577 yield src, fn
578 578
579 579 def changes(self, node1=None, node2=None, files=[], match=util.always,
580 580 wlock=None, show_ignored=None):
581 581 """return changes between two nodes or node and working directory
582 582
583 583 If node1 is None, use the first dirstate parent instead.
584 584 If node2 is None, compare node1 with working directory.
585 585 """
586 586
587 587 def fcmp(fn, mf):
588 588 t1 = self.wread(fn)
589 589 t2 = self.file(fn).read(mf.get(fn, nullid))
590 590 return cmp(t1, t2)
591 591
592 592 def mfmatches(node):
593 593 change = self.changelog.read(node)
594 594 mf = dict(self.manifest.read(change[0]))
595 595 for fn in mf.keys():
596 596 if not match(fn):
597 597 del mf[fn]
598 598 return mf
599 599
600 600 if node1:
601 601 # read the manifest from node1 before the manifest from node2,
602 602 # so that we'll hit the manifest cache if we're going through
603 603 # all the revisions in parent->child order.
604 604 mf1 = mfmatches(node1)
605 605
606 606 # are we comparing the working directory?
607 607 if not node2:
608 608 if not wlock:
609 609 try:
610 610 wlock = self.wlock(wait=0)
611 611 except lock.LockException:
612 612 wlock = None
613 613 lookup, modified, added, removed, deleted, unknown, ignored = (
614 614 self.dirstate.changes(files, match, show_ignored))
615 615
616 616 # are we comparing working dir against its parent?
617 617 if not node1:
618 618 if lookup:
619 619 # do a full compare of any files that might have changed
620 620 mf2 = mfmatches(self.dirstate.parents()[0])
621 621 for f in lookup:
622 622 if fcmp(f, mf2):
623 623 modified.append(f)
624 624 elif wlock is not None:
625 625 self.dirstate.update([f], "n")
626 626 else:
627 627 # we are comparing working dir against non-parent
628 628 # generate a pseudo-manifest for the working dir
629 629 mf2 = mfmatches(self.dirstate.parents()[0])
630 630 for f in lookup + modified + added:
631 631 mf2[f] = ""
632 632 for f in removed:
633 633 if f in mf2:
634 634 del mf2[f]
635 635 else:
636 636 # we are comparing two revisions
637 637 deleted, unknown, ignored = [], [], []
638 638 mf2 = mfmatches(node2)
639 639
640 640 if node1:
641 641 # flush lists from dirstate before comparing manifests
642 642 modified, added = [], []
643 643
644 644 for fn in mf2:
645 645 if mf1.has_key(fn):
646 646 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
647 647 modified.append(fn)
648 648 del mf1[fn]
649 649 else:
650 650 added.append(fn)
651 651
652 652 removed = mf1.keys()
653 653
654 654 # sort and return results:
655 655 for l in modified, added, removed, deleted, unknown, ignored:
656 656 l.sort()
657 657 if show_ignored is None:
658 658 return (modified, added, removed, deleted, unknown)
659 659 else:
660 660 return (modified, added, removed, deleted, unknown, ignored)
661 661
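# Illustration (hypothetical, not from the original source): callers unpack
# the result as sorted lists, e.g.
#
#   modified, added, removed, deleted, unknown = repo.changes(node1, node2)
#
# and with show_ignored set a sixth "ignored" list is appended. node1=None
# compares against the first dirstate parent, node2=None against the working
# directory, as the docstring above describes.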
662 662 def add(self, list, wlock=None):
663 663 if not wlock:
664 664 wlock = self.wlock()
665 665 for f in list:
666 666 p = self.wjoin(f)
667 667 if not os.path.exists(p):
668 668 self.ui.warn(_("%s does not exist!\n") % f)
669 669 elif not os.path.isfile(p):
670 670 self.ui.warn(_("%s not added: only files supported currently\n")
671 671 % f)
672 672 elif self.dirstate.state(f) in 'an':
673 673 self.ui.warn(_("%s already tracked!\n") % f)
674 674 else:
675 675 self.dirstate.update([f], "a")
676 676
677 677 def forget(self, list, wlock=None):
678 678 if not wlock:
679 679 wlock = self.wlock()
680 680 for f in list:
681 681 if self.dirstate.state(f) not in 'ai':
682 682 self.ui.warn(_("%s not added!\n") % f)
683 683 else:
684 684 self.dirstate.forget([f])
685 685
686 686 def remove(self, list, unlink=False, wlock=None):
687 687 if unlink:
688 688 for f in list:
689 689 try:
690 690 util.unlink(self.wjoin(f))
691 691 except OSError, inst:
692 692 if inst.errno != errno.ENOENT:
693 693 raise
694 694 if not wlock:
695 695 wlock = self.wlock()
696 696 for f in list:
697 697 p = self.wjoin(f)
698 698 if os.path.exists(p):
699 699 self.ui.warn(_("%s still exists!\n") % f)
700 700 elif self.dirstate.state(f) == 'a':
701 701 self.dirstate.forget([f])
702 702 elif f not in self.dirstate:
703 703 self.ui.warn(_("%s not tracked!\n") % f)
704 704 else:
705 705 self.dirstate.update([f], "r")
706 706
707 707 def undelete(self, list, wlock=None):
708 708 p = self.dirstate.parents()[0]
709 709 mn = self.changelog.read(p)[0]
710 710 mf = self.manifest.readflags(mn)
711 711 m = self.manifest.read(mn)
712 712 if not wlock:
713 713 wlock = self.wlock()
714 714 for f in list:
715 715 if self.dirstate.state(f) not in "r":
716 716 self.ui.warn("%s not removed!\n" % f)
717 717 else:
718 718 t = self.file(f).read(m[f])
719 719 self.wwrite(f, t)
720 720 util.set_exec(self.wjoin(f), mf[f])
721 721 self.dirstate.update([f], "n")
722 722
723 723 def copy(self, source, dest, wlock=None):
724 724 p = self.wjoin(dest)
725 725 if not os.path.exists(p):
726 726 self.ui.warn(_("%s does not exist!\n") % dest)
727 727 elif not os.path.isfile(p):
728 728 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
729 729 else:
730 730 if not wlock:
731 731 wlock = self.wlock()
732 732 if self.dirstate.state(dest) == '?':
733 733 self.dirstate.update([dest], "a")
734 734 self.dirstate.copy(source, dest)
735 735
736 736 def heads(self, start=None):
737 737 heads = self.changelog.heads(start)
738 738 # sort the output in rev descending order
739 739 heads = [(-self.changelog.rev(h), h) for h in heads]
740 740 heads.sort()
741 741 return [n for (r, n) in heads]
742 742
743 743 # branchlookup returns a dict giving a list of branches for
744 744 # each head. A branch is defined as the tag of a node or
745 745 # the branch of the node's parents. If a node has multiple
746 746 # branch tags, tags are eliminated if they are visible from other
747 747 # branch tags.
748 748 #
749 749 # So, for this graph: a->b->c->d->e
750 750 # \ /
751 751 # aa -----/
752 752 # a has tag 2.6.12
753 753 # d has tag 2.6.13
754 754 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
755 755 # for 2.6.12 can be reached from the node for 2.6.13, the 2.6.12 tag
756 756 # is eliminated from the list.
757 757 #
758 758 # It is possible that more than one head will have the same branch tag.
759 759 # callers need to check the result for multiple heads under the same
760 760 # branch tag if that is a problem for them (ie checkout of a specific
761 761 # branch).
762 762 #
763 763 # passing in a specific branch will limit the depth of the search
764 764 # through the parents. It won't limit the branches returned in the
765 765 # result though.
766 766 def branchlookup(self, heads=None, branch=None):
767 767 if not heads:
768 768 heads = self.heads()
769 769 headt = [ h for h in heads ]
770 770 chlog = self.changelog
771 771 branches = {}
772 772 merges = []
773 773 seenmerge = {}
774 774
775 775 # traverse the tree once for each head, recording in the branches
776 776 # dict which tags are visible from this head. The branches
777 777 # dict also records which tags are visible from each tag
778 778 # while we traverse.
779 779 while headt or merges:
780 780 if merges:
781 781 n, found = merges.pop()
782 782 visit = [n]
783 783 else:
784 784 h = headt.pop()
785 785 visit = [h]
786 786 found = [h]
787 787 seen = {}
788 788 while visit:
789 789 n = visit.pop()
790 790 if n in seen:
791 791 continue
792 792 pp = chlog.parents(n)
793 793 tags = self.nodetags(n)
794 794 if tags:
795 795 for x in tags:
796 796 if x == 'tip':
797 797 continue
798 798 for f in found:
799 799 branches.setdefault(f, {})[n] = 1
800 800 branches.setdefault(n, {})[n] = 1
801 801 break
802 802 if n not in found:
803 803 found.append(n)
804 804 if branch in tags:
805 805 continue
806 806 seen[n] = 1
807 807 if pp[1] != nullid and n not in seenmerge:
808 808 merges.append((pp[1], [x for x in found]))
809 809 seenmerge[n] = 1
810 810 if pp[0] != nullid:
811 811 visit.append(pp[0])
812 812 # traverse the branches dict, eliminating branch tags from each
813 813 # head that are visible from another branch tag for that head.
814 814 out = {}
815 815 viscache = {}
816 816 for h in heads:
817 817 def visible(node):
818 818 if node in viscache:
819 819 return viscache[node]
820 820 ret = {}
821 821 visit = [node]
822 822 while visit:
823 823 x = visit.pop()
824 824 if x in viscache:
825 825 ret.update(viscache[x])
826 826 elif x not in ret:
827 827 ret[x] = 1
828 828 if x in branches:
829 829 visit[len(visit):] = branches[x].keys()
830 830 viscache[node] = ret
831 831 return ret
832 832 if h not in branches:
833 833 continue
834 834 # O(n^2), but somewhat limited. This only searches the
835 835 # tags visible from a specific head, not all the tags in the
836 836 # whole repo.
837 837 for b in branches[h]:
838 838 vis = False
839 839 for bb in branches[h].keys():
840 840 if b != bb:
841 841 if b in visible(bb):
842 842 vis = True
843 843 break
844 844 if not vis:
845 845 l = out.setdefault(h, [])
846 846 l[len(l):] = self.nodetags(b)
847 847 return out
848 848
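# A rough sketch (not part of the original): the result maps each head node
# to the branch tags that survive the elimination described in the comment
# block above; for the a->b->c->d->e example there, something like
#
#   {<node of e>: ['2.6.13'], ...}
#
# since the 2.6.12 tag is reachable from 2.6.13 and is therefore dropped.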
849 849 def branches(self, nodes):
850 850 if not nodes:
851 851 nodes = [self.changelog.tip()]
852 852 b = []
853 853 for n in nodes:
854 854 t = n
855 855 while n:
856 856 p = self.changelog.parents(n)
857 857 if p[1] != nullid or p[0] == nullid:
858 858 b.append((t, n, p[0], p[1]))
859 859 break
860 860 n = p[0]
861 861 return b
862 862
863 863 def between(self, pairs):
864 864 r = []
865 865
866 866 for top, bottom in pairs:
867 867 n, l, i = top, [], 0
868 868 f = 1
869 869
870 870 while n != bottom:
871 871 p = self.changelog.parents(n)[0]
872 872 if i == f:
873 873 l.append(n)
874 874 f = f * 2
875 875 n = p
876 876 i += 1
877 877
878 878 r.append(l)
879 879
880 880 return r
881 881
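# A rough sketch (not part of the original): for each (top, bottom) pair,
# between() walks the first-parent chain from top toward bottom and records
# the nodes at distances 1, 2, 4, 8, ... (f doubles each time it fires).
# These sample points are what the "narrowing" loop in findincoming() below
# bisects against.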
882 882 def findincoming(self, remote, base=None, heads=None, force=False):
883 883 m = self.changelog.nodemap
884 884 search = []
885 885 fetch = {}
886 886 seen = {}
887 887 seenbranch = {}
888 888 if base == None:
889 889 base = {}
890 890
891 891 if not heads:
892 892 heads = remote.heads()
893 893
894 894 if self.changelog.tip() == nullid:
895 895 if heads != [nullid]:
896 896 return [nullid]
897 897 return []
898 898
899 899 # assume we're closer to the tip than the root
900 900 # and start by examining the heads
901 901 self.ui.status(_("searching for changes\n"))
902 902
903 903 unknown = []
904 904 for h in heads:
905 905 if h not in m:
906 906 unknown.append(h)
907 907 else:
908 908 base[h] = 1
909 909
910 910 if not unknown:
911 911 return []
912 912
913 913 rep = {}
914 914 reqcnt = 0
915 915
916 916 # search through remote branches
917 917 # a 'branch' here is a linear segment of history, with four parts:
918 918 # head, root, first parent, second parent
919 919 # (a branch always has two parents (or none) by definition)
920 920 unknown = remote.branches(unknown)
921 921 while unknown:
922 922 r = []
923 923 while unknown:
924 924 n = unknown.pop(0)
925 925 if n[0] in seen:
926 926 continue
927 927
928 928 self.ui.debug(_("examining %s:%s\n")
929 929 % (short(n[0]), short(n[1])))
930 930 if n[0] == nullid:
931 931 break
932 932 if n in seenbranch:
933 933 self.ui.debug(_("branch already found\n"))
934 934 continue
935 935 if n[1] and n[1] in m: # do we know the base?
936 936 self.ui.debug(_("found incomplete branch %s:%s\n")
937 937 % (short(n[0]), short(n[1])))
938 938 search.append(n) # schedule branch range for scanning
939 939 seenbranch[n] = 1
940 940 else:
941 941 if n[1] not in seen and n[1] not in fetch:
942 942 if n[2] in m and n[3] in m:
943 943 self.ui.debug(_("found new changeset %s\n") %
944 944 short(n[1]))
945 945 fetch[n[1]] = 1 # earliest unknown
946 946 base[n[2]] = 1 # latest known
947 947 continue
948 948
949 949 for a in n[2:4]:
950 950 if a not in rep:
951 951 r.append(a)
952 952 rep[a] = 1
953 953
954 954 seen[n[0]] = 1
955 955
956 956 if r:
957 957 reqcnt += 1
958 958 self.ui.debug(_("request %d: %s\n") %
959 959 (reqcnt, " ".join(map(short, r))))
960 960 for p in range(0, len(r), 10):
961 961 for b in remote.branches(r[p:p+10]):
962 962 self.ui.debug(_("received %s:%s\n") %
963 963 (short(b[0]), short(b[1])))
964 964 if b[0] in m:
965 965 self.ui.debug(_("found base node %s\n")
966 966 % short(b[0]))
967 967 base[b[0]] = 1
968 968 elif b[0] not in seen:
969 969 unknown.append(b)
970 970
971 971 # do binary search on the branches we found
972 972 while search:
973 973 n = search.pop(0)
974 974 reqcnt += 1
975 975 l = remote.between([(n[0], n[1])])[0]
976 976 l.append(n[1])
977 977 p = n[0]
978 978 f = 1
979 979 for i in l:
980 980 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
981 981 if i in m:
982 982 if f <= 2:
983 983 self.ui.debug(_("found new branch changeset %s\n") %
984 984 short(p))
985 985 fetch[p] = 1
986 986 base[i] = 1
987 987 else:
988 988 self.ui.debug(_("narrowed branch search to %s:%s\n")
989 989 % (short(p), short(i)))
990 990 search.append((p, i))
991 991 break
992 992 p, f = i, f * 2
993 993
994 994 # sanity check our fetch list
995 995 for f in fetch.keys():
996 996 if f in m:
997 997 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
998 998
999 999 if base.keys() == [nullid]:
1000 1000 if force:
1001 1001 self.ui.warn(_("warning: repository is unrelated\n"))
1002 1002 else:
1003 1003 raise util.Abort(_("repository is unrelated"))
1004 1004
1005 1005 self.ui.note(_("found new changesets starting at ") +
1006 1006 " ".join([short(f) for f in fetch]) + "\n")
1007 1007
1008 1008 self.ui.debug(_("%d total queries\n") % reqcnt)
1009 1009
1010 1010 return fetch.keys()
1011 1011
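# A rough sketch (not part of the original): findincoming() drives the
# discovery protocol: start from the remote heads, pull unknown "branches"
# (linear history segments) from the remote in batches of ten, and for every
# segment whose base is already known locally, bisect it with between()
# until the earliest unknown changeset is found. It returns the roots of the
# missing changesets and fills "base" with nodes known to both sides.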
1012 1012 def findoutgoing(self, remote, base=None, heads=None, force=False):
1013 1013 """Return list of nodes that are roots of subsets not in remote
1014 1014
1015 1015 If base dict is specified, assume that these nodes and their parents
1016 1016 exist on the remote side.
1017 1017 If a list of heads is specified, return only nodes which are heads
1018 1018 or ancestors of these heads, and return a second element which
1019 1019 contains all remote heads which get new children.
1020 1020 """
1021 1021 if base == None:
1022 1022 base = {}
1023 1023 self.findincoming(remote, base, heads, force=force)
1024 1024
1025 1025 self.ui.debug(_("common changesets up to ")
1026 1026 + " ".join(map(short, base.keys())) + "\n")
1027 1027
1028 1028 remain = dict.fromkeys(self.changelog.nodemap)
1029 1029
1030 1030 # prune everything remote has from the tree
1031 1031 del remain[nullid]
1032 1032 remove = base.keys()
1033 1033 while remove:
1034 1034 n = remove.pop(0)
1035 1035 if n in remain:
1036 1036 del remain[n]
1037 1037 for p in self.changelog.parents(n):
1038 1038 remove.append(p)
1039 1039
1040 1040 # find every node whose parents have been pruned
1041 1041 subset = []
1042 1042 # find every remote head that will get new children
1043 1043 updated_heads = {}
1044 1044 for n in remain:
1045 1045 p1, p2 = self.changelog.parents(n)
1046 1046 if p1 not in remain and p2 not in remain:
1047 1047 subset.append(n)
1048 1048 if heads:
1049 1049 if p1 in heads:
1050 1050 updated_heads[p1] = True
1051 1051 if p2 in heads:
1052 1052 updated_heads[p2] = True
1053 1053
1054 1054 # this is the set of all roots we have to push
1055 1055 if heads:
1056 1056 return subset, updated_heads.keys()
1057 1057 else:
1058 1058 return subset
1059 1059
1060 1060 def pull(self, remote, heads=None, force=False):
1061 1061 l = self.lock()
1062 1062
1063 1063 fetch = self.findincoming(remote, force=force)
1064 1064 if fetch == [nullid]:
1065 1065 self.ui.status(_("requesting all changes\n"))
1066 1066
1067 1067 if not fetch:
1068 1068 self.ui.status(_("no changes found\n"))
1069 1069 return 0
1070 1070
1071 1071 if heads is None:
1072 1072 cg = remote.changegroup(fetch, 'pull')
1073 1073 else:
1074 1074 cg = remote.changegroupsubset(fetch, heads, 'pull')
1075 1075 return self.addchangegroup(cg)
1076 1076
1077 1077 def push(self, remote, force=False, revs=None):
1078 1078 lock = remote.lock()
1079 1079
1080 1080 base = {}
1081 1081 remote_heads = remote.heads()
1082 1082 inc = self.findincoming(remote, base, remote_heads, force=force)
1083 1083 if not force and inc:
1084 1084 self.ui.warn(_("abort: unsynced remote changes!\n"))
1085 1085 self.ui.status(_("(did you forget to sync?"
1086 1086 " use push -f to force)\n"))
1087 1087 return 1
1088 1088
1089 1089 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1090 1090 if revs is not None:
1091 1091 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1092 1092 else:
1093 1093 bases, heads = update, self.changelog.heads()
1094 1094
1095 1095 if not bases:
1096 1096 self.ui.status(_("no changes found\n"))
1097 1097 return 1
1098 1098 elif not force:
1099 1099 # FIXME we don't properly detect creation of new heads
1100 1100 # in the push -r case, assume the user knows what he's doing
1101 1101 if not revs and len(remote_heads) < len(heads) \
1102 1102 and remote_heads != [nullid]:
1103 1103 self.ui.warn(_("abort: push creates new remote branches!\n"))
1104 1104 self.ui.status(_("(did you forget to merge?"
1105 1105 " use push -f to force)\n"))
1106 1106 return 1
1107 1107
1108 1108 if revs is None:
1109 1109 cg = self.changegroup(update, 'push')
1110 1110 else:
1111 1111 cg = self.changegroupsubset(update, revs, 'push')
1112 1112 return remote.addchangegroup(cg)
1113 1113
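# A hedged sketch of the (admittedly rough) new-head check push() performs
# above: without -r/--rev, the push is refused when the local repository has
# more heads than the remote already knows about.  Toy values, not real nodes.
def would_create_remote_heads(local_heads, remote_heads, revs=None,
                              nullid="\0" * 20):
    if revs is not None:    # push -r: the user is trusted (see the FIXME above)
        return False
    return len(remote_heads) < len(local_heads) and remote_heads != [nullid]

# would_create_remote_heads(['h1', 'h2'], ['h1'])  -> True  (refuse, need -f)
# would_create_remote_heads(['h1'], ['h1'])        -> False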
1114 1114 def changegroupsubset(self, bases, heads, source):
1115 1115 """This function generates a changegroup consisting of all the nodes
1116 1116 that are descendants of any of the bases, and ancestors of any of
1117 1117 the heads.
1118 1118
1119 1119 It is fairly complex as determining which filenodes and which
1120 1120 manifest nodes need to be included for the changeset to be complete
1121 1121 is non-trivial.
1122 1122
1123 1123 Another wrinkle is doing the reverse, figuring out which changeset in
1124 1124 the changegroup a particular filenode or manifestnode belongs to."""
1125 1125
1126 1126 self.hook('preoutgoing', throw=True, source=source)
1127 1127
1128 1128 # Set up some initial variables
1129 1129 # Make it easy to refer to self.changelog
1130 1130 cl = self.changelog
1131 1131 # msng is short for missing - compute the list of changesets in this
1132 1132 # changegroup.
1133 1133 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1134 1134 # Some bases may turn out to be superfluous, and some heads may be
1135 1135 # too. nodesbetween will return the minimal set of bases and heads
1136 1136 # necessary to re-create the changegroup.
1137 1137
1138 1138 # Known heads are the list of heads that it is assumed the recipient
1139 1139 # of this changegroup will know about.
1140 1140 knownheads = {}
1141 1141 # We assume that all parents of bases are known heads.
1142 1142 for n in bases:
1143 1143 for p in cl.parents(n):
1144 1144 if p != nullid:
1145 1145 knownheads[p] = 1
1146 1146 knownheads = knownheads.keys()
1147 1147 if knownheads:
1148 1148 # Now that we know what heads are known, we can compute which
1149 1149 # changesets are known. The recipient must know about all
1150 1150 # changesets required to reach the known heads from the null
1151 1151 # changeset.
1152 1152 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1153 1153 junk = None
1154 1154 # Transform the list into an ersatz set.
1155 1155 has_cl_set = dict.fromkeys(has_cl_set)
1156 1156 else:
1157 1157 # If there were no known heads, the recipient cannot be assumed to
1158 1158 # know about any changesets.
1159 1159 has_cl_set = {}
1160 1160
1161 1161 # Make it easy to refer to self.manifest
1162 1162 mnfst = self.manifest
1163 1163 # We don't know which manifests are missing yet
1164 1164 msng_mnfst_set = {}
1165 1165 # Nor do we know which filenodes are missing.
1166 1166 msng_filenode_set = {}
1167 1167
1168 1168 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1169 1169 junk = None
1170 1170
1171 1171 # A changeset always belongs to itself, so the changenode lookup
1172 1172 # function for a changenode is identity.
1173 1173 def identity(x):
1174 1174 return x
1175 1175
1176 1176 # A function generating function. Sets up an environment for the
1177 1177 # inner function.
1178 1178 def cmp_by_rev_func(revlog):
1179 1179 # Compare two nodes by their revision number in the environment's
1180 1180 # revision history. Since the revision number is both the
1181 1181 # most efficient order to read the nodes in and a
1182 1182 # topological sorting of the nodes, this function is often useful.
1183 1183 def cmp_by_rev(a, b):
1184 1184 return cmp(revlog.rev(a), revlog.rev(b))
1185 1185 return cmp_by_rev
1186 1186
1187 1187 # If we determine that a particular file or manifest node must be a
1188 1188 # node that the recipient of the changegroup will already have, we can
1189 1189 # also assume the recipient will have all the parents. This function
1190 1190 # prunes them from the set of missing nodes.
1191 1191 def prune_parents(revlog, hasset, msngset):
1192 1192 haslst = hasset.keys()
1193 1193 haslst.sort(cmp_by_rev_func(revlog))
1194 1194 for node in haslst:
1195 1195 parentlst = [p for p in revlog.parents(node) if p != nullid]
1196 1196 while parentlst:
1197 1197 n = parentlst.pop()
1198 1198 if n not in hasset:
1199 1199 hasset[n] = 1
1200 1200 p = [p for p in revlog.parents(n) if p != nullid]
1201 1201 parentlst.extend(p)
1202 1202 for n in hasset:
1203 1203 msngset.pop(n, None)
1204 1204
1205 1205 # This is a function generating function used to set up an environment
1206 1206 # for the inner function to execute in.
1207 1207 def manifest_and_file_collector(changedfileset):
1208 1208 # This is an information gathering function that gathers
1209 1209 # information from each changeset node that goes out as part of
1210 1210 # the changegroup. The information gathered is a list of which
1211 1211 # manifest nodes are potentially required (the recipient may
1212 1212 # already have them) and total list of all files which were
1213 1213 # changed in any changeset in the changegroup.
1214 1214 #
1215 1215 # We also remember the first changenode we saw any manifest
1216 1216 # referenced by so we can later determine which changenode 'owns'
1217 1217 # the manifest.
1218 1218 def collect_manifests_and_files(clnode):
1219 1219 c = cl.read(clnode)
1220 1220 for f in c[3]:
1221 1221 # This is to make sure we only have one instance of each
1222 1222 # filename string for each filename.
1223 1223 changedfileset.setdefault(f, f)
1224 1224 msng_mnfst_set.setdefault(c[0], clnode)
1225 1225 return collect_manifests_and_files
1226 1226
1227 1227 # Figure out which manifest nodes (of the ones we think might be part
1228 1228 # of the changegroup) the recipient must know about and remove them
1229 1229 # from the changegroup.
1230 1230 def prune_manifests():
1231 1231 has_mnfst_set = {}
1232 1232 for n in msng_mnfst_set:
1233 1233 # If a 'missing' manifest thinks it belongs to a changenode
1234 1234 # the recipient is assumed to have, obviously the recipient
1235 1235 # must have that manifest.
1236 1236 linknode = cl.node(mnfst.linkrev(n))
1237 1237 if linknode in has_cl_set:
1238 1238 has_mnfst_set[n] = 1
1239 1239 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1240 1240
1241 1241 # Use the information collected in collect_manifests_and_files to say
1242 1242 # which changenode any manifestnode belongs to.
1243 1243 def lookup_manifest_link(mnfstnode):
1244 1244 return msng_mnfst_set[mnfstnode]
1245 1245
1246 1246 # A function generating function that sets up the initial environment
1247 1247 # for the inner function.
1248 1248 def filenode_collector(changedfiles):
1249 1249 next_rev = [0]
1250 1250 # This gathers information from each manifestnode included in the
1251 1251 # changegroup about which filenodes the manifest node references
1252 1252 # so we can include those in the changegroup too.
1253 1253 #
1254 1254 # It also remembers which changenode each filenode belongs to. It
1255 1255 # does this by assuming that a filenode belongs to the changenode
1256 1256 # the first manifest that references it belongs to.
1257 1257 def collect_msng_filenodes(mnfstnode):
1258 1258 r = mnfst.rev(mnfstnode)
1259 1259 if r == next_rev[0]:
1260 1260 # If the last rev we looked at was the one just previous,
1261 1261 # we only need to see a diff.
1262 1262 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1263 1263 # For each line in the delta
1264 1264 for dline in delta.splitlines():
1265 1265 # get the filename and filenode for that line
1266 1266 f, fnode = dline.split('\0')
1267 1267 fnode = bin(fnode[:40])
1268 1268 f = changedfiles.get(f, None)
1269 1269 # And if the file is in the list of files we care
1270 1270 # about.
1271 1271 if f is not None:
1272 1272 # Get the changenode this manifest belongs to
1273 1273 clnode = msng_mnfst_set[mnfstnode]
1274 1274 # Create the set of filenodes for the file if
1275 1275 # there isn't one already.
1276 1276 ndset = msng_filenode_set.setdefault(f, {})
1277 1277 # And set the filenode's changelog node to the
1278 1278 # manifest's if it hasn't been set already.
1279 1279 ndset.setdefault(fnode, clnode)
1280 1280 else:
1281 1281 # Otherwise we need a full manifest.
1282 1282 m = mnfst.read(mnfstnode)
1283 1283 # For every file we care about.
1284 1284 for f in changedfiles:
1285 1285 fnode = m.get(f, None)
1286 1286 # If it's in the manifest
1287 1287 if fnode is not None:
1288 1288 # See comments above.
1289 1289 clnode = msng_mnfst_set[mnfstnode]
1290 1290 ndset = msng_filenode_set.setdefault(f, {})
1291 1291 ndset.setdefault(fnode, clnode)
1292 1292 # Remember the revision we hope to see next.
1293 1293 next_rev[0] = r + 1
1294 1294 return collect_msng_filenodes
1295 1295
1296 1296 # We have a list of filenodes we think we need for a file, let's remove
1297 1297 # all those we know the recipient must have.
1298 1298 def prune_filenodes(f, filerevlog):
1299 1299 msngset = msng_filenode_set[f]
1300 1300 hasset = {}
1301 1301 # If a 'missing' filenode thinks it belongs to a changenode we
1302 1302 # assume the recipient must have, then the recipient must have
1303 1303 # that filenode.
1304 1304 for n in msngset:
1305 1305 clnode = cl.node(filerevlog.linkrev(n))
1306 1306 if clnode in has_cl_set:
1307 1307 hasset[n] = 1
1308 1308 prune_parents(filerevlog, hasset, msngset)
1309 1309
1310 1310 # A function generating function that sets up a context for the
1311 1311 # inner function.
1312 1312 def lookup_filenode_link_func(fname):
1313 1313 msngset = msng_filenode_set[fname]
1314 1314 # Lookup the changenode the filenode belongs to.
1315 1315 def lookup_filenode_link(fnode):
1316 1316 return msngset[fnode]
1317 1317 return lookup_filenode_link
1318 1318
1319 1319 # Now that we have all these utility functions to help out and
1320 1320 # logically divide up the task, generate the group.
1321 1321 def gengroup():
1322 1322 # The set of changed files starts empty.
1323 1323 changedfiles = {}
1324 1324 # Create a changenode group generator that will call our functions
1325 1325 # back to lookup the owning changenode and collect information.
1326 1326 group = cl.group(msng_cl_lst, identity,
1327 1327 manifest_and_file_collector(changedfiles))
1328 1328 for chnk in group:
1329 1329 yield chnk
1330 1330
1331 1331 # The list of manifests has been collected by the generator
1332 1332 # calling our functions back.
1333 1333 prune_manifests()
1334 1334 msng_mnfst_lst = msng_mnfst_set.keys()
1335 1335 # Sort the manifestnodes by revision number.
1336 1336 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1337 1337 # Create a generator for the manifestnodes that calls our lookup
1338 1338 # and data collection functions back.
1339 1339 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1340 1340 filenode_collector(changedfiles))
1341 1341 for chnk in group:
1342 1342 yield chnk
1343 1343
1344 1344 # These are no longer needed, dereference and toss the memory for
1345 1345 # them.
1346 1346 msng_mnfst_lst = None
1347 1347 msng_mnfst_set.clear()
1348 1348
1349 1349 changedfiles = changedfiles.keys()
1350 1350 changedfiles.sort()
1351 1351 # Go through all our files in order sorted by name.
1352 1352 for fname in changedfiles:
1353 1353 filerevlog = self.file(fname)
1354 1354 # Toss out the filenodes that the recipient isn't really
1355 1355 # missing.
1356 1356 if msng_filenode_set.has_key(fname):
1357 1357 prune_filenodes(fname, filerevlog)
1358 1358 msng_filenode_lst = msng_filenode_set[fname].keys()
1359 1359 else:
1360 1360 msng_filenode_lst = []
1361 1361 # If any filenodes are left, generate the group for them,
1362 1362 # otherwise don't bother.
1363 1363 if len(msng_filenode_lst) > 0:
1364 1364 yield changegroup.genchunk(fname)
1365 1365 # Sort the filenodes by their revision #
1366 1366 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1367 1367 # Create a group generator and only pass in a changenode
1368 1368 # lookup function as we need to collect no information
1369 1369 # from filenodes.
1370 1370 group = filerevlog.group(msng_filenode_lst,
1371 1371 lookup_filenode_link_func(fname))
1372 1372 for chnk in group:
1373 1373 yield chnk
1374 1374 if msng_filenode_set.has_key(fname):
1375 1375 # Don't need this anymore, toss it to free memory.
1376 1376 del msng_filenode_set[fname]
1377 1377 # Signal that no more groups are left.
1378 1378 yield changegroup.closechunk()
1379 1379
1380 1380 if msng_cl_lst:
1381 1381 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1382 1382
1383 1383 return util.chunkbuffer(gengroup())
1384 1384
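# A minimal sketch of the prune_parents() step used inside changegroupsubset()
# above, with plain dicts standing in for a revlog (illustrative helpers, not
# Mercurial APIs).  Once a node is known to exist on the receiving side, all
# of its ancestors can be dropped from the "missing" set too.
def prune_with_ancestors(parents, hasset, msngset):
    # (the original also sorts the known nodes by revision first, purely as
    # an optimisation; that step is skipped here)
    for node in list(hasset):
        stack = list(parents.get(node, ()))
        while stack:
            n = stack.pop()
            if n not in hasset:
                hasset[n] = 1
                stack.extend(parents.get(n, ()))
    for n in hasset:
        msngset.pop(n, None)

# missing = {'a': 1, 'b': 1, 'c': 1}; if 'c' (child of 'b', child of 'a') is
# already present remotely, pruning empties the missing set for that chain.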
1385 1385 def changegroup(self, basenodes, source):
1386 1386 """Generate a changegroup of all nodes that we have that a recipient
1387 1387 doesn't.
1388 1388
1389 1389 This is much easier than the previous function as we can assume that
1390 1390 the recipient has any changenode we aren't sending them."""
1391 1391
1392 1392 self.hook('preoutgoing', throw=True, source=source)
1393 1393
1394 1394 cl = self.changelog
1395 1395 nodes = cl.nodesbetween(basenodes, None)[0]
1396 1396 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1397 1397
1398 1398 def identity(x):
1399 1399 return x
1400 1400
1401 1401 def gennodelst(revlog):
1402 1402 for r in xrange(0, revlog.count()):
1403 1403 n = revlog.node(r)
1404 1404 if revlog.linkrev(n) in revset:
1405 1405 yield n
1406 1406
1407 1407 def changed_file_collector(changedfileset):
1408 1408 def collect_changed_files(clnode):
1409 1409 c = cl.read(clnode)
1410 1410 for fname in c[3]:
1411 1411 changedfileset[fname] = 1
1412 1412 return collect_changed_files
1413 1413
1414 1414 def lookuprevlink_func(revlog):
1415 1415 def lookuprevlink(n):
1416 1416 return cl.node(revlog.linkrev(n))
1417 1417 return lookuprevlink
1418 1418
1419 1419 def gengroup():
1420 1420 # construct a list of all changed files
1421 1421 changedfiles = {}
1422 1422
1423 1423 for chnk in cl.group(nodes, identity,
1424 1424 changed_file_collector(changedfiles)):
1425 1425 yield chnk
1426 1426 changedfiles = changedfiles.keys()
1427 1427 changedfiles.sort()
1428 1428
1429 1429 mnfst = self.manifest
1430 1430 nodeiter = gennodelst(mnfst)
1431 1431 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1432 1432 yield chnk
1433 1433
1434 1434 for fname in changedfiles:
1435 1435 filerevlog = self.file(fname)
1436 1436 nodeiter = gennodelst(filerevlog)
1437 1437 nodeiter = list(nodeiter)
1438 1438 if nodeiter:
1439 1439 yield changegroup.genchunk(fname)
1440 1440 lookup = lookuprevlink_func(filerevlog)
1441 1441 for chnk in filerevlog.group(nodeiter, lookup):
1442 1442 yield chnk
1443 1443
1444 1444 yield changegroup.closechunk()
1445 1445
1446 1446 if nodes:
1447 1447 self.hook('outgoing', node=hex(nodes[0]), source=source)
1448 1448
1449 1449 return util.chunkbuffer(gengroup())
1450 1450
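# A small sketch of the gennodelst() idea above: walk a revlog-like sequence
# in storage order and keep only the entries whose linkrev points at one of
# the changesets being sent.  'entries' is an illustrative list of
# (node, linkrev) pairs, not a real revlog.
def nodes_linked_to(entries, revset):
    for node, linkrev in entries:
        if linkrev in revset:
            yield node

# list(nodes_linked_to([('n0', 0), ('n1', 2), ('n2', 5)], {2: None, 5: None}))
# -> ['n1', 'n2']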
1451 1451 def addchangegroup(self, source):
1452 1452 """add changegroup to repo.
1453 1453 returns number of heads modified or added + 1."""
1454 1454
1455 1455 def csmap(x):
1456 1456 self.ui.debug(_("add changeset %s\n") % short(x))
1457 1457 return cl.count()
1458 1458
1459 1459 def revmap(x):
1460 1460 return cl.rev(x)
1461 1461
1462 1462 if not source:
1463 1463 return 0
1464 1464
1465 1465 self.hook('prechangegroup', throw=True)
1466 1466
1467 1467 changesets = files = revisions = 0
1468 1468
1469 1469 tr = self.transaction()
1470 1470
1471 1471 # write changelog and manifest data to temp files so
1472 1472 # concurrent readers will not see inconsistent view
1473 1473 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1474 1474
1475 1475 oldheads = len(cl.heads())
1476 1476
1477 1477 # pull off the changeset group
1478 1478 self.ui.status(_("adding changesets\n"))
1479 1479 co = cl.tip()
1480 1480 chunkiter = changegroup.chunkiter(source)
1481 1481 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1482 1482 cnr, cor = map(cl.rev, (cn, co))
1483 1483 if cn == nullid:
1484 1484 cnr = cor
1485 1485 changesets = cnr - cor
1486 1486
1487 1487 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1488 1488
1489 1489 # pull off the manifest group
1490 1490 self.ui.status(_("adding manifests\n"))
1491 1491 mm = mf.tip()
1492 1492 chunkiter = changegroup.chunkiter(source)
1493 1493 mo = mf.addgroup(chunkiter, revmap, tr)
1494 1494
1495 1495 # process the files
1496 1496 self.ui.status(_("adding file changes\n"))
1497 1497 while 1:
1498 1498 f = changegroup.getchunk(source)
1499 1499 if not f:
1500 1500 break
1501 1501 self.ui.debug(_("adding %s revisions\n") % f)
1502 1502 fl = self.file(f)
1503 1503 o = fl.count()
1504 1504 chunkiter = changegroup.chunkiter(source)
1505 1505 n = fl.addgroup(chunkiter, revmap, tr)
1506 1506 revisions += fl.count() - o
1507 1507 files += 1
1508 1508
1509 1509 # write order here is important so concurrent readers will see
1510 1510 # consistent view of repo
1511 1511 mf.writedata()
1512 1512 cl.writedata()
1513 1513
1514 1514 # make changelog and manifest see real files again
1515 1515 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1516 1516 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1517 1517 self.changelog.checkinlinesize(tr)
1518 1518 self.manifest.checkinlinesize(tr)
1519 1519
1520 1520 newheads = len(self.changelog.heads())
1521 1521 heads = ""
1522 1522 if oldheads and newheads > oldheads:
1523 1523 heads = _(" (+%d heads)") % (newheads - oldheads)
1524 1524
1525 1525 self.ui.status(_("added %d changesets"
1526 1526 " with %d changes to %d files%s\n")
1527 1527 % (changesets, revisions, files, heads))
1528 1528
1529 1529 self.hook('pretxnchangegroup', throw=True,
1530 1530 node=hex(self.changelog.node(cor+1)))
1531 1531
1532 1532 tr.close()
1533 1533
1534 1534 if changesets > 0:
1535 1535 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1536 1536
1537 1537 for i in range(cor + 1, cnr + 1):
1538 1538 self.hook("incoming", node=hex(self.changelog.node(i)))
1539 1539
1540 1540 return newheads - oldheads + 1
1541 1541
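# The per-file phase of addchangegroup() above follows a simple framing:
# a filename chunk, then that file's delta group, repeated until an empty
# chunk ends the stream.  A hedged sketch of that consumer loop, with
# read_chunk, read_group and apply_group standing in for changegroup.getchunk,
# changegroup.chunkiter and filelog.addgroup (illustrative names, not the
# real signatures):
def consume_file_groups(read_chunk, read_group, apply_group):
    files = revisions = 0
    while True:
        fname = read_chunk()
        if not fname:            # empty chunk: no more file groups
            break
        revisions += apply_group(fname, read_group())
        files += 1
    return files, revisions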
1542 1542 def update(self, node, allow=False, force=False, choose=None,
1543 1543 moddirstate=True, forcemerge=False, wlock=None):
1544 1544 pl = self.dirstate.parents()
1545 1545 if not force and pl[1] != nullid:
1546 1546 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1547 1547 return 1
1548 1548
1549 1549 err = False
1550 1550
1551 1551 p1, p2 = pl[0], node
1552 1552 pa = self.changelog.ancestor(p1, p2)
1553 1553 m1n = self.changelog.read(p1)[0]
1554 1554 m2n = self.changelog.read(p2)[0]
1555 1555 man = self.manifest.ancestor(m1n, m2n)
1556 1556 m1 = self.manifest.read(m1n)
1557 1557 mf1 = self.manifest.readflags(m1n)
1558 1558 m2 = self.manifest.read(m2n).copy()
1559 1559 mf2 = self.manifest.readflags(m2n)
1560 1560 ma = self.manifest.read(man)
1561 1561 mfa = self.manifest.readflags(man)
1562 1562
1563 1563 modified, added, removed, deleted, unknown = self.changes()
1564 1564
1565 1565 # is this a jump, or a merge? i.e. is there a linear path
1566 1566 # from p1 to p2?
1567 1567 linear_path = (pa == p1 or pa == p2)
1568 1568
1569 1569 if allow and linear_path:
1570 1570 raise util.Abort(_("there is nothing to merge, "
1571 1571 "just use 'hg update'"))
1572 1572 if allow and not forcemerge:
1573 1573 if modified or added or removed:
1574 1574 raise util.Abort(_("outstanding uncommitted changes"))
1575 1575 if not forcemerge and not force:
1576 1576 for f in unknown:
1577 1577 if f in m2:
1578 1578 t1 = self.wread(f)
1579 1579 t2 = self.file(f).read(m2[f])
1580 1580 if cmp(t1, t2) != 0:
1581 1581 raise util.Abort(_("'%s' already exists in the working"
1582 1582 " dir and differs from remote") % f)
1583 1583
1584 1584 # resolve the manifest to determine which files
1585 1585 # we care about merging
1586 1586 self.ui.note(_("resolving manifests\n"))
1587 1587 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1588 1588 (force, allow, moddirstate, linear_path))
1589 1589 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1590 1590 (short(man), short(m1n), short(m2n)))
1591 1591
1592 1592 merge = {}
1593 1593 get = {}
1594 1594 remove = []
1595 1595
1596 1596 # construct a working dir manifest
1597 1597 mw = m1.copy()
1598 1598 mfw = mf1.copy()
1599 1599 umap = dict.fromkeys(unknown)
1600 1600
1601 1601 for f in added + modified + unknown:
1602 1602 mw[f] = ""
1603 1603 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1604 1604
1605 1605 if moddirstate and not wlock:
1606 1606 wlock = self.wlock()
1607 1607
1608 1608 for f in deleted + removed:
1609 1609 if f in mw:
1610 1610 del mw[f]
1611 1611
1612 1612 # If we're jumping between revisions (as opposed to merging),
1613 1613 # and if neither the working directory nor the target rev has
1614 1614 # the file, then we need to remove it from the dirstate, to
1615 1615 # prevent the dirstate from listing the file when it is no
1616 1616 # longer in the manifest.
1617 1617 if moddirstate and linear_path and f not in m2:
1618 1618 self.dirstate.forget((f,))
1619 1619
1620 1620 # Compare manifests
1621 1621 for f, n in mw.iteritems():
1622 1622 if choose and not choose(f):
1623 1623 continue
1624 1624 if f in m2:
1625 1625 s = 0
1626 1626
1627 1627 # is the wfile new since m1, and match m2?
1628 1628 if f not in m1:
1629 1629 t1 = self.wread(f)
1630 1630 t2 = self.file(f).read(m2[f])
1631 1631 if cmp(t1, t2) == 0:
1632 1632 n = m2[f]
1633 1633 del t1, t2
1634 1634
1635 1635 # are files different?
1636 1636 if n != m2[f]:
1637 1637 a = ma.get(f, nullid)
1638 1638 # are both different from the ancestor?
1639 1639 if n != a and m2[f] != a:
1640 1640 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1641 1641 # merge executable bits
1642 1642 # "if we changed or they changed, change in merge"
1643 1643 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1644 1644 mode = ((a^b) | (a^c)) ^ a
1645 1645 merge[f] = (m1.get(f, nullid), m2[f], mode)
1646 1646 s = 1
1647 1647 # are we clobbering?
1648 1648 # is remote's version newer?
1649 1649 # or are we going back in time?
1650 1650 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1651 1651 self.ui.debug(_(" remote %s is newer, get\n") % f)
1652 1652 get[f] = m2[f]
1653 1653 s = 1
1654 1654 elif f in umap or f in added:
1655 1655 # this unknown file is the same as the checkout
1656 1656 # we need to reset the dirstate if the file was added
1657 1657 get[f] = m2[f]
1658 1658
1659 1659 if not s and mfw[f] != mf2[f]:
1660 1660 if force:
1661 1661 self.ui.debug(_(" updating permissions for %s\n") % f)
1662 1662 util.set_exec(self.wjoin(f), mf2[f])
1663 1663 else:
1664 1664 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1665 1665 mode = ((a^b) | (a^c)) ^ a
1666 1666 if mode != b:
1667 1667 self.ui.debug(_(" updating permissions for %s\n")
1668 1668 % f)
1669 1669 util.set_exec(self.wjoin(f), mode)
1670 1670 del m2[f]
1671 1671 elif f in ma:
1672 1672 if n != ma[f]:
1673 1673 r = _("d")
1674 1674 if not force and (linear_path or allow):
1675 1675 r = self.ui.prompt(
1676 1676 (_(" local changed %s which remote deleted\n") % f) +
1677 1677 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1678 1678 if r == _("d"):
1679 1679 remove.append(f)
1680 1680 else:
1681 1681 self.ui.debug(_("other deleted %s\n") % f)
1682 1682 remove.append(f) # other deleted it
1683 1683 else:
1684 1684 # file is created on branch or in working directory
1685 1685 if force and f not in umap:
1686 1686 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1687 1687 remove.append(f)
1688 1688 elif n == m1.get(f, nullid): # same as parent
1689 1689 if p2 == pa: # going backwards?
1690 1690 self.ui.debug(_("remote deleted %s\n") % f)
1691 1691 remove.append(f)
1692 1692 else:
1693 1693 self.ui.debug(_("local modified %s, keeping\n") % f)
1694 1694 else:
1695 1695 self.ui.debug(_("working dir created %s, keeping\n") % f)
1696 1696
1697 1697 for f, n in m2.iteritems():
1698 1698 if choose and not choose(f):
1699 1699 continue
1700 1700 if f[0] == "/":
1701 1701 continue
1702 1702 if f in ma and n != ma[f]:
1703 1703 r = _("k")
1704 1704 if not force and (linear_path or allow):
1705 1705 r = self.ui.prompt(
1706 1706 (_("remote changed %s which local deleted\n") % f) +
1707 1707 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1708 1708 if r == _("k"):
1709 1709 get[f] = n
1710 1710 elif f not in ma:
1711 1711 self.ui.debug(_("remote created %s\n") % f)
1712 1712 get[f] = n
1713 1713 else:
1714 1714 if force or p2 == pa: # going backwards?
1715 1715 self.ui.debug(_("local deleted %s, recreating\n") % f)
1716 1716 get[f] = n
1717 1717 else:
1718 1718 self.ui.debug(_("local deleted %s\n") % f)
1719 1719
1720 1720 del mw, m1, m2, ma
1721 1721
1722 1722 if force:
1723 1723 for f in merge:
1724 1724 get[f] = merge[f][1]
1725 1725 merge = {}
1726 1726
1727 1727 if linear_path or force:
1728 1728 # we don't need to do any magic, just jump to the new rev
1729 1729 branch_merge = False
1730 1730 p1, p2 = p2, nullid
1731 1731 else:
1732 1732 if not allow:
1733 1733 self.ui.status(_("this update spans a branch"
1734 1734 " affecting the following files:\n"))
1735 1735 fl = merge.keys() + get.keys()
1736 1736 fl.sort()
1737 1737 for f in fl:
1738 1738 cf = ""
1739 1739 if f in merge:
1740 1740 cf = _(" (resolve)")
1741 1741 self.ui.status(" %s%s\n" % (f, cf))
1742 1742 self.ui.warn(_("aborting update spanning branches!\n"))
1743 1743 self.ui.status(_("(use 'hg merge' to merge across branches"
1744 1744 " or 'hg update -C' to lose changes)\n"))
1745 1745 return 1
1746 1746 branch_merge = True
1747 1747
1748 1748 # get the files we don't need to change
1749 1749 files = get.keys()
1750 1750 files.sort()
1751 1751 for f in files:
1752 1752 if f[0] == "/":
1753 1753 continue
1754 1754 self.ui.note(_("getting %s\n") % f)
1755 1755 t = self.file(f).read(get[f])
1756 1756 self.wwrite(f, t)
1757 1757 util.set_exec(self.wjoin(f), mf2[f])
1758 1758 if moddirstate:
1759 1759 if branch_merge:
1760 1760 self.dirstate.update([f], 'n', st_mtime=-1)
1761 1761 else:
1762 1762 self.dirstate.update([f], 'n')
1763 1763
1764 1764 # merge the tricky bits
1765 1765 failedmerge = []
1766 1766 files = merge.keys()
1767 1767 files.sort()
1768 1768 xp1 = hex(p1)
1769 1769 xp2 = hex(p2)
1770 1770 for f in files:
1771 1771 self.ui.status(_("merging %s\n") % f)
1772 1772 my, other, flag = merge[f]
1773 1773 ret = self.merge3(f, my, other, xp1, xp2)
1774 1774 if ret:
1775 1775 err = True
1776 1776 failedmerge.append(f)
1777 1777 util.set_exec(self.wjoin(f), flag)
1778 1778 if moddirstate:
1779 1779 if branch_merge:
1780 1780 # We've done a branch merge, mark this file as merged
1781 1781 # so that we properly record the merger later
1782 1782 self.dirstate.update([f], 'm')
1783 1783 else:
1784 1784 # We've update-merged a locally modified file, so
1785 1785 # we set the dirstate to emulate a normal checkout
1786 1786 # of that file some time in the past. Thus our
1787 1787 # merge will appear as a normal local file
1788 1788 # modification.
1789 1789 f_len = len(self.file(f).read(other))
1790 1790 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1791 1791
1792 1792 remove.sort()
1793 1793 for f in remove:
1794 1794 self.ui.note(_("removing %s\n") % f)
1795 1795 util.audit_path(f)
1796 1796 try:
1797 1797 util.unlink(self.wjoin(f))
1798 1798 except OSError, inst:
1799 1799 if inst.errno != errno.ENOENT:
1800 1800 self.ui.warn(_("update failed to remove %s: %s!\n") %
1801 1801 (f, inst.strerror))
1802 1802 if moddirstate:
1803 1803 if branch_merge:
1804 1804 self.dirstate.update(remove, 'r')
1805 1805 else:
1806 1806 self.dirstate.forget(remove)
1807 1807
1808 1808 if moddirstate:
1809 1809 self.dirstate.setparents(p1, p2)
1810 1810
1811 1811 stat = ((len(get), _("updated")),
1812 1812 (len(merge) - len(failedmerge), _("merged")),
1813 1813 (len(remove), _("removed")),
1814 1814 (len(failedmerge), _("unresolved")))
1815 1815 note = ", ".join([_("%d files %s") % s for s in stat])
1816 1816 self.ui.note("%s\n" % note)
1817 1817 if moddirstate and branch_merge:
1818 1818 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1819 1819
1820 1820 return err
1821 1821
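# The executable-bit merge in update() above encodes "if we changed it or
# they changed it, take the change": with a = ancestor bit, b = local bit,
# c = remote bit, the merged bit is ((a ^ b) | (a ^ c)) ^ a.  A tiny check:
def merge_exec_bit(a, b, c):
    return ((a ^ b) | (a ^ c)) ^ a

assert merge_exec_bit(0, 1, 0) == 1   # we made it executable -> keep it
assert merge_exec_bit(1, 1, 0) == 0   # they cleared it       -> clear it
assert merge_exec_bit(0, 0, 0) == 0   # nobody touched it     -> unchanged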
1822 1822 def merge3(self, fn, my, other, p1, p2):
1823 1823 """perform a 3-way merge in the working directory"""
1824 1824
1825 1825 def temp(prefix, node):
1826 1826 pre = "%s~%s." % (os.path.basename(fn), prefix)
1827 1827 (fd, name) = tempfile.mkstemp("", pre)
1828 1828 f = os.fdopen(fd, "wb")
1829 1829 self.wwrite(fn, fl.read(node), f)
1830 1830 f.close()
1831 1831 return name
1832 1832
1833 1833 fl = self.file(fn)
1834 1834 base = fl.ancestor(my, other)
1835 1835 a = self.wjoin(fn)
1836 1836 b = temp("base", base)
1837 1837 c = temp("other", other)
1838 1838
1839 1839 self.ui.note(_("resolving %s\n") % fn)
1840 1840 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1841 1841 (fn, short(my), short(other), short(base)))
1842 1842
1843 1843 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1844 1844 or "hgmerge")
1845 1845 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1846 1846 environ={'HG_FILE': fn,
1847 1847 'HG_MY_NODE': p1,
1848 1848 'HG_OTHER_NODE': p2,
1849 1849 'HG_FILE_MY_NODE': hex(my),
1850 1850 'HG_FILE_OTHER_NODE': hex(other),
1851 1851 'HG_FILE_BASE_NODE': hex(base)})
1852 1852 if r:
1853 1853 self.ui.warn(_("merging %s failed!\n") % fn)
1854 1854
1855 1855 os.unlink(b)
1856 1856 os.unlink(c)
1857 1857 return r
1858 1858
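# merge3() above picks its external merge command by falling back in order:
# $HGMERGE, then the [ui] merge setting, then the literal "hgmerge".  A
# standalone sketch of that lookup (config_get is an illustrative stand-in
# for ui.config):
import os

def pick_merge_tool(config_get, environ=os.environ):
    return (environ.get("HGMERGE")
            or config_get("ui", "merge")
            or "hgmerge")

# pick_merge_tool(lambda section, key: None, environ={}) -> 'hgmerge'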
1859 1859 def verify(self):
1860 1860 filelinkrevs = {}
1861 1861 filenodes = {}
1862 1862 changesets = revisions = files = 0
1863 1863 errors = [0]
1864 1864 warnings = [0]
1865 1865 neededmanifests = {}
1866 1866
1867 1867 def err(msg):
1868 1868 self.ui.warn(msg + "\n")
1869 1869 errors[0] += 1
1870 1870
1871 1871 def warn(msg):
1872 1872 self.ui.warn(msg + "\n")
1873 1873 warnings[0] += 1
1874 1874
1875 1875 def checksize(obj, name):
1876 1876 d = obj.checksize()
1877 1877 if d[0]:
1878 1878 err(_("%s data length off by %d bytes") % (name, d[0]))
1879 1879 if d[1]:
1880 1880 err(_("%s index contains %d extra bytes") % (name, d[1]))
1881 1881
1882 1882 def checkversion(obj, name):
1883 1883 if obj.version != revlog.REVLOGV0:
1884 1884 if not revlogv1:
1885 1885 warn(_("warning: `%s' uses revlog format 1") % name)
1886 1886 elif revlogv1:
1887 1887 warn(_("warning: `%s' uses revlog format 0") % name)
1888 1888
1889 1889 revlogv1 = self.revlogversion != revlog.REVLOGV0
1890 1890 if self.ui.verbose or revlogv1 != self.revlogv1:
1891 1891 self.ui.status(_("repository uses revlog format %d\n") %
1892 1892 (revlogv1 and 1 or 0))
1893 1893
1894 1894 seen = {}
1895 1895 self.ui.status(_("checking changesets\n"))
1896 1896 checksize(self.changelog, "changelog")
1897 1897
1898 1898 for i in range(self.changelog.count()):
1899 1899 changesets += 1
1900 1900 n = self.changelog.node(i)
1901 1901 l = self.changelog.linkrev(n)
1902 1902 if l != i:
1903 1903 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1904 1904 if n in seen:
1905 1905 err(_("duplicate changeset at revision %d") % i)
1906 1906 seen[n] = 1
1907 1907
1908 1908 for p in self.changelog.parents(n):
1909 1909 if p not in self.changelog.nodemap:
1910 1910 err(_("changeset %s has unknown parent %s") %
1911 1911 (short(n), short(p)))
1912 1912 try:
1913 1913 changes = self.changelog.read(n)
1914 1914 except KeyboardInterrupt:
1915 1915 self.ui.warn(_("interrupted"))
1916 1916 raise
1917 1917 except Exception, inst:
1918 1918 err(_("unpacking changeset %s: %s") % (short(n), inst))
1919 1919 continue
1920 1920
1921 1921 neededmanifests[changes[0]] = n
1922 1922
1923 1923 for f in changes[3]:
1924 1924 filelinkrevs.setdefault(f, []).append(i)
1925 1925
1926 1926 seen = {}
1927 1927 self.ui.status(_("checking manifests\n"))
1928 1928 checkversion(self.manifest, "manifest")
1929 1929 checksize(self.manifest, "manifest")
1930 1930
1931 1931 for i in range(self.manifest.count()):
1932 1932 n = self.manifest.node(i)
1933 1933 l = self.manifest.linkrev(n)
1934 1934
1935 1935 if l < 0 or l >= self.changelog.count():
1936 1936 err(_("bad manifest link (%d) at revision %d") % (l, i))
1937 1937
1938 1938 if n in neededmanifests:
1939 1939 del neededmanifests[n]
1940 1940
1941 1941 if n in seen:
1942 1942 err(_("duplicate manifest at revision %d") % i)
1943 1943
1944 1944 seen[n] = 1
1945 1945
1946 1946 for p in self.manifest.parents(n):
1947 1947 if p not in self.manifest.nodemap:
1948 1948 err(_("manifest %s has unknown parent %s") %
1949 1949 (short(n), short(p)))
1950 1950
1951 1951 try:
1952 1952 delta = mdiff.patchtext(self.manifest.delta(n))
1953 1953 except KeyboardInterrupt:
1954 1954 self.ui.warn(_("interrupted"))
1955 1955 raise
1956 1956 except Exception, inst:
1957 1957 err(_("unpacking manifest %s: %s") % (short(n), inst))
1958 1958 continue
1959 1959
1960 1960 try:
1961 1961 ff = [ l.split('\0') for l in delta.splitlines() ]
1962 1962 for f, fn in ff:
1963 1963 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1964 1964 except (ValueError, TypeError), inst:
1965 1965 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1966 1966
1967 1967 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1968 1968
1969 1969 for m, c in neededmanifests.items():
1970 1970 err(_("Changeset %s refers to unknown manifest %s") %
1971 1971 (short(c), short(m)))
1972 1972 del neededmanifests
1973 1973
1974 1974 for f in filenodes:
1975 1975 if f not in filelinkrevs:
1976 1976 err(_("file %s in manifest but not in changesets") % f)
1977 1977
1978 1978 for f in filelinkrevs:
1979 1979 if f not in filenodes:
1980 1980 err(_("file %s in changeset but not in manifest") % f)
1981 1981
1982 1982 self.ui.status(_("checking files\n"))
1983 1983 ff = filenodes.keys()
1984 1984 ff.sort()
1985 1985 for f in ff:
1986 1986 if f == "/dev/null":
1987 1987 continue
1988 1988 files += 1
1989 1989 if not f:
1990 1990 err(_("file without name in manifest %s") % short(n))
1991 1991 continue
1992 1992 fl = self.file(f)
1993 1993 checkversion(fl, f)
1994 1994 checksize(fl, f)
1995 1995
1996 1996 nodes = {nullid: 1}
1997 1997 seen = {}
1998 1998 for i in range(fl.count()):
1999 1999 revisions += 1
2000 2000 n = fl.node(i)
2001 2001
2002 2002 if n in seen:
2003 2003 err(_("%s: duplicate revision %d") % (f, i))
2004 2004 if n not in filenodes[f]:
2005 2005 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2006 2006 else:
2007 2007 del filenodes[f][n]
2008 2008
2009 2009 flr = fl.linkrev(n)
2010 2010 if flr not in filelinkrevs.get(f, []):
2011 2011 err(_("%s:%s points to unexpected changeset %d")
2012 2012 % (f, short(n), flr))
2013 2013 else:
2014 2014 filelinkrevs[f].remove(flr)
2015 2015
2016 2016 # verify contents
2017 2017 try:
2018 2018 t = fl.read(n)
2019 2019 except KeyboardInterrupt:
2020 2020 self.ui.warn(_("interrupted"))
2021 2021 raise
2022 2022 except Exception, inst:
2023 2023 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2024 2024
2025 2025 # verify parents
2026 2026 (p1, p2) = fl.parents(n)
2027 2027 if p1 not in nodes:
2028 2028 err(_("file %s:%s unknown parent 1 %s") %
2029 2029 (f, short(n), short(p1)))
2030 2030 if p2 not in nodes:
2031 2031 err(_("file %s:%s unknown parent 2 %s") %
2032 2032 (f, short(n), short(p2)))
2033 2033 nodes[n] = 1
2034 2034
2035 2035 # cross-check
2036 2036 for node in filenodes[f]:
2037 2037 err(_("node %s in manifests not in %s") % (hex(node), f))
2038 2038
2039 2039 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2040 2040 (files, changesets, revisions))
2041 2041
2042 2042 if warnings[0]:
2043 2043 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2044 2044 if errors[0]:
2045 2045 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2046 2046 return 1
2047 2047
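# verify() above counts problems through closures: err() and warn() mutate
# one-element lists (errors = [0], warnings = [0]) because a plain integer
# rebound inside a nested function would not be visible to the enclosing
# scope in Python 2 (no 'nonlocal').  The same idiom in isolation:
def make_counter():
    count = [0]
    def bump(msg=None):
        count[0] += 1
    return count, bump

errors, err = make_counter()
err("something bad")
err("something else")
assert errors[0] == 2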
2048 2048 # used to avoid circular references so destructors work
2049 2049 def aftertrans(base):
2050 2050 p = base
2051 2051 def a():
2052 2052 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2053 2053 util.rename(os.path.join(p, "journal.dirstate"),
2054 2054 os.path.join(p, "undo.dirstate"))
2055 2055 return a
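# aftertrans() above deliberately closes over just the base path string, not
# the repository object, so the transaction's completion callback does not
# keep the repo alive through a reference cycle.  The same pattern, reduced
# (the rename targets stand in for the util.rename calls):
import os

def on_success(path):
    def callback():
        return (os.path.join(path, "journal"), os.path.join(path, "undo"))
    return callback

cb = on_success("/tmp/repo/.hg")   # holds only the string, never the repo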
2056 2056
@@ -1,264 +1,266 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import ConfigParser
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "errno os re socket sys tempfile util")
12 12
13 13 class ui(object):
14 14 def __init__(self, verbose=False, debug=False, quiet=False,
15 interactive=True, parentui=None):
15 interactive=True, traceback=False, parentui=None):
16 16 self.overlay = {}
17 17 if parentui is None:
18 18 # this is the parent of all ui children
19 19 self.parentui = None
20 20 self.cdata = ConfigParser.SafeConfigParser()
21 21 self.readconfig(util.rcpath())
22 22
23 23 self.quiet = self.configbool("ui", "quiet")
24 24 self.verbose = self.configbool("ui", "verbose")
25 25 self.debugflag = self.configbool("ui", "debug")
26 26 self.interactive = self.configbool("ui", "interactive", True)
27 self.traceback = traceback
27 28
28 29 self.updateopts(verbose, debug, quiet, interactive)
29 30 self.diffcache = None
30 31 self.header = []
31 32 self.prev_header = []
32 33 self.revlogopts = self.configrevlog()
33 34 else:
34 35 # parentui may point to an ui object which is already a child
35 36 self.parentui = parentui.parentui or parentui
36 37 parent_cdata = self.parentui.cdata
37 38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
38 39 # make interpolation work
39 40 for section in parent_cdata.sections():
40 41 self.cdata.add_section(section)
41 42 for name, value in parent_cdata.items(section, raw=True):
42 43 self.cdata.set(section, name, value)
43 44
44 45 def __getattr__(self, key):
45 46 return getattr(self.parentui, key)
46 47
47 48 def updateopts(self, verbose=False, debug=False, quiet=False,
48 interactive=True):
49 interactive=True, traceback=False):
49 50 self.quiet = (self.quiet or quiet) and not verbose and not debug
50 51 self.verbose = (self.verbose or verbose) or debug
51 52 self.debugflag = (self.debugflag or debug)
52 53 self.interactive = (self.interactive and interactive)
54 self.traceback = self.traceback or traceback
53 55
54 56 def readconfig(self, fn, root=None):
55 57 if isinstance(fn, basestring):
56 58 fn = [fn]
57 59 for f in fn:
58 60 try:
59 61 self.cdata.read(f)
60 62 except ConfigParser.ParsingError, inst:
61 63 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
62 64 # translate paths relative to root (or home) into absolute paths
63 65 if root is None:
64 66 root = os.path.expanduser('~')
65 67 for name, path in self.configitems("paths"):
66 68 if path and path.find("://") == -1 and not os.path.isabs(path):
67 69 self.cdata.set("paths", name, os.path.join(root, path))
68 70
69 71 def setconfig(self, section, name, val):
70 72 self.overlay[(section, name)] = val
71 73
72 74 def config(self, section, name, default=None):
73 75 if self.overlay.has_key((section, name)):
74 76 return self.overlay[(section, name)]
75 77 if self.cdata.has_option(section, name):
76 78 try:
77 79 return self.cdata.get(section, name)
78 80 except ConfigParser.InterpolationError, inst:
79 81 raise util.Abort(_("Error in configuration:\n%s") % inst)
80 82 if self.parentui is None:
81 83 return default
82 84 else:
83 85 return self.parentui.config(section, name, default)
84 86
85 87 def configbool(self, section, name, default=False):
86 88 if self.overlay.has_key((section, name)):
87 89 return self.overlay[(section, name)]
88 90 if self.cdata.has_option(section, name):
89 91 try:
90 92 return self.cdata.getboolean(section, name)
91 93 except ConfigParser.InterpolationError, inst:
92 94 raise util.Abort(_("Error in configuration:\n%s") % inst)
93 95 if self.parentui is None:
94 96 return default
95 97 else:
96 98 return self.parentui.configbool(section, name, default)
97 99
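# ui.config()/configbool() above resolve a value through successive layers:
# the in-memory overlay set by setconfig(), then this ui's parsed config
# data, then the parent ui (recursively), and finally the caller's default.
# A plain-dict sketch of that chain (toy structures, not ConfigParser):
def lookup(section, name, overlay, cdata, parent=None, default=None):
    if (section, name) in overlay:
        return overlay[(section, name)]
    if name in cdata.get(section, {}):
        return cdata[section][name]
    if parent is not None:
        return parent(section, name)
    return default

# lookup("ui", "verbose", {}, {"ui": {"verbose": "true"}}) -> 'true'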
98 100 def configitems(self, section):
99 101 items = {}
100 102 if self.parentui is not None:
101 103 items = dict(self.parentui.configitems(section))
102 104 if self.cdata.has_section(section):
103 105 try:
104 106 items.update(dict(self.cdata.items(section)))
105 107 except ConfigParser.InterpolationError, inst:
106 108 raise util.Abort(_("Error in configuration:\n%s") % inst)
107 109 x = items.items()
108 110 x.sort()
109 111 return x
110 112
111 113 def walkconfig(self, seen=None):
112 114 if seen is None:
113 115 seen = {}
114 116 for (section, name), value in self.overlay.iteritems():
115 117 yield section, name, value
116 118 seen[section, name] = 1
117 119 for section in self.cdata.sections():
118 120 for name, value in self.cdata.items(section):
119 121 if (section, name) in seen: continue
120 122 yield section, name, value.replace('\n', '\\n')
121 123 seen[section, name] = 1
122 124 if self.parentui is not None:
123 125 for parent in self.parentui.walkconfig(seen):
124 126 yield parent
125 127
126 128 def extensions(self):
127 129 return self.configitems("extensions")
128 130
129 131 def hgignorefiles(self):
130 132 result = []
131 133 cfgitems = self.configitems("ui")
132 134 for key, value in cfgitems:
133 135 if key == 'ignore' or key.startswith('ignore.'):
134 136 path = os.path.expanduser(value)
135 137 result.append(path)
136 138 return result
137 139
138 140 def configrevlog(self):
139 141 ret = {}
140 142 for x in self.configitems("revlog"):
141 143 k = x[0].lower()
142 144 ret[k] = x[1]
143 145 return ret
144 146 def diffopts(self):
145 147 if self.diffcache:
146 148 return self.diffcache
147 149 ret = { 'showfunc' : True, 'ignorews' : False}
148 150 for x in self.configitems("diff"):
149 151 k = x[0].lower()
150 152 v = x[1]
151 153 if v:
152 154 v = v.lower()
153 155 if v == 'true':
154 156 value = True
155 157 else:
156 158 value = False
157 159 ret[k] = value
158 160 self.diffcache = ret
159 161 return ret
160 162
161 163 def username(self):
162 164 """Return default username to be used in commits.
163 165
164 166 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
165 167 searching stops at the first of these that is set.
166 168 Abort if the username found is an empty string, to force specifying
167 169 the commit user elsewhere, e.g. with a command line option or repo hgrc.
168 170 If none is found, use $LOGNAME or $USERNAME + "@full.hostname".
169 171 """
170 172 user = os.environ.get("HGUSER")
171 173 if user is None:
172 174 user = self.config("ui", "username")
173 175 if user is None:
174 176 user = os.environ.get("EMAIL")
175 177 if user is None:
176 178 user = os.environ.get("LOGNAME") or os.environ.get("USERNAME")
177 179 if user:
178 180 user = "%s@%s" % (user, socket.getfqdn())
179 181 if not user:
180 182 raise util.Abort(_("Please specify a username."))
181 183 return user
182 184
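# A reduced sketch of the fallback order username() implements above; it
# stops at the first setting that exists, and an explicitly empty value
# still aborts.  config_get stands in for ui.config, and ValueError stands
# in for util.Abort (illustrative substitutions only).
def resolve_username(environ, config_get, fqdn="example.org"):
    user = environ.get("HGUSER")
    if user is None:
        user = config_get("ui", "username")
    if user is None:
        user = environ.get("EMAIL")
    if user is None:
        user = environ.get("LOGNAME") or environ.get("USERNAME")
        if user:
            user = "%s@%s" % (user, fqdn)
    if not user:
        raise ValueError("Please specify a username.")
    return user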
183 185 def shortuser(self, user):
184 186 """Return a short representation of a user name or email address."""
185 187 if not self.verbose: user = util.shortuser(user)
186 188 return user
187 189
188 190 def expandpath(self, loc):
189 191 """Return repository location relative to cwd or from [paths]"""
190 192 if loc.find("://") != -1 or os.path.exists(loc):
191 193 return loc
192 194
193 195 return self.config("paths", loc, loc)
194 196
195 197 def write(self, *args):
196 198 if self.header:
197 199 if self.header != self.prev_header:
198 200 self.prev_header = self.header
199 201 self.write(*self.header)
200 202 self.header = []
201 203 for a in args:
202 204 sys.stdout.write(str(a))
203 205
204 206 def write_header(self, *args):
205 207 for a in args:
206 208 self.header.append(str(a))
207 209
208 210 def write_err(self, *args):
209 211 try:
210 212 if not sys.stdout.closed: sys.stdout.flush()
211 213 for a in args:
212 214 sys.stderr.write(str(a))
213 215 except IOError, inst:
214 216 if inst.errno != errno.EPIPE:
215 217 raise
216 218
217 219 def flush(self):
218 220 try: sys.stdout.flush()
219 221 except: pass
220 222 try: sys.stderr.flush()
221 223 except: pass
222 224
223 225 def readline(self):
224 226 return sys.stdin.readline()[:-1]
225 227 def prompt(self, msg, pat, default="y"):
226 228 if not self.interactive: return default
227 229 while 1:
228 230 self.write(msg, " ")
229 231 r = self.readline()
230 232 if re.match(pat, r):
231 233 return r
232 234 else:
233 235 self.write(_("unrecognized response\n"))
234 236 def status(self, *msg):
235 237 if not self.quiet: self.write(*msg)
236 238 def warn(self, *msg):
237 239 self.write_err(*msg)
238 240 def note(self, *msg):
239 241 if self.verbose: self.write(*msg)
240 242 def debug(self, *msg):
241 243 if self.debugflag: self.write(*msg)
242 244 def edit(self, text, user):
243 245 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt")
244 246 try:
245 247 f = os.fdopen(fd, "w")
246 248 f.write(text)
247 249 f.close()
248 250
249 251 editor = (os.environ.get("HGEDITOR") or
250 252 self.config("ui", "editor") or
251 253 os.environ.get("EDITOR", "vi"))
252 254
253 255 util.system("%s \"%s\"" % (editor, name),
254 256 environ={'HGUSER': user},
255 257 onerr=util.Abort, errprefix=_("edit failed"))
256 258
257 259 f = open(name)
258 260 t = f.read()
259 261 f.close()
260 262 t = re.sub("(?m)^HG:.*\n", "", t)
261 263 finally:
262 264 os.unlink(name)
263 265
264 266 return t