fix the cat command...
Benoit Boissinot
r1582:63799b01 default
@@ -0,0 +1,18 @@
1 #!/bin/sh
2 #
3 mkdir t
4 cd t
5 hg init
6 echo 0 > a
7 echo 0 > b
8 hg ci -A -m m -d "0 0"
9 hg rm a
10 hg cat a
11 sleep 1 # make sure mtime is changed
12 echo 1 > b
13 hg ci -m m -d "0 0"
14 echo 2 > b
15 hg cat -r 0 a
16 hg cat -r 0 b
17 hg cat -r 1 a
18 hg cat -r 1 b
@@ -0,0 +1,7 @@
1 adding a
2 adding b
3 0
4 0
5 0
6 a: No such file in rev 551e7cb14b32
7 1
@@ -1,2700 +1,2694 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 14 demandload(globals(), "errno socket version struct atexit sets bz2")
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20
21 21 def filterfiles(filters, files):
22 22 l = [x for x in files if x in filters]
23 23
24 24 for t in filters:
25 25 if t and t[-1] != "/":
26 26 t += "/"
27 27 l += [x for x in files if x.startswith(t)]
28 28 return l
29 29
30 30 def relpath(repo, args):
31 31 cwd = repo.getcwd()
32 32 if cwd:
33 33 return [util.normpath(os.path.join(cwd, x)) for x in args]
34 34 return args
35 35
36 36 def matchpats(repo, pats=[], opts={}, head=''):
37 37 cwd = repo.getcwd()
38 38 if not pats and cwd:
39 39 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
40 40 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
41 41 cwd = ''
42 42 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
43 43 opts.get('exclude'), head) + (cwd,)
44 44
45 def makewalk(repo, pats, opts, head=''):
45 def makewalk(repo, pats, opts, node=None, head=''):
46 46 files, matchfn, anypats, cwd = matchpats(repo, pats, opts, head)
47 47 exact = dict(zip(files, files))
48 48 def walk():
49 for src, fn in repo.walk(files=files, match=matchfn):
49 for src, fn in repo.walk(node=node, files=files, match=matchfn):
50 50 yield src, fn, util.pathto(cwd, fn), fn in exact
51 51 return files, matchfn, walk()
52 52
53 def walk(repo, pats, opts, head=''):
54 files, matchfn, results = makewalk(repo, pats, opts, head)
53 def walk(repo, pats, opts, node=None, head=''):
54 files, matchfn, results = makewalk(repo, pats, opts, node, head)
55 55 for r in results:
56 56 yield r
57 57
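The optional node argument added to makewalk() and walk() above is what lets cat (further down in this hunk) walk the file list of a past revision instead of the working directory. A minimal sketch of the new call form, assuming an already-open repo object and an opts dict carrying the usual include/exclude keys; none of this is part of the patch itself:

    node = repo.lookup("0")                  # changeset whose files we want
    opts = {'include': [], 'exclude': []}    # matchpats() reads these keys
    for src, abs, rel, exact in walk(repo, ('a',), opts, node=node):
        print abs                            # repository paths in rev 0 matching 'a'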
58 58 def walkchangerevs(ui, repo, pats, opts):
59 59 '''Iterate over files and the revs they changed in.
60 60
61 61 Callers most commonly need to iterate backwards over the history
62 62 they are interested in. Doing so has awful (quadratic-looking)
63 63 performance, so we use iterators in a "windowed" way.
64 64
65 65 We walk a window of revisions in the desired order. Within the
66 66 window, we first walk forwards to gather data, then in the desired
67 67 order (usually backwards) to display it.
68 68
69 69 This function returns an (iterator, getchange) pair. The
70 70 getchange function returns the changelog entry for a numeric
71 71 revision. The iterator yields 3-tuples. They will be of one of
72 72 the following forms:
73 73
74 74 "window", incrementing, lastrev: stepping through a window,
75 75 positive if walking forwards through revs, last rev in the
76 76 sequence iterated over - use to reset state for the current window
77 77
78 78 "add", rev, fns: out-of-order traversal of the given file names
79 79 fns, which changed during revision rev - use to gather data for
80 80 possible display
81 81
82 82 "iter", rev, None: in-order traversal of the revs earlier iterated
83 83 over with "add" - use to display data'''
84 84
85 85 if repo.changelog.count() == 0:
86 86 return [], False
87 87
88 88 files, matchfn, anypats, cwd = matchpats(repo, pats, opts)
89 89 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
90 90 wanted = {}
91 91 slowpath = anypats
92 92 window = 300
93 93 fncache = {}
94 94
95 95 chcache = {}
96 96 def getchange(rev):
97 97 ch = chcache.get(rev)
98 98 if ch is None:
99 99 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
100 100 return ch
101 101
102 102 if not slowpath and not files:
103 103 # No files, no patterns. Display all revs.
104 104 wanted = dict(zip(revs, revs))
105 105 if not slowpath:
106 106 # Only files, no patterns. Check the history of each file.
107 107 def filerevgen(filelog):
108 108 for i in xrange(filelog.count() - 1, -1, -window):
109 109 revs = []
110 110 for j in xrange(max(0, i - window), i + 1):
111 111 revs.append(filelog.linkrev(filelog.node(j)))
112 112 revs.reverse()
113 113 for rev in revs:
114 114 yield rev
115 115
116 116 minrev, maxrev = min(revs), max(revs)
117 117 for file in files:
118 118 filelog = repo.file(file)
119 119 # A zero count may be a directory or deleted file, so
120 120 # try to find matching entries on the slow path.
121 121 if filelog.count() == 0:
122 122 slowpath = True
123 123 break
124 124 for rev in filerevgen(filelog):
125 125 if rev <= maxrev:
126 126 if rev < minrev:
127 127 break
128 128 fncache.setdefault(rev, [])
129 129 fncache[rev].append(file)
130 130 wanted[rev] = 1
131 131 if slowpath:
132 132 # The slow path checks files modified in every changeset.
133 133 def changerevgen():
134 134 for i in xrange(repo.changelog.count() - 1, -1, -window):
135 135 for j in xrange(max(0, i - window), i + 1):
136 136 yield j, getchange(j)[3]
137 137
138 138 for rev, changefiles in changerevgen():
139 139 matches = filter(matchfn, changefiles)
140 140 if matches:
141 141 fncache[rev] = matches
142 142 wanted[rev] = 1
143 143
144 144 def iterate():
145 145 for i in xrange(0, len(revs), window):
146 146 yield 'window', revs[0] < revs[-1], revs[-1]
147 147 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
148 148 if rev in wanted]
149 149 srevs = list(nrevs)
150 150 srevs.sort()
151 151 for rev in srevs:
152 152 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
153 153 yield 'add', rev, fns
154 154 for rev in nrevs:
155 155 yield 'iter', rev, None
156 156 return iterate(), getchange
157 157
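The docstring above defines a small three-phase protocol, and the grep and log commands later in this file consume it in exactly this shape. A condensed sketch of a consumer, assuming ui, repo, pats and an opts dict with the usual 'rev', 'include' and 'exclude' entries (not part of the patch):

    changeiter, getchange = walkchangerevs(ui, repo, pats, opts)
    perwindow = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            perwindow.clear()                  # reset state for the new window
        elif st == 'add':
            perwindow[rev] = fns               # gather data, possibly out of order
        elif st == 'iter':
            for fn in perwindow.get(rev, []):  # display in the requested order
                ui.write("%d %s\n" % (rev, fn))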
158 158 revrangesep = ':'
159 159
160 160 def revrange(ui, repo, revs, revlog=None):
161 161 """Yield revision as strings from a list of revision specifications."""
162 162 if revlog is None:
163 163 revlog = repo.changelog
164 164 revcount = revlog.count()
165 165 def fix(val, defval):
166 166 if not val:
167 167 return defval
168 168 try:
169 169 num = int(val)
170 170 if str(num) != val:
171 171 raise ValueError
172 172 if num < 0: num += revcount
173 173 if num < 0: num = 0
174 174 elif num >= revcount:
175 175 raise ValueError
176 176 except ValueError:
177 177 try:
178 178 num = repo.changelog.rev(repo.lookup(val))
179 179 except KeyError:
180 180 try:
181 181 num = revlog.rev(revlog.lookup(val))
182 182 except KeyError:
183 183 raise util.Abort(_('invalid revision identifier %s'), val)
184 184 return num
185 185 seen = {}
186 186 for spec in revs:
187 187 if spec.find(revrangesep) >= 0:
188 188 start, end = spec.split(revrangesep, 1)
189 189 start = fix(start, 0)
190 190 end = fix(end, revcount - 1)
191 191 step = start > end and -1 or 1
192 192 for rev in xrange(start, end+step, step):
193 193 if rev in seen: continue
194 194 seen[rev] = 1
195 195 yield str(rev)
196 196 else:
197 197 rev = fix(spec, None)
198 198 if rev in seen: continue
199 199 seen[rev] = 1
200 200 yield str(rev)
201 201
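A few hedged examples of the specifications revrange() accepts, assuming a repository with at least three revisions; symbolic names and hashes are resolved through repo.lookup(), and duplicates are filtered out:

    list(revrange(ui, repo, ['2']))      # ['2']
    list(revrange(ui, repo, ['0:2']))    # ['0', '1', '2']
    list(revrange(ui, repo, ['2:0']))    # ['2', '1', '0'] - the step follows the range direction
    list(revrange(ui, repo, ['-1']))     # last revision: negatives count back from the end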
202 202 def make_filename(repo, r, pat, node=None,
203 203 total=None, seqno=None, revwidth=None, pathname=None):
204 204 node_expander = {
205 205 'H': lambda: hex(node),
206 206 'R': lambda: str(r.rev(node)),
207 207 'h': lambda: short(node),
208 208 }
209 209 expander = {
210 210 '%': lambda: '%',
211 211 'b': lambda: os.path.basename(repo.root),
212 212 }
213 213
214 214 try:
215 215 if node:
216 216 expander.update(node_expander)
217 217 if node and revwidth is not None:
218 218 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
219 219 if total is not None:
220 220 expander['N'] = lambda: str(total)
221 221 if seqno is not None:
222 222 expander['n'] = lambda: str(seqno)
223 223 if total is not None and seqno is not None:
224 224 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
225 225 if pathname is not None:
226 226 expander['s'] = lambda: os.path.basename(pathname)
227 227 expander['d'] = lambda: os.path.dirname(pathname) or '.'
228 228 expander['p'] = lambda: pathname
229 229
230 230 newname = []
231 231 patlen = len(pat)
232 232 i = 0
233 233 while i < patlen:
234 234 c = pat[i]
235 235 if c == '%':
236 236 i += 1
237 237 c = pat[i]
238 238 c = expander[c]()
239 239 newname.append(c)
240 240 i += 1
241 241 return ''.join(newname)
242 242 except KeyError, inst:
243 243 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
244 244 inst.args[0])
245 245
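A hedged illustration of the expander table above; the repository name, revision number, node and path are placeholders, and %R is numbered in whatever revlog is passed as r:

    # assuming os.path.basename(repo.root) == 'hg' and r.rev(node) == 42:
    make_filename(repo, r, '%b-%R.patch', node=node)        # -> 'hg-42.patch'
    make_filename(repo, r, '%n-of-%N', total=3, seqno=2)    # -> '2-of-3'
    make_filename(repo, r, '%d/%s', pathname='dir/file.c')  # -> 'dir/file.c'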
246 246 def make_file(repo, r, pat, node=None,
247 247 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
248 248 if not pat or pat == '-':
249 249 return 'w' in mode and sys.stdout or sys.stdin
250 250 if hasattr(pat, 'write') and 'w' in mode:
251 251 return pat
252 252 if hasattr(pat, 'read') and 'r' in mode:
253 253 return pat
254 254 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
255 255 pathname),
256 256 mode)
257 257
258 258 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
259 259 changes=None, text=False):
260 260 if not changes:
261 261 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
262 262 else:
263 263 (c, a, d, u) = changes
264 264 if files:
265 265 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
266 266
267 267 if not c and not a and not d:
268 268 return
269 269
270 270 if node2:
271 271 change = repo.changelog.read(node2)
272 272 mmap2 = repo.manifest.read(change[0])
273 273 date2 = util.datestr(change[2])
274 274 def read(f):
275 275 return repo.file(f).read(mmap2[f])
276 276 else:
277 277 date2 = util.datestr()
278 278 if not node1:
279 279 node1 = repo.dirstate.parents()[0]
280 280 def read(f):
281 281 return repo.wfile(f).read()
282 282
283 283 if ui.quiet:
284 284 r = None
285 285 else:
286 286 hexfunc = ui.verbose and hex or short
287 287 r = [hexfunc(node) for node in [node1, node2] if node]
288 288
289 289 change = repo.changelog.read(node1)
290 290 mmap = repo.manifest.read(change[0])
291 291 date1 = util.datestr(change[2])
292 292
293 293 for f in c:
294 294 to = None
295 295 if f in mmap:
296 296 to = repo.file(f).read(mmap[f])
297 297 tn = read(f)
298 298 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
299 299 for f in a:
300 300 to = None
301 301 tn = read(f)
302 302 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
303 303 for f in d:
304 304 to = repo.file(f).read(mmap[f])
305 305 tn = None
306 306 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
307 307
308 308 def trimuser(ui, name, rev, revcache):
309 309 """trim the name of the user who committed a change"""
310 310 user = revcache.get(rev)
311 311 if user is None:
312 312 user = revcache[rev] = ui.shortuser(name)
313 313 return user
314 314
315 315 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
316 316 """show a single changeset or file revision"""
317 317 log = repo.changelog
318 318 if changenode is None:
319 319 changenode = log.node(rev)
320 320 elif not rev:
321 321 rev = log.rev(changenode)
322 322
323 323 if ui.quiet:
324 324 ui.write("%d:%s\n" % (rev, short(changenode)))
325 325 return
326 326
327 327 changes = log.read(changenode)
328 328 date = util.datestr(changes[2])
329 329
330 330 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
331 331 for p in log.parents(changenode)
332 332 if ui.debugflag or p != nullid]
333 333 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
334 334 parents = []
335 335
336 336 if ui.verbose:
337 337 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
338 338 else:
339 339 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
340 340
341 341 for tag in repo.nodetags(changenode):
342 342 ui.status(_("tag: %s\n") % tag)
343 343 for parent in parents:
344 344 ui.write(_("parent: %d:%s\n") % parent)
345 345
346 346 if brinfo and changenode in brinfo:
347 347 br = brinfo[changenode]
348 348 ui.write(_("branch: %s\n") % " ".join(br))
349 349
350 350 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
351 351 hex(changes[0])))
352 352 ui.status(_("user: %s\n") % changes[1])
353 353 ui.status(_("date: %s\n") % date)
354 354
355 355 if ui.debugflag:
356 356 files = repo.changes(log.parents(changenode)[0], changenode)
357 357 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
358 358 if value:
359 359 ui.note("%-12s %s\n" % (key, " ".join(value)))
360 360 else:
361 361 ui.note(_("files: %s\n") % " ".join(changes[3]))
362 362
363 363 description = changes[4].strip()
364 364 if description:
365 365 if ui.verbose:
366 366 ui.status(_("description:\n"))
367 367 ui.status(description)
368 368 ui.status("\n\n")
369 369 else:
370 370 ui.status(_("summary: %s\n") % description.splitlines()[0])
371 371 ui.status("\n")
372 372
373 373 def show_version(ui):
374 374 """output version and copyright information"""
375 375 ui.write(_("Mercurial Distributed SCM (version %s)\n")
376 376 % version.get_version())
377 377 ui.status(_(
378 378 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
379 379 "This is free software; see the source for copying conditions. "
380 380 "There is NO\nwarranty; "
381 381 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
382 382 ))
383 383
384 384 def help_(ui, cmd=None, with_version=False):
385 385 """show help for a given command or all commands"""
386 386 option_lists = []
387 387 if cmd and cmd != 'shortlist':
388 388 if with_version:
389 389 show_version(ui)
390 390 ui.write('\n')
391 391 aliases, i = find(cmd)
392 392 # synopsis
393 393 ui.write("%s\n\n" % i[2])
394 394
395 395 # description
396 396 doc = i[0].__doc__
397 397 if ui.quiet:
398 398 doc = doc.splitlines(0)[0]
399 399 ui.write("%s\n" % doc.rstrip())
400 400
401 401 if not ui.quiet:
402 402 # aliases
403 403 if len(aliases) > 1:
404 404 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
405 405
406 406 # options
407 407 if i[1]:
408 408 option_lists.append(("options", i[1]))
409 409
410 410 else:
411 411 # program name
412 412 if ui.verbose or with_version:
413 413 show_version(ui)
414 414 else:
415 415 ui.status(_("Mercurial Distributed SCM\n"))
416 416 ui.status('\n')
417 417
418 418 # list of commands
419 419 if cmd == "shortlist":
420 420 ui.status(_('basic commands (use "hg help" '
421 421 'for the full list or option "-v" for details):\n\n'))
422 422 elif ui.verbose:
423 423 ui.status(_('list of commands:\n\n'))
424 424 else:
425 425 ui.status(_('list of commands (use "hg help -v" '
426 426 'to show aliases and global options):\n\n'))
427 427
428 428 h = {}
429 429 cmds = {}
430 430 for c, e in table.items():
431 431 f = c.split("|")[0]
432 432 if cmd == "shortlist" and not f.startswith("^"):
433 433 continue
434 434 f = f.lstrip("^")
435 435 if not ui.debugflag and f.startswith("debug"):
436 436 continue
437 437 d = ""
438 438 if e[0].__doc__:
439 439 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 440 h[f] = d
441 441 cmds[f]=c.lstrip("^")
442 442
443 443 fns = h.keys()
444 444 fns.sort()
445 445 m = max(map(len, fns))
446 446 for f in fns:
447 447 if ui.verbose:
448 448 commands = cmds[f].replace("|",", ")
449 449 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 450 else:
451 451 ui.write(' %-*s %s\n' % (m, f, h[f]))
452 452
453 453 # global options
454 454 if ui.verbose:
455 455 option_lists.append(("global options", globalopts))
456 456
457 457 # list all option lists
458 458 opt_output = []
459 459 for title, options in option_lists:
460 460 opt_output.append(("\n%s:\n" % title, None))
461 461 for shortopt, longopt, default, desc in options:
462 462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 463 longopt and " --%s" % longopt),
464 464 "%s%s" % (desc,
465 465 default and _(" (default: %s)") % default
466 466 or "")))
467 467
468 468 if opt_output:
469 469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
470 470 for first, second in opt_output:
471 471 if second:
472 472 ui.write(" %-*s %s\n" % (opts_len, first, second))
473 473 else:
474 474 ui.write("%s\n" % first)
475 475
476 476 # Commands start here, listed alphabetically
477 477
478 478 def add(ui, repo, *pats, **opts):
479 479 """add the specified files on the next commit
480 480
481 481 Schedule files to be version controlled and added to the repository.
482 482
483 483 The files will be added to the repository at the next commit.
484 484
485 485 If no names are given, add all files in the repository.
486 486 """
487 487
488 488 names = []
489 489 for src, abs, rel, exact in walk(repo, pats, opts):
490 490 if exact:
491 491 if ui.verbose: ui.status(_('adding %s\n') % rel)
492 492 names.append(abs)
493 493 elif repo.dirstate.state(abs) == '?':
494 494 ui.status(_('adding %s\n') % rel)
495 495 names.append(abs)
496 496 repo.add(names)
497 497
498 498 def addremove(ui, repo, *pats, **opts):
499 499 """add all new files, delete all missing files
500 500
501 501 Add all new files and remove all missing files from the repository.
502 502
503 503 New files are ignored if they match any of the patterns in .hgignore. As
504 504 with add, these changes take effect at the next commit.
505 505 """
506 506 add, remove = [], []
507 507 for src, abs, rel, exact in walk(repo, pats, opts):
508 508 if src == 'f' and repo.dirstate.state(abs) == '?':
509 509 add.append(abs)
510 510 if ui.verbose or not exact:
511 511 ui.status(_('adding %s\n') % rel)
512 512 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
513 513 remove.append(abs)
514 514 if ui.verbose or not exact:
515 515 ui.status(_('removing %s\n') % rel)
516 516 repo.add(add)
517 517 repo.remove(remove)
518 518
519 519 def annotate(ui, repo, *pats, **opts):
520 520 """show changeset information per file line
521 521
522 522 List changes in files, showing the revision id responsible for each line.
523 523
524 524 This command is useful for discovering who made a change or when a change
525 525 took place.
526 526
527 527 Without the -a option, annotate will avoid processing files it
528 528 detects as binary. With -a, annotate will generate an annotation
529 529 anyway, probably with undesirable results.
530 530 """
531 531 def getnode(rev):
532 532 return short(repo.changelog.node(rev))
533 533
534 534 ucache = {}
535 535 def getname(rev):
536 536 cl = repo.changelog.read(repo.changelog.node(rev))
537 537 return trimuser(ui, cl[1], rev, ucache)
538 538
539 539 dcache = {}
540 540 def getdate(rev):
541 541 datestr = dcache.get(rev)
542 542 if datestr is None:
543 543 cl = repo.changelog.read(repo.changelog.node(rev))
544 544 datestr = dcache[rev] = util.datestr(cl[2])
545 545 return datestr
546 546
547 547 if not pats:
548 548 raise util.Abort(_('at least one file name or pattern required'))
549 549
550 550 opmap = [['user', getname], ['number', str], ['changeset', getnode],
551 551 ['date', getdate]]
552 552 if not opts['user'] and not opts['changeset'] and not opts['date']:
553 553 opts['number'] = 1
554 554
555 555 if opts['rev']:
556 556 node = repo.changelog.lookup(opts['rev'])
557 557 else:
558 558 node = repo.dirstate.parents()[0]
559 559 change = repo.changelog.read(node)
560 560 mmap = repo.manifest.read(change[0])
561 561
562 562 for src, abs, rel, exact in walk(repo, pats, opts):
563 563 if abs not in mmap:
564 564 ui.warn(_("warning: %s is not in the repository!\n") % rel)
565 565 continue
566 566
567 567 f = repo.file(abs)
568 568 if not opts['text'] and util.binary(f.read(mmap[abs])):
569 569 ui.write(_("%s: binary file\n") % rel)
570 570 continue
571 571
572 572 lines = f.annotate(mmap[abs])
573 573 pieces = []
574 574
575 575 for o, f in opmap:
576 576 if opts[o]:
577 577 l = [f(n) for n, dummy in lines]
578 578 if l:
579 579 m = max(map(len, l))
580 580 pieces.append(["%*s" % (m, x) for x in l])
581 581
582 582 if pieces:
583 583 for p, l in zip(zip(*pieces), lines):
584 584 ui.write("%s: %s" % (" ".join(p), l[1]))
585 585
586 586 def bundle(ui, repo, fname, dest="default-push", **opts):
587 587 """create a changegroup file
588 588
589 589 Generate a compressed changegroup file collecting all changesets
590 590 not found in the other repository.
591 591
592 592 This file can then be transferred using conventional means and
593 593 applied to another repository with the unbundle command. This is
594 594 useful when native push and pull are not available or when
595 595 exporting an entire repository is undesirable. The standard file
596 596 extension is ".hg".
597 597
598 598 Unlike import/export, this exactly preserves all changeset
599 599 contents including permissions, rename data, and revision history.
600 600 """
601 601 f = open(fname, "wb")
602 602 dest = ui.expandpath(dest, repo.root)
603 603 other = hg.repository(ui, dest)
604 604 o = repo.findoutgoing(other)
605 605 cg = repo.changegroup(o)
606 606
607 607 try:
608 608 f.write("HG10")
609 609 z = bz2.BZ2Compressor(9)
610 610 while 1:
611 611 chunk = cg.read(4096)
612 612 if not chunk:
613 613 break
614 614 f.write(z.compress(chunk))
615 615 f.write(z.flush())
616 616 except:
617 617 os.unlink(fname)
618 618 raise
619 619
620 620 def cat(ui, repo, file1, *pats, **opts):
621 621 """output the latest or given revisions of files
622 622
623 623 Print the specified files as they were at the given revision.
624 624 If no revision is given then the tip is used.
625 625
626 626 Output may be to a file, in which case the name of the file is
627 627 given using a format string. The formatting rules are the same as
628 628 for the export command, with the following additions:
629 629
630 630 %s basename of file being printed
631 631 %d dirname of file being printed, or '.' if in repo root
632 632 %p root-relative path name of file being printed
633 633 """
634 634 mf = {}
635 635 rev = opts['rev']
636 636 if rev:
637 change = repo.changelog.read(repo.lookup(rev))
637 node = repo.lookup(rev)
638 else:
639 node = repo.changelog.tip()
640 change = repo.changelog.read(node)
638 641 mf = repo.manifest.read(change[0])
639 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
642 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
640 643 r = repo.file(abs)
641 if rev:
642 try:
643 644 n = mf[abs]
644 except (hg.RepoError, KeyError):
645 try:
646 n = r.lookup(rev)
647 except KeyError, inst:
648 raise util.Abort(_('cannot find file %s in rev %s'), rel, rev)
649 else:
650 n = r.tip()
651 645 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
652 646 fp.write(r.read(n))
653 647
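The hunk above is the actual fix: instead of trying the manifest of the requested revision and then falling back to looking the revision up in the file's own revlog (which could resolve to the wrong file revision or fail confusingly), cat now resolves the changeset node first, reads that changeset's manifest, and walks only the files recorded there; a file absent from that revision is reported instead, which is exactly what the new test's "a: No such file in rev ..." line expects. Condensed, and not a verbatim copy (error handling omitted), the new lookup path is roughly:

    rev = opts['rev']
    node = rev and repo.lookup(rev) or repo.changelog.tip()
    change = repo.changelog.read(node)       # changeset data
    mf = repo.manifest.read(change[0])       # manifest of that changeset
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
        r = repo.file(abs)                   # the file's own revlog
        n = mf[abs]                          # file node as recorded in that changeset
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))

With the -o format strings documented in the docstring, an invocation along the lines of hg cat -r 0 -o '%s.out' a b should write a.out and b.out instead of printing to stdout (hypothetical file names).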
654 648 def clone(ui, source, dest=None, **opts):
655 649 """make a copy of an existing repository
656 650
657 651 Create a copy of an existing repository in a new directory.
658 652
659 653 If no destination directory name is specified, it defaults to the
660 654 basename of the source.
661 655
662 656 The location of the source is added to the new repository's
663 657 .hg/hgrc file, as the default to be used for future pulls.
664 658
665 659 For efficiency, hardlinks are used for cloning whenever the source
666 660 and destination are on the same filesystem. Some filesystems,
667 661 such as AFS, implement hardlinking incorrectly, but do not report
668 662 errors. In these cases, use the --pull option to avoid
669 663 hardlinking.
670 664 """
671 665 if dest is None:
672 666 dest = os.path.basename(os.path.normpath(source))
673 667
674 668 if os.path.exists(dest):
675 669 raise util.Abort(_("destination '%s' already exists"), dest)
676 670
677 671 dest = os.path.realpath(dest)
678 672
679 673 class Dircleanup(object):
680 674 def __init__(self, dir_):
681 675 self.rmtree = shutil.rmtree
682 676 self.dir_ = dir_
683 677 os.mkdir(dir_)
684 678 def close(self):
685 679 self.dir_ = None
686 680 def __del__(self):
687 681 if self.dir_:
688 682 self.rmtree(self.dir_, True)
689 683
690 684 if opts['ssh']:
691 685 ui.setconfig("ui", "ssh", opts['ssh'])
692 686 if opts['remotecmd']:
693 687 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
694 688
695 689 if not os.path.exists(source):
696 690 source = ui.expandpath(source)
697 691
698 692 d = Dircleanup(dest)
699 693 abspath = source
700 694 other = hg.repository(ui, source)
701 695
702 696 copy = False
703 697 if other.dev() != -1:
704 698 abspath = os.path.abspath(source)
705 699 if not opts['pull'] and not opts['rev']:
706 700 copy = True
707 701
708 702 if copy:
709 703 try:
710 704 # we use a lock here because if we race with commit, we
711 705 # can end up with extra data in the cloned revlogs that's
712 706 # not pointed to by changesets, thus causing verify to
713 707 # fail
714 708 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
715 709 except OSError:
716 710 copy = False
717 711
718 712 if copy:
719 713 # we lock here to avoid premature writing to the target
720 714 os.mkdir(os.path.join(dest, ".hg"))
721 715 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
722 716
723 717 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
724 718 for f in files.split():
725 719 src = os.path.join(source, ".hg", f)
726 720 dst = os.path.join(dest, ".hg", f)
727 721 try:
728 722 util.copyfiles(src, dst)
729 723 except OSError, inst:
730 724 if inst.errno != errno.ENOENT: raise
731 725
732 726 repo = hg.repository(ui, dest)
733 727
734 728 else:
735 729 revs = None
736 730 if opts['rev']:
737 731 if not other.local():
738 732 raise util.Abort("clone -r not supported yet for remote repositories.")
739 733 else:
740 734 revs = [other.lookup(rev) for rev in opts['rev']]
741 735 repo = hg.repository(ui, dest, create=1)
742 736 repo.pull(other, heads = revs)
743 737
744 738 f = repo.opener("hgrc", "w", text=True)
745 739 f.write("[paths]\n")
746 740 f.write("default = %s\n" % abspath)
747 741 f.close()
748 742
749 743 if not opts['noupdate']:
750 744 update(ui, repo)
751 745
752 746 d.close()
753 747
754 748 def commit(ui, repo, *pats, **opts):
755 749 """commit the specified files or all outstanding changes
756 750
757 751 Commit changes to the given files into the repository.
758 752
759 753 If a list of files is omitted, all changes reported by "hg status"
760 754 will be committed.
761 755
762 756 The HGEDITOR or EDITOR environment variables are used to start an
763 757 editor to add a commit comment.
764 758 """
765 759 message = opts['message']
766 760 logfile = opts['logfile']
767 761
768 762 if message and logfile:
769 763 raise util.Abort(_('options --message and --logfile are mutually '
770 764 'exclusive'))
771 765 if not message and logfile:
772 766 try:
773 767 if logfile == '-':
774 768 message = sys.stdin.read()
775 769 else:
776 770 message = open(logfile).read()
777 771 except IOError, inst:
778 772 raise util.Abort(_("can't read commit message '%s': %s") %
779 773 (logfile, inst.strerror))
780 774
781 775 if opts['addremove']:
782 776 addremove(ui, repo, *pats, **opts)
783 777 fns, match, anypats, cwd = matchpats(repo, pats, opts)
784 778 if pats:
785 779 c, a, d, u = repo.changes(files=fns, match=match)
786 780 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
787 781 else:
788 782 files = []
789 783 try:
790 784 repo.commit(files, message, opts['user'], opts['date'], match)
791 785 except ValueError, inst:
792 786 raise util.Abort(str(inst))
793 787
794 788 def docopy(ui, repo, pats, opts):
795 789 cwd = repo.getcwd()
796 790 errors = 0
797 791 copied = []
798 792 targets = {}
799 793
800 794 def okaytocopy(abs, rel, exact):
801 795 reasons = {'?': _('is not managed'),
802 796 'a': _('has been marked for add')}
803 797 reason = reasons.get(repo.dirstate.state(abs))
804 798 if reason:
805 799 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
806 800 else:
807 801 return True
808 802
809 803 def copy(abssrc, relsrc, target, exact):
810 804 abstarget = util.canonpath(repo.root, cwd, target)
811 805 reltarget = util.pathto(cwd, abstarget)
812 806 prevsrc = targets.get(abstarget)
813 807 if prevsrc is not None:
814 808 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
815 809 (reltarget, abssrc, prevsrc))
816 810 return
817 811 if (not opts['after'] and os.path.exists(reltarget) or
818 812 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
819 813 if not opts['force']:
820 814 ui.warn(_('%s: not overwriting - file exists\n') %
821 815 reltarget)
822 816 return
823 817 if not opts['after']:
824 818 os.unlink(reltarget)
825 819 if opts['after']:
826 820 if not os.path.exists(reltarget):
827 821 return
828 822 else:
829 823 targetdir = os.path.dirname(reltarget) or '.'
830 824 if not os.path.isdir(targetdir):
831 825 os.makedirs(targetdir)
832 826 try:
833 827 shutil.copyfile(relsrc, reltarget)
834 828 shutil.copymode(relsrc, reltarget)
835 829 except shutil.Error, inst:
836 830 raise util.Abort(str(inst))
837 831 except IOError, inst:
838 832 if inst.errno == errno.ENOENT:
839 833 ui.warn(_('%s: deleted in working copy\n') % relsrc)
840 834 else:
841 835 ui.warn(_('%s: cannot copy - %s\n') %
842 836 (relsrc, inst.strerror))
843 837 errors += 1
844 838 return
845 839 if ui.verbose or not exact:
846 840 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
847 841 targets[abstarget] = abssrc
848 842 repo.copy(abssrc, abstarget)
849 843 copied.append((abssrc, relsrc, exact))
850 844
851 845 def targetpathfn(pat, dest, srcs):
852 846 if os.path.isdir(pat):
853 847 if pat.endswith(os.sep):
854 848 pat = pat[:-len(os.sep)]
855 849 if destdirexists:
856 850 striplen = len(os.path.split(pat)[0])
857 851 else:
858 852 striplen = len(pat)
859 853 if striplen:
860 854 striplen += len(os.sep)
861 855 res = lambda p: os.path.join(dest, p[striplen:])
862 856 elif destdirexists:
863 857 res = lambda p: os.path.join(dest, os.path.basename(p))
864 858 else:
865 859 res = lambda p: dest
866 860 return res
867 861
868 862 def targetpathafterfn(pat, dest, srcs):
869 863 if util.patkind(pat, None)[0]:
870 864 # a mercurial pattern
871 865 res = lambda p: os.path.join(dest, os.path.basename(p))
872 866 elif len(util.canonpath(repo.root, cwd, pat)) < len(srcs[0][0]):
873 867 # A directory. Either the target path contains the last
874 868 # component of the source path or it does not.
875 869 def evalpath(striplen):
876 870 score = 0
877 871 for s in srcs:
878 872 t = os.path.join(dest, s[1][striplen:])
879 873 if os.path.exists(t):
880 874 score += 1
881 875 return score
882 876
883 877 if pat.endswith(os.sep):
884 878 pat = pat[:-len(os.sep)]
885 879 striplen = len(pat) + len(os.sep)
886 880 if os.path.isdir(os.path.join(dest, os.path.split(pat)[1])):
887 881 score = evalpath(striplen)
888 882 striplen1 = len(os.path.split(pat)[0])
889 883 if striplen1:
890 884 striplen1 += len(os.sep)
891 885 if evalpath(striplen1) > score:
892 886 striplen = striplen1
893 887 res = lambda p: os.path.join(dest, p[striplen:])
894 888 else:
895 889 # a file
896 890 if destdirexists:
897 891 res = lambda p: os.path.join(dest, os.path.basename(p))
898 892 else:
899 893 res = lambda p: dest
900 894 return res
901 895
902 896
903 897 pats = list(pats)
904 898 if not pats:
905 899 raise util.Abort(_('no source or destination specified'))
906 900 if len(pats) == 1:
907 901 raise util.Abort(_('no destination specified'))
908 902 dest = pats.pop()
909 903 destdirexists = os.path.isdir(dest)
910 904 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
911 905 raise util.Abort(_('with multiple sources, destination must be an '
912 906 'existing directory'))
913 907 if opts['after']:
914 908 tfn = targetpathafterfn
915 909 else:
916 910 tfn = targetpathfn
917 911 copylist = []
918 912 for pat in pats:
919 913 srcs = []
920 914 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
921 915 if okaytocopy(abssrc, relsrc, exact):
922 916 srcs.append((abssrc, relsrc, exact))
923 917 if not srcs:
924 918 continue
925 919 copylist.append((tfn(pat, dest, srcs), srcs))
926 920 if not copylist:
927 921 raise util.Abort(_('no files to copy'))
928 922
929 923 for targetpath, srcs in copylist:
930 924 for abssrc, relsrc, exact in srcs:
931 925 copy(abssrc, relsrc, targetpath(relsrc), exact)
932 926
933 927 if errors:
934 928 ui.warn(_('(consider using --after)\n'))
935 929 return errors, copied
936 930
937 931 def copy(ui, repo, *pats, **opts):
938 932 """mark files as copied for the next commit
939 933
940 934 Mark dest as having copies of source files. If dest is a
941 935 directory, copies are put in that directory. If dest is a file,
942 936 there can only be one source.
943 937
944 938 By default, this command copies the contents of files as they
945 939 stand in the working directory. If invoked with --after, the
946 940 operation is recorded, but no copying is performed.
947 941
948 942 This command takes effect in the next commit.
949 943
950 944 NOTE: This command should be treated as experimental. While it
951 945 should properly record copied files, this information is not yet
952 946 fully used by merge, nor fully reported by log.
953 947 """
954 948 errs, copied = docopy(ui, repo, pats, opts)
955 949 return errs
956 950
957 951 def debugancestor(ui, index, rev1, rev2):
958 952 """find the ancestor revision of two revisions in a given index"""
959 953 r = revlog.revlog(util.opener(os.getcwd()), index, "")
960 954 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
961 955 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
962 956
963 957 def debugcheckstate(ui, repo):
964 958 """validate the correctness of the current dirstate"""
965 959 parent1, parent2 = repo.dirstate.parents()
966 960 repo.dirstate.read()
967 961 dc = repo.dirstate.map
968 962 keys = dc.keys()
969 963 keys.sort()
970 964 m1n = repo.changelog.read(parent1)[0]
971 965 m2n = repo.changelog.read(parent2)[0]
972 966 m1 = repo.manifest.read(m1n)
973 967 m2 = repo.manifest.read(m2n)
974 968 errors = 0
975 969 for f in dc:
976 970 state = repo.dirstate.state(f)
977 971 if state in "nr" and f not in m1:
978 972 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
979 973 errors += 1
980 974 if state in "a" and f in m1:
981 975 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
982 976 errors += 1
983 977 if state in "m" and f not in m1 and f not in m2:
984 978 ui.warn(_("%s in state %s, but not in either manifest\n") %
985 979 (f, state))
986 980 errors += 1
987 981 for f in m1:
988 982 state = repo.dirstate.state(f)
989 983 if state not in "nrm":
990 984 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
991 985 errors += 1
992 986 if errors:
993 987 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
994 988
995 989 def debugconfig(ui):
996 990 """show combined config settings from all hgrc files"""
997 991 try:
998 992 repo = hg.repository(ui)
999 993 except hg.RepoError:
1000 994 pass
1001 995 for section, name, value in ui.walkconfig():
1002 996 ui.write('%s.%s=%s\n' % (section, name, value))
1003 997
1004 998 def debugsetparents(ui, repo, rev1, rev2=None):
1005 999 """manually set the parents of the current working directory
1006 1000
1007 1001 This is useful for writing repository conversion tools, but should
1008 1002 be used with care.
1009 1003 """
1010 1004
1011 1005 if not rev2:
1012 1006 rev2 = hex(nullid)
1013 1007
1014 1008 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1015 1009
1016 1010 def debugstate(ui, repo):
1017 1011 """show the contents of the current dirstate"""
1018 1012 repo.dirstate.read()
1019 1013 dc = repo.dirstate.map
1020 1014 keys = dc.keys()
1021 1015 keys.sort()
1022 1016 for file_ in keys:
1023 1017 ui.write("%c %3o %10d %s %s\n"
1024 1018 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1025 1019 time.strftime("%x %X",
1026 1020 time.localtime(dc[file_][3])), file_))
1027 1021 for f in repo.dirstate.copies:
1028 1022 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1029 1023
1030 1024 def debugdata(ui, file_, rev):
1031 1025 """dump the contents of a data file revision"""
1032 1026 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
1033 1027 try:
1034 1028 ui.write(r.revision(r.lookup(rev)))
1035 1029 except KeyError:
1036 1030 raise util.Abort(_('invalid revision identifier %s'), rev)
1037 1031
1038 1032 def debugindex(ui, file_):
1039 1033 """dump the contents of an index file"""
1040 1034 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1041 1035 ui.write(" rev offset length base linkrev" +
1042 1036 " nodeid p1 p2\n")
1043 1037 for i in range(r.count()):
1044 1038 e = r.index[i]
1045 1039 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1046 1040 i, e[0], e[1], e[2], e[3],
1047 1041 short(e[6]), short(e[4]), short(e[5])))
1048 1042
1049 1043 def debugindexdot(ui, file_):
1050 1044 """dump an index DAG as a .dot file"""
1051 1045 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1052 1046 ui.write("digraph G {\n")
1053 1047 for i in range(r.count()):
1054 1048 e = r.index[i]
1055 1049 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1056 1050 if e[5] != nullid:
1057 1051 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1058 1052 ui.write("}\n")
1059 1053
1060 1054 def debugrename(ui, repo, file, rev=None):
1061 1055 """dump rename information"""
1062 1056 r = repo.file(relpath(repo, [file])[0])
1063 1057 if rev:
1064 1058 try:
1065 1059 # assume all revision numbers are for changesets
1066 1060 n = repo.lookup(rev)
1067 1061 change = repo.changelog.read(n)
1068 1062 m = repo.manifest.read(change[0])
1069 1063 n = m[relpath(repo, [file])[0]]
1070 1064 except (hg.RepoError, KeyError):
1071 1065 n = r.lookup(rev)
1072 1066 else:
1073 1067 n = r.tip()
1074 1068 m = r.renamed(n)
1075 1069 if m:
1076 1070 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1077 1071 else:
1078 1072 ui.write(_("not renamed\n"))
1079 1073
1080 1074 def debugwalk(ui, repo, *pats, **opts):
1081 1075 """show how files match on given patterns"""
1082 1076 items = list(walk(repo, pats, opts))
1083 1077 if not items:
1084 1078 return
1085 1079 fmt = '%%s %%-%ds %%-%ds %%s' % (
1086 1080 max([len(abs) for (src, abs, rel, exact) in items]),
1087 1081 max([len(rel) for (src, abs, rel, exact) in items]))
1088 1082 for src, abs, rel, exact in items:
1089 1083 line = fmt % (src, abs, rel, exact and 'exact' or '')
1090 1084 ui.write("%s\n" % line.rstrip())
1091 1085
1092 1086 def diff(ui, repo, *pats, **opts):
1093 1087 """diff repository (or selected files)
1094 1088
1095 1089 Show differences between revisions for the specified files.
1096 1090
1097 1091 Differences between files are shown using the unified diff format.
1098 1092
1099 1093 When two revision arguments are given, then changes are shown
1100 1094 between those revisions. If only one revision is specified then
1101 1095 that revision is compared to the working directory, and, when no
1102 1096 revisions are specified, the working directory files are compared
1103 1097 to its parent.
1104 1098
1105 1099 Without the -a option, diff will avoid generating diffs of files
1106 1100 it detects as binary. With -a, diff will generate a diff anyway,
1107 1101 probably with undesirable results.
1108 1102 """
1109 1103 node1, node2 = None, None
1110 1104 revs = [repo.lookup(x) for x in opts['rev']]
1111 1105
1112 1106 if len(revs) > 0:
1113 1107 node1 = revs[0]
1114 1108 if len(revs) > 1:
1115 1109 node2 = revs[1]
1116 1110 if len(revs) > 2:
1117 1111 raise util.Abort(_("too many revisions to diff"))
1118 1112
1119 1113 fns, matchfn, anypats, cwd = matchpats(repo, pats, opts)
1120 1114
1121 1115 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1122 1116 text=opts['text'])
1123 1117
1124 1118 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1125 1119 node = repo.lookup(changeset)
1126 1120 prev, other = repo.changelog.parents(node)
1127 1121 change = repo.changelog.read(node)
1128 1122
1129 1123 fp = make_file(repo, repo.changelog, opts['output'],
1130 1124 node=node, total=total, seqno=seqno,
1131 1125 revwidth=revwidth)
1132 1126 if fp != sys.stdout:
1133 1127 ui.note("%s\n" % fp.name)
1134 1128
1135 1129 fp.write("# HG changeset patch\n")
1136 1130 fp.write("# User %s\n" % change[1])
1137 1131 fp.write("# Node ID %s\n" % hex(node))
1138 1132 fp.write("# Parent %s\n" % hex(prev))
1139 1133 if other != nullid:
1140 1134 fp.write("# Parent %s\n" % hex(other))
1141 1135 fp.write(change[4].rstrip())
1142 1136 fp.write("\n\n")
1143 1137
1144 1138 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1145 1139 if fp != sys.stdout:
1146 1140 fp.close()
1147 1141
1148 1142 def export(ui, repo, *changesets, **opts):
1149 1143 """dump the header and diffs for one or more changesets
1150 1144
1151 1145 Print the changeset header and diffs for one or more revisions.
1152 1146
1153 1147 The information shown in the changeset header is: author,
1154 1148 changeset hash, parent and commit comment.
1155 1149
1156 1150 Output may be to a file, in which case the name of the file is
1157 1151 given using a format string. The formatting rules are as follows:
1158 1152
1159 1153 %% literal "%" character
1160 1154 %H changeset hash (40 bytes of hexadecimal)
1161 1155 %N number of patches being generated
1162 1156 %R changeset revision number
1163 1157 %b basename of the exporting repository
1164 1158 %h short-form changeset hash (12 bytes of hexadecimal)
1165 1159 %n zero-padded sequence number, starting at 1
1166 1160 %r zero-padded changeset revision number
1167 1161
1168 1162 Without the -a option, export will avoid generating diffs of files
1169 1163 it detects as binary. With -a, export will generate a diff anyway,
1170 1164 probably with undesirable results.
1171 1165 """
1172 1166 if not changesets:
1173 1167 raise util.Abort(_("export requires at least one changeset"))
1174 1168 seqno = 0
1175 1169 revs = list(revrange(ui, repo, changesets))
1176 1170 total = len(revs)
1177 1171 revwidth = max(map(len, revs))
1178 1172 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1179 1173 for cset in revs:
1180 1174 seqno += 1
1181 1175 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1182 1176
1183 1177 def forget(ui, repo, *pats, **opts):
1184 1178 """don't add the specified files on the next commit
1185 1179
1186 1180 Undo an 'hg add' scheduled for the next commit.
1187 1181 """
1188 1182 forget = []
1189 1183 for src, abs, rel, exact in walk(repo, pats, opts):
1190 1184 if repo.dirstate.state(abs) == 'a':
1191 1185 forget.append(abs)
1192 1186 if ui.verbose or not exact:
1193 1187 ui.status(_('forgetting %s\n') % rel)
1194 1188 repo.forget(forget)
1195 1189
1196 1190 def grep(ui, repo, pattern, *pats, **opts):
1197 1191 """search for a pattern in specified files and revisions
1198 1192
1199 1193 Search revisions of files for a regular expression.
1200 1194
1201 1195 This command behaves differently than Unix grep. It only accepts
1202 1196 Python/Perl regexps. It searches repository history, not the
1203 1197 working directory. It always prints the revision number in which
1204 1198 a match appears.
1205 1199
1206 1200 By default, grep only prints output for the first revision of a
1207 1201 file in which it finds a match. To get it to print every revision
1208 1202 that contains a change in match status ("-" for a match that
1209 1203 becomes a non-match, or "+" for a non-match that becomes a match),
1210 1204 use the --all flag.
1211 1205 """
1212 1206 reflags = 0
1213 1207 if opts['ignore_case']:
1214 1208 reflags |= re.I
1215 1209 regexp = re.compile(pattern, reflags)
1216 1210 sep, eol = ':', '\n'
1217 1211 if opts['print0']:
1218 1212 sep = eol = '\0'
1219 1213
1220 1214 fcache = {}
1221 1215 def getfile(fn):
1222 1216 if fn not in fcache:
1223 1217 fcache[fn] = repo.file(fn)
1224 1218 return fcache[fn]
1225 1219
1226 1220 def matchlines(body):
1227 1221 begin = 0
1228 1222 linenum = 0
1229 1223 while True:
1230 1224 match = regexp.search(body, begin)
1231 1225 if not match:
1232 1226 break
1233 1227 mstart, mend = match.span()
1234 1228 linenum += body.count('\n', begin, mstart) + 1
1235 1229 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1236 1230 lend = body.find('\n', mend)
1237 1231 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1238 1232 begin = lend + 1
1239 1233
1240 1234 class linestate(object):
1241 1235 def __init__(self, line, linenum, colstart, colend):
1242 1236 self.line = line
1243 1237 self.linenum = linenum
1244 1238 self.colstart = colstart
1245 1239 self.colend = colend
1246 1240 def __eq__(self, other):
1247 1241 return self.line == other.line
1248 1242 def __hash__(self):
1249 1243 return hash(self.line)
1250 1244
1251 1245 matches = {}
1252 1246 def grepbody(fn, rev, body):
1253 1247 matches[rev].setdefault(fn, {})
1254 1248 m = matches[rev][fn]
1255 1249 for lnum, cstart, cend, line in matchlines(body):
1256 1250 s = linestate(line, lnum, cstart, cend)
1257 1251 m[s] = s
1258 1252
1259 1253 prev = {}
1260 1254 ucache = {}
1261 1255 def display(fn, rev, states, prevstates):
1262 1256 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1263 1257 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1264 1258 counts = {'-': 0, '+': 0}
1265 1259 filerevmatches = {}
1266 1260 for l in diff:
1267 1261 if incrementing or not opts['all']:
1268 1262 change = ((l in prevstates) and '-') or '+'
1269 1263 r = rev
1270 1264 else:
1271 1265 change = ((l in states) and '-') or '+'
1272 1266 r = prev[fn]
1273 1267 cols = [fn, str(rev)]
1274 1268 if opts['line_number']: cols.append(str(l.linenum))
1275 1269 if opts['all']: cols.append(change)
1276 1270 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1277 1271 ucache))
1278 1272 if opts['files_with_matches']:
1279 1273 c = (fn, rev)
1280 1274 if c in filerevmatches: continue
1281 1275 filerevmatches[c] = 1
1282 1276 else:
1283 1277 cols.append(l.line)
1284 1278 ui.write(sep.join(cols), eol)
1285 1279 counts[change] += 1
1286 1280 return counts['+'], counts['-']
1287 1281
1288 1282 fstate = {}
1289 1283 skip = {}
1290 1284 changeiter, getchange = walkchangerevs(ui, repo, pats, opts)
1291 1285 count = 0
1292 1286 incrementing = False
1293 1287 for st, rev, fns in changeiter:
1294 1288 if st == 'window':
1295 1289 incrementing = rev
1296 1290 matches.clear()
1297 1291 elif st == 'add':
1298 1292 change = repo.changelog.read(repo.lookup(str(rev)))
1299 1293 mf = repo.manifest.read(change[0])
1300 1294 matches[rev] = {}
1301 1295 for fn in fns:
1302 1296 if fn in skip: continue
1303 1297 fstate.setdefault(fn, {})
1304 1298 try:
1305 1299 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1306 1300 except KeyError:
1307 1301 pass
1308 1302 elif st == 'iter':
1309 1303 states = matches[rev].items()
1310 1304 states.sort()
1311 1305 for fn, m in states:
1312 1306 if fn in skip: continue
1313 1307 if incrementing or not opts['all'] or fstate[fn]:
1314 1308 pos, neg = display(fn, rev, m, fstate[fn])
1315 1309 count += pos + neg
1316 1310 if pos and not opts['all']:
1317 1311 skip[fn] = True
1318 1312 fstate[fn] = m
1319 1313 prev[fn] = rev
1320 1314
1321 1315 if not incrementing:
1322 1316 fstate = fstate.items()
1323 1317 fstate.sort()
1324 1318 for fn, state in fstate:
1325 1319 if fn in skip: continue
1326 1320 display(fn, rev, {}, state)
1327 1321 return (count == 0 and 1) or 0
1328 1322
1329 1323 def heads(ui, repo, **opts):
1330 1324 """show current repository heads
1331 1325
1332 1326 Show all repository head changesets.
1333 1327
1334 1328 Repository "heads" are changesets that don't have children
1335 1329 changesets. They are where development generally takes place and
1336 1330 are the usual targets for update and merge operations.
1337 1331 """
1338 1332 if opts['rev']:
1339 1333 heads = repo.heads(repo.lookup(opts['rev']))
1340 1334 else:
1341 1335 heads = repo.heads()
1342 1336 br = None
1343 1337 if opts['branches']:
1344 1338 br = repo.branchlookup(heads)
1345 1339 for n in heads:
1346 1340 show_changeset(ui, repo, changenode=n, brinfo=br)
1347 1341
1348 1342 def identify(ui, repo):
1349 1343 """print information about the working copy
1350 1344
1351 1345 Print a short summary of the current state of the repo.
1352 1346
1353 1347 This summary identifies the repository state using one or two parent
1354 1348 hash identifiers, followed by a "+" if there are uncommitted changes
1355 1349 in the working directory, followed by a list of tags for this revision.
1356 1350 """
1357 1351 parents = [p for p in repo.dirstate.parents() if p != nullid]
1358 1352 if not parents:
1359 1353 ui.write(_("unknown\n"))
1360 1354 return
1361 1355
1362 1356 hexfunc = ui.verbose and hex or short
1363 1357 (c, a, d, u) = repo.changes()
1364 1358 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1365 1359 (c or a or d) and "+" or "")]
1366 1360
1367 1361 if not ui.quiet:
1368 1362 # multiple tags for a single parent separated by '/'
1369 1363 parenttags = ['/'.join(tags)
1370 1364 for tags in map(repo.nodetags, parents) if tags]
1371 1365 # tags for multiple parents separated by ' + '
1372 1366 if parenttags:
1373 1367 output.append(' + '.join(parenttags))
1374 1368
1375 1369 ui.write("%s\n" % ' '.join(output))
1376 1370
1377 1371 def import_(ui, repo, patch1, *patches, **opts):
1378 1372 """import an ordered set of patches
1379 1373
1380 1374 Import a list of patches and commit them individually.
1381 1375
1382 1376 If there are outstanding changes in the working directory, import
1383 1377 will abort unless given the -f flag.
1384 1378
1385 1379 If a patch looks like a mail message (its first line starts with
1386 1380 "From " or looks like an RFC822 header), it will not be applied
1387 1381 unless the -f option is used. The importer neither parses nor
1388 1382 discards mail headers, so use -f only to override the "mailness"
1389 1383 safety check, not to import a real mail message.
1390 1384 """
1391 1385 patches = (patch1,) + patches
1392 1386
1393 1387 if not opts['force']:
1394 1388 (c, a, d, u) = repo.changes()
1395 1389 if c or a or d:
1396 1390 raise util.Abort(_("outstanding uncommitted changes"))
1397 1391
1398 1392 d = opts["base"]
1399 1393 strip = opts["strip"]
1400 1394
1401 1395 mailre = re.compile(r'(?:From |[\w-]+:)')
1402 1396
1403 1397 # attempt to detect the start of a patch
1404 1398 # (this heuristic is borrowed from quilt)
1405 1399 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1406 1400 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1407 1401 '(---|\*\*\*)[ \t])')
1408 1402
1409 1403 for patch in patches:
1410 1404 ui.status(_("applying %s\n") % patch)
1411 1405 pf = os.path.join(d, patch)
1412 1406
1413 1407 message = []
1414 1408 user = None
1415 1409 hgpatch = False
1416 1410 for line in file(pf):
1417 1411 line = line.rstrip()
1418 1412 if (not message and not hgpatch and
1419 1413 mailre.match(line) and not opts['force']):
1420 1414 if len(line) > 35: line = line[:32] + '...'
1421 1415 raise util.Abort(_('first line looks like a '
1422 1416 'mail header: ') + line)
1423 1417 if diffre.match(line):
1424 1418 break
1425 1419 elif hgpatch:
1426 1420 # parse values when importing the result of an hg export
1427 1421 if line.startswith("# User "):
1428 1422 user = line[7:]
1429 1423 ui.debug(_('User: %s\n') % user)
1430 1424 elif not line.startswith("# ") and line:
1431 1425 message.append(line)
1432 1426 hgpatch = False
1433 1427 elif line == '# HG changeset patch':
1434 1428 hgpatch = True
1435 1429 message = [] # We may have collected garbage
1436 1430 else:
1437 1431 message.append(line)
1438 1432
1439 1433 # make sure message isn't empty
1440 1434 if not message:
1441 1435 message = _("imported patch %s\n") % patch
1442 1436 else:
1443 1437 message = "%s\n" % '\n'.join(message)
1444 1438 ui.debug(_('message:\n%s\n') % message)
1445 1439
1446 1440 files = util.patch(strip, pf, ui)
1447 1441
1448 1442 if len(files) > 0:
1449 1443 addremove(ui, repo, *files)
1450 1444 repo.commit(files, message, user)
1451 1445
1452 1446 def incoming(ui, repo, source="default", **opts):
1453 1447 """show new changesets found in source
1454 1448
1455 1449 Show new changesets found in the specified repo or the default
1456 1450 pull repo. These are the changesets that would be pulled if a pull
1457 1451 was requested.
1458 1452
1459 1453 Currently only local repositories are supported.
1460 1454 """
1461 1455 source = ui.expandpath(source, repo.root)
1462 1456 other = hg.repository(ui, source)
1463 1457 if not other.local():
1464 1458 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1465 1459 o = repo.findincoming(other)
1466 1460 if not o:
1467 1461 return
1468 1462 o = other.changelog.nodesbetween(o)[0]
1469 1463 if opts['newest_first']:
1470 1464 o.reverse()
1471 1465 for n in o:
1472 1466 parents = [p for p in other.changelog.parents(n) if p != nullid]
1473 1467 if opts['no_merges'] and len(parents) == 2:
1474 1468 continue
1475 1469 show_changeset(ui, other, changenode=n)
1476 1470 if opts['patch']:
1477 1471 prev = (parents and parents[0]) or nullid
1478 1472 dodiff(ui, ui, other, prev, n)
1479 1473 ui.write("\n")
1480 1474
1481 1475 def init(ui, dest="."):
1482 1476 """create a new repository in the given directory
1483 1477
1484 1478 Initialize a new repository in the given directory. If the given
1485 1479 directory does not exist, it is created.
1486 1480
1487 1481 If no directory is given, the current directory is used.
1488 1482 """
1489 1483 if not os.path.exists(dest):
1490 1484 os.mkdir(dest)
1491 1485 hg.repository(ui, dest, create=1)
1492 1486
1493 1487 def locate(ui, repo, *pats, **opts):
1494 1488 """locate files matching specific patterns
1495 1489
1496 1490 Print all files under Mercurial control whose names match the
1497 1491 given patterns.
1498 1492
1499 1493 This command searches the current directory and its
1500 1494 subdirectories. To search an entire repository, move to the root
1501 1495 of the repository.
1502 1496
1503 1497 If no patterns are given to match, this command prints all file
1504 1498 names.
1505 1499
1506 1500 If you want to feed the output of this command into the "xargs"
1507 1501 command, use the "-0" option to both this command and "xargs".
1508 1502 This will avoid the problem of "xargs" treating single filenames
1509 1503 that contain white space as multiple filenames.
1510 1504 """
1511 1505 end = opts['print0'] and '\0' or '\n'
1512 1506
1513 1507 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1514 1508 if repo.dirstate.state(abs) == '?':
1515 1509 continue
1516 1510 if opts['fullpath']:
1517 1511 ui.write(os.path.join(repo.root, abs), end)
1518 1512 else:
1519 1513 ui.write(rel, end)
1520 1514
1521 1515 def log(ui, repo, *pats, **opts):
1522 1516 """show revision history of entire repository or files
1523 1517
1524 1518 Print the revision history of the specified files or the entire project.
1525 1519
1526 1520 By default this command outputs: changeset id and hash, tags,
1527 1521 non-trivial parents, user, date and time, and a summary for each
1528 1522 commit. When the -v/--verbose switch is used, the list of changed
1529 1523 files and full commit message is shown.
1530 1524 """
1531 1525 class dui(object):
1532 1526 # Implement and delegate some ui protocol. Save hunks of
1533 1527 # output for later display in the desired order.
1534 1528 def __init__(self, ui):
1535 1529 self.ui = ui
1536 1530 self.hunk = {}
1537 1531 def bump(self, rev):
1538 1532 self.rev = rev
1539 1533 self.hunk[rev] = []
1540 1534 def note(self, *args):
1541 1535 if self.verbose:
1542 1536 self.write(*args)
1543 1537 def status(self, *args):
1544 1538 if not self.quiet:
1545 1539 self.write(*args)
1546 1540 def write(self, *args):
1547 1541 self.hunk[self.rev].append(args)
1548 1542 def debug(self, *args):
1549 1543 if self.debugflag:
1550 1544 self.write(*args)
1551 1545 def __getattr__(self, key):
1552 1546 return getattr(self.ui, key)
1553 1547 changeiter, getchange = walkchangerevs(ui, repo, pats, opts)
1554 1548 for st, rev, fns in changeiter:
1555 1549 if st == 'window':
1556 1550 du = dui(ui)
1557 1551 elif st == 'add':
1558 1552 du.bump(rev)
1559 1553 changenode = repo.changelog.node(rev)
1560 1554 parents = [p for p in repo.changelog.parents(changenode)
1561 1555 if p != nullid]
1562 1556 if opts['no_merges'] and len(parents) == 2:
1563 1557 continue
1564 1558 if opts['only_merges'] and len(parents) != 2:
1565 1559 continue
1566 1560
1567 1561 br = None
1568 1562 if opts['keyword']:
1569 1563 changes = repo.changelog.read(repo.changelog.node(rev))
1570 1564 miss = 0
1571 1565 for k in [kw.lower() for kw in opts['keyword']]:
1572 1566 if not (k in changes[1].lower() or
1573 1567 k in changes[4].lower() or
1574 1568 k in " ".join(changes[3][:20]).lower()):
1575 1569 miss = 1
1576 1570 break
1577 1571 if miss:
1578 1572 continue
1579 1573
1580 1574 if opts['branch']:
1581 1575 br = repo.branchlookup([repo.changelog.node(rev)])
1582 1576
1583 1577 show_changeset(du, repo, rev, brinfo=br)
1584 1578 if opts['patch']:
1585 1579 prev = (parents and parents[0]) or nullid
1586 1580 dodiff(du, du, repo, prev, changenode, fns)
1587 1581 du.write("\n\n")
1588 1582 elif st == 'iter':
1589 1583 for args in du.hunk[rev]:
1590 1584 ui.write(*args)
1591 1585
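# editor's annotation: dui collects each revision's output into self.hunk[rev];
# nothing reaches the real ui until the 'iter' stage replays the buffered hunks,
# so display order is independent of the order in which revisions are gathered.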
1592 1586 def manifest(ui, repo, rev=None):
1593 1587 """output the latest or given revision of the project manifest
1594 1588
1595 1589 Print a list of version controlled files for the given revision.
1596 1590
1597 1591 The manifest is the list of files being version controlled. If no revision
1598 1592 is given then the tip is used.
1599 1593 """
1600 1594 if rev:
1601 1595 try:
1602 1596 # assume all revision numbers are for changesets
1603 1597 n = repo.lookup(rev)
1604 1598 change = repo.changelog.read(n)
1605 1599 n = change[0]
1606 1600 except hg.RepoError:
1607 1601 n = repo.manifest.lookup(rev)
1608 1602 else:
1609 1603 n = repo.manifest.tip()
1610 1604 m = repo.manifest.read(n)
1611 1605 mf = repo.manifest.readflags(n)
1612 1606 files = m.keys()
1613 1607 files.sort()
1614 1608
1615 1609 for f in files:
1616 1610 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1617 1611
1618 1612 def outgoing(ui, repo, dest="default-push", **opts):
1619 1613 """show changesets not found in destination
1620 1614
1621 1615 Show changesets not found in the specified destination repo or the
1622 1616 default push repo. These are the changesets that would be pushed
1623 1617 if a push was requested.
1624 1618 """
1625 1619 dest = ui.expandpath(dest, repo.root)
1626 1620 other = hg.repository(ui, dest)
1627 1621 o = repo.findoutgoing(other)
1628 1622 o = repo.changelog.nodesbetween(o)[0]
1629 1623 if opts['newest_first']:
1630 1624 o.reverse()
1631 1625 for n in o:
1632 1626 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1633 1627 if opts['no_merges'] and len(parents) == 2:
1634 1628 continue
1635 1629 show_changeset(ui, repo, changenode=n)
1636 1630 if opts['patch']:
1637 1631 prev = (parents and parents[0]) or nullid
1638 1632 dodiff(ui, ui, repo, prev, n)
1639 1633 ui.write("\n")
1640 1634
1641 1635 def parents(ui, repo, rev=None):
1642 1636 """show the parents of the working dir or revision
1643 1637
1644 1638 Print the working directory's parent revisions.
1645 1639 """
1646 1640 if rev:
1647 1641 p = repo.changelog.parents(repo.lookup(rev))
1648 1642 else:
1649 1643 p = repo.dirstate.parents()
1650 1644
1651 1645 for n in p:
1652 1646 if n != nullid:
1653 1647 show_changeset(ui, repo, changenode=n)
1654 1648
1655 1649 def paths(ui, search=None):
1656 1650 """show definition of symbolic path names
1657 1651
1658 1652 Show definition of symbolic path name NAME. If no name is given, show
1659 1653 definitions of all available names.

1660 1654
1661 1655 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1662 1656 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1663 1657 """
1664 1658 try:
1665 1659 repo = hg.repository(ui=ui)
1666 1660 except hg.RepoError:
1667 1661 pass
1668 1662
1669 1663 if search:
1670 1664 for name, path in ui.configitems("paths"):
1671 1665 if name == search:
1672 1666 ui.write("%s\n" % path)
1673 1667 return
1674 1668 ui.warn(_("not found!\n"))
1675 1669 return 1
1676 1670 else:
1677 1671 for name, path in ui.configitems("paths"):
1678 1672 ui.write("%s = %s\n" % (name, path))
1679 1673
1680 1674 def pull(ui, repo, source="default", **opts):
1681 1675 """pull changes from the specified source
1682 1676
1683 1677 Pull changes from a remote repository to a local one.
1684 1678
1685 1679 This finds all changes from the repository at the specified path
1686 1680 or URL and adds them to the local repository. By default, this
1687 1681 does not update the copy of the project in the working directory.
1688 1682
1689 1683 Valid URLs are of the form:
1690 1684
1691 1685 local/filesystem/path
1692 1686 http://[user@]host[:port][/path]
1693 1687 https://[user@]host[:port][/path]
1694 1688 ssh://[user@]host[:port][/path]
1695 1689
1696 1690 SSH requires an accessible shell account on the destination machine
1697 1691 and a copy of hg in the remote path. With SSH, paths are relative
1698 1692 to the remote user's home directory by default; use two slashes at
1699 1693 the start of a path to specify it as relative to the filesystem root.
1700 1694 """
1701 1695 source = ui.expandpath(source, repo.root)
1702 1696 ui.status(_('pulling from %s\n') % (source))
1703 1697
1704 1698 if opts['ssh']:
1705 1699 ui.setconfig("ui", "ssh", opts['ssh'])
1706 1700 if opts['remotecmd']:
1707 1701 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1708 1702
1709 1703 other = hg.repository(ui, source)
1710 1704 revs = None
1711 1705 if opts['rev'] and not other.local():
1712 1706 raise util.Abort("pull -r doesn't work for remote repositories yet")
1713 1707 elif opts['rev']:
1714 1708 revs = [other.lookup(rev) for rev in opts['rev']]
1715 1709 r = repo.pull(other, heads=revs)
1716 1710 if not r:
1717 1711 if opts['update']:
1718 1712 return update(ui, repo)
1719 1713 else:
1720 1714 ui.status(_("(run 'hg update' to get a working copy)\n"))
1721 1715
1722 1716 return r
1723 1717
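# illustrative invocations (editor's note, not part of this changeset), based on
# the URL forms documented above; hostnames are placeholders:
#   hg pull -u http://example.com/hg/project       # pull, then update to tip
#   hg pull ssh://user@example.com//tmp/repo       # '//' = path from filesystem root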
1724 1718 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1725 1719 """push changes to the specified destination
1726 1720
1727 1721 Push changes from the local repository to the given destination.
1728 1722
1729 1723 This is the symmetrical operation for pull. It helps to move
1730 1724 changes from the current repository to a different one. If the
1731 1725 destination is local, this is identical to a pull in that directory
1732 1726 from the current one.
1733 1727
1734 1728 By default, push will refuse to run if it detects the result would
1735 1729 increase the number of remote heads. This generally indicates the
1736 1730 the client has forgotten to sync and merge before pushing.
1737 1731
1738 1732 Valid URLs are of the form:
1739 1733
1740 1734 local/filesystem/path
1741 1735 ssh://[user@]host[:port][/path]
1742 1736
1743 1737 SSH requires an accessible shell account on the destination
1744 1738 machine and a copy of hg in the remote path.
1745 1739 """
1746 1740 dest = ui.expandpath(dest, repo.root)
1747 1741 ui.status('pushing to %s\n' % (dest))
1748 1742
1749 1743 if ssh:
1750 1744 ui.setconfig("ui", "ssh", ssh)
1751 1745 if remotecmd:
1752 1746 ui.setconfig("ui", "remotecmd", remotecmd)
1753 1747
1754 1748 other = hg.repository(ui, dest)
1755 1749 r = repo.push(other, force)
1756 1750 return r
1757 1751
1758 1752 def rawcommit(ui, repo, *flist, **rc):
1759 1753 """raw commit interface
1760 1754
1761 1755 Low-level commit, for use in helper scripts.
1762 1756
1763 1757 This command is not intended to be used by normal users, as it is
1764 1758 primarily useful for importing from other SCMs.
1765 1759 """
1766 1760 message = rc['message']
1767 1761 if not message and rc['logfile']:
1768 1762 try:
1769 1763 message = open(rc['logfile']).read()
1770 1764 except IOError:
1771 1765 pass
1772 1766 if not message and not rc['logfile']:
1773 1767 raise util.Abort(_("missing commit message"))
1774 1768
1775 1769 files = relpath(repo, list(flist))
1776 1770 if rc['files']:
1777 1771 files += open(rc['files']).read().splitlines()
1778 1772
1779 1773 rc['parent'] = map(repo.lookup, rc['parent'])
1780 1774
1781 1775 try:
1782 1776 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1783 1777 except ValueError, inst:
1784 1778 raise util.Abort(str(inst))
1785 1779
1786 1780 def recover(ui, repo):
1787 1781 """roll back an interrupted transaction
1788 1782
1789 1783 Recover from an interrupted commit or pull.
1790 1784
1791 1785 This command tries to fix the repository status after an interrupted
1792 1786 operation. It should only be necessary when Mercurial suggests it.
1793 1787 """
1794 1788 if repo.recover():
1795 1789 return repo.verify()
1796 1790 return False
1797 1791
1798 1792 def remove(ui, repo, pat, *pats, **opts):
1799 1793 """remove the specified files on the next commit
1800 1794
1801 1795 Schedule the indicated files for removal from the repository.
1802 1796
1803 1797 This command schedules the files to be removed at the next commit.
1804 1798 This only removes files from the current branch, not from the
1805 1799 entire project history. If the files still exist in the working
1806 1800 directory, they will be deleted from it.
1807 1801 """
1808 1802 names = []
1809 1803 def okaytoremove(abs, rel, exact):
1810 1804 c, a, d, u = repo.changes(files = [abs])
1811 1805 reason = None
1812 1806 if c: reason = _('is modified')
1813 1807 elif a: reason = _('has been marked for add')
1814 1808 elif u: reason = _('is not managed')
1815 1809 if reason:
1816 1810 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1817 1811 else:
1818 1812 return True
1819 1813 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1820 1814 if okaytoremove(abs, rel, exact):
1821 1815 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1822 1816 names.append(abs)
1823 1817 repo.remove(names, unlink=True)
1824 1818
1825 1819 def rename(ui, repo, *pats, **opts):
1826 1820 """rename files; equivalent of copy + remove
1827 1821
1828 1822 Mark dest as copies of sources; mark sources for deletion. If
1829 1823 dest is a directory, copies are put in that directory. If dest is
1830 1824 a file, there can only be one source.
1831 1825
1832 1826 By default, this command copies the contents of files as they
1833 1827 stand in the working directory. If invoked with --after, the
1834 1828 operation is recorded, but no copying is performed.
1835 1829
1836 1830 This command takes effect in the next commit.
1837 1831
1838 1832 NOTE: This command should be treated as experimental. While it
1839 1833 should properly record renamed files, this information is not yet
1840 1834 fully used by merge, nor fully reported by log.
1841 1835 """
1842 1836 errs, copied = docopy(ui, repo, pats, opts)
1843 1837 names = []
1844 1838 for abs, rel, exact in copied:
1845 1839 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1846 1840 names.append(abs)
1847 1841 repo.remove(names, unlink=True)
1848 1842 return errs
1849 1843
1850 1844 def revert(ui, repo, *pats, **opts):
1851 1845 """revert modified files or dirs back to their unmodified states
1852 1846
1853 1847 Revert any uncommitted modifications made to the named files or
1854 1848 directories. This restores the contents of the affected files to
1855 1849 an unmodified state.
1856 1850
1857 1851 If a file has been deleted, it is recreated. If the executable
1858 1852 mode of a file was changed, it is reset.
1859 1853
1860 1854 If names are given, all files matching the names are reverted.
1861 1855
1862 1856 If no arguments are given, all files in the repository are reverted.
1863 1857 """
1864 1858 node = opts['rev'] and repo.lookup(opts['rev']) or \
1865 1859 repo.dirstate.parents()[0]
1866 1860
1867 1861 files, choose, anypats, cwd = matchpats(repo, pats, opts)
1868 1862 (c, a, d, u) = repo.changes(match=choose)
1869 1863 repo.forget(a)
1870 1864 repo.undelete(d)
1871 1865
1872 1866 return repo.update(node, False, True, choose, False)
1873 1867
1874 1868 def root(ui, repo):
1875 1869 """print the root (top) of the current working dir
1876 1870
1877 1871 Print the root directory of the current repository.
1878 1872 """
1879 1873 ui.write(repo.root + "\n")
1880 1874
1881 1875 def serve(ui, repo, **opts):
1882 1876 """export the repository via HTTP
1883 1877
1884 1878 Start a local HTTP repository browser and pull server.
1885 1879
1886 1880 By default, the server logs accesses to stdout and errors to
1887 1881 stderr. Use the "-A" and "-E" options to log to files.
1888 1882 """
1889 1883
1890 1884 if opts["stdio"]:
1891 1885 fin, fout = sys.stdin, sys.stdout
1892 1886 sys.stdout = sys.stderr
1893 1887
1894 1888 # Prevent insertion/deletion of CRs
1895 1889 util.set_binary(fin)
1896 1890 util.set_binary(fout)
1897 1891
1898 1892 def getarg():
1899 1893 argline = fin.readline()[:-1]
1900 1894 arg, l = argline.split()
1901 1895 val = fin.read(int(l))
1902 1896 return arg, val
1903 1897 def respond(v):
1904 1898 fout.write("%d\n" % len(v))
1905 1899 fout.write(v)
1906 1900 fout.flush()
1907 1901
1908 1902 lock = None
1909 1903
1910 1904 while 1:
1911 1905 cmd = fin.readline()[:-1]
1912 1906 if cmd == '':
1913 1907 return
1914 1908 if cmd == "heads":
1915 1909 h = repo.heads()
1916 1910 respond(" ".join(map(hex, h)) + "\n")
1917 1911 if cmd == "lock":
1918 1912 lock = repo.lock()
1919 1913 respond("")
1920 1914 if cmd == "unlock":
1921 1915 if lock:
1922 1916 lock.release()
1923 1917 lock = None
1924 1918 respond("")
1925 1919 elif cmd == "branches":
1926 1920 arg, nodes = getarg()
1927 1921 nodes = map(bin, nodes.split(" "))
1928 1922 r = []
1929 1923 for b in repo.branches(nodes):
1930 1924 r.append(" ".join(map(hex, b)) + "\n")
1931 1925 respond("".join(r))
1932 1926 elif cmd == "between":
1933 1927 arg, pairs = getarg()
1934 1928 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1935 1929 r = []
1936 1930 for b in repo.between(pairs):
1937 1931 r.append(" ".join(map(hex, b)) + "\n")
1938 1932 respond("".join(r))
1939 1933 elif cmd == "changegroup":
1940 1934 nodes = []
1941 1935 arg, roots = getarg()
1942 1936 nodes = map(bin, roots.split(" "))
1943 1937
1944 1938 cg = repo.changegroup(nodes)
1945 1939 while 1:
1946 1940 d = cg.read(4096)
1947 1941 if not d:
1948 1942 break
1949 1943 fout.write(d)
1950 1944
1951 1945 fout.flush()
1952 1946
1953 1947 elif cmd == "addchangegroup":
1954 1948 if not lock:
1955 1949 respond("not locked")
1956 1950 continue
1957 1951 respond("")
1958 1952
1959 1953 r = repo.addchangegroup(fin)
1960 1954 respond("")
1961 1955
1962 1956 optlist = "name templates style address port ipv6 accesslog errorlog"
1963 1957 for o in optlist.split():
1964 1958 if opts[o]:
1965 1959 ui.setconfig("web", o, opts[o])
1966 1960
1967 1961 try:
1968 1962 httpd = hgweb.create_server(repo)
1969 1963 except socket.error, inst:
1970 1964 raise util.Abort('cannot start server: ' + inst.args[1])
1971 1965
1972 1966 if ui.verbose:
1973 1967 addr, port = httpd.socket.getsockname()
1974 1968 if addr == '0.0.0.0':
1975 1969 addr = socket.gethostname()
1976 1970 else:
1977 1971 try:
1978 1972 addr = socket.gethostbyaddr(addr)[0]
1979 1973 except socket.error:
1980 1974 pass
1981 1975 if port != 80:
1982 1976 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1983 1977 else:
1984 1978 ui.status(_('listening at http://%s/\n') % addr)
1985 1979 httpd.serve_forever()
1986 1980
1987 1981 def status(ui, repo, *pats, **opts):
1988 1982 """show changed files in the working directory
1989 1983
1990 1984 Show changed files in the repository. If names are
1991 1985 given, only files that match are shown.
1992 1986
1993 1987 The codes used to show the status of files are:
1994 1988 M = modified
1995 1989 A = added
1996 1990 R = removed
1997 1991 ? = not tracked
1998 1992 """
1999 1993
2000 1994 files, matchfn, anypats, cwd = matchpats(repo, pats, opts)
2001 1995 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
2002 1996 for n in repo.changes(files=files, match=matchfn)]
2003 1997
2004 1998 changetypes = [(_('modified'), 'M', c),
2005 1999 (_('added'), 'A', a),
2006 2000 (_('removed'), 'R', d),
2007 2001 (_('unknown'), '?', u)]
2008 2002
2009 2003 end = opts['print0'] and '\0' or '\n'
2010 2004
2011 2005 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2012 2006 or changetypes):
2013 2007 if opts['no_status']:
2014 2008 format = "%%s%s" % end
2015 2009 else:
2016 2010 format = "%s %%s%s" % (char, end)
2017 2011
2018 2012 for f in changes:
2019 2013 ui.write(format % f)
2020 2014
2021 2015 def tag(ui, repo, name, rev=None, **opts):
2022 2016 """add a tag for the current tip or a given revision
2023 2017
2024 2018 Name a particular revision using <name>.
2025 2019
2026 2020 Tags are used to name particular revisions of the repository and are
2027 2021 very useful to compare different revisions, to go back to significant
2028 2022 earlier versions or to mark branch points as releases, etc.
2029 2023
2030 2024 If no revision is given, the tip is used.
2031 2025
2032 2026 To facilitate version control, distribution, and merging of tags,
2033 2027 they are stored as a file named ".hgtags" which is managed
2034 2028 similarly to other project files and can be hand-edited if
2035 2029 necessary.
2036 2030 """
2037 2031 if name == "tip":
2038 2032 raise util.Abort(_("the name 'tip' is reserved"))
2039 2033 if 'rev' in opts:
2040 2034 rev = opts['rev']
2041 2035 if rev:
2042 2036 r = hex(repo.lookup(rev))
2043 2037 else:
2044 2038 r = hex(repo.changelog.tip())
2045 2039
2046 2040 disallowed = (revrangesep, '\r', '\n')
2047 2041 for c in disallowed:
2048 2042 if name.find(c) >= 0:
2049 2043 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2050 2044
2051 2045 if opts['local']:
2052 2046 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2053 2047 return
2054 2048
2055 2049 (c, a, d, u) = repo.changes()
2056 2050 for x in (c, a, d, u):
2057 2051 if ".hgtags" in x:
2058 2052 raise util.Abort(_("working copy of .hgtags is changed "
2059 2053 "(please commit .hgtags manually)"))
2060 2054
2061 2055 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2062 2056 if repo.dirstate.state(".hgtags") == '?':
2063 2057 repo.add([".hgtags"])
2064 2058
2065 2059 message = (opts['message'] or
2066 2060 _("Added tag %s for changeset %s") % (name, r))
2067 2061 try:
2068 2062 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2069 2063 except ValueError, inst:
2070 2064 raise util.Abort(str(inst))
2071 2065
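# illustrative examples (editor's note, not part of this changeset):
#   hg tag -r 0 v0.1     # commits an entry "<node-hex> v0.1" to .hgtags
#   hg tag -l snapshot   # local only: appended to .hg/localtags, never committed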
2072 2066 def tags(ui, repo):
2073 2067 """list repository tags
2074 2068
2075 2069 List the repository tags.
2076 2070
2077 2071 This lists both regular and local tags.
2078 2072 """
2079 2073
2080 2074 l = repo.tagslist()
2081 2075 l.reverse()
2082 2076 for t, n in l:
2083 2077 try:
2084 2078 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2085 2079 except KeyError:
2086 2080 r = " ?:?"
2087 2081 ui.write("%-30s %s\n" % (t, r))
2088 2082
2089 2083 def tip(ui, repo):
2090 2084 """show the tip revision
2091 2085
2092 2086 Show the tip revision.
2093 2087 """
2094 2088 n = repo.changelog.tip()
2095 2089 show_changeset(ui, repo, changenode=n)
2096 2090
2097 2091 def unbundle(ui, repo, fname):
2098 2092 """apply a changegroup file
2099 2093
2100 2094 Apply a compressed changegroup file generated by the bundle
2101 2095 command.
2102 2096 """
2103 2097 f = urllib.urlopen(fname)
2104 2098
2105 2099 if f.read(4) != "HG10":
2106 2100 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2107 2101
2108 2102 def bzgenerator(f):
2109 2103 zd = bz2.BZ2Decompressor()
2110 2104 for chunk in f:
2111 2105 yield zd.decompress(chunk)
2112 2106
2113 2107 bzgen = bzgenerator(util.filechunkiter(f, 4096))
2114 2108 repo.addchangegroup(util.chunkbuffer(bzgen))
2115 2109
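# editor's annotation: as implemented above, a bundle file is the 4-byte "HG10"
# magic followed by a bz2-compressed changegroup stream; unbundle decompresses it
# chunk by chunk and hands the result to repo.addchangegroup().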
2116 2110 def undo(ui, repo):
2117 2111 """undo the last commit or pull
2118 2112
2119 2113 Roll back the last pull or commit transaction on the
2120 2114 repository, restoring the project to its earlier state.
2121 2115
2122 2116 This command should be used with care. There is only one level of
2123 2117 undo and there is no redo.
2124 2118
2125 2119 This command is not intended for use on public repositories. Once
2126 2120 a change is visible for pull by other users, undoing it locally is
2127 2121 ineffective.
2128 2122 """
2129 2123 repo.undo()
2130 2124
2131 2125 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
2132 2126 """update or merge working directory
2133 2127
2134 2128 Update the working directory to the specified revision.
2135 2129
2136 2130 If there are no outstanding changes in the working directory and
2137 2131 there is a linear relationship between the current version and the
2138 2132 requested version, the result is the requested version.
2139 2133
2140 2134 Otherwise the result is a merge between the contents of the
2141 2135 current working directory and the requested version. Files that
2142 2136 changed between either parent are marked as changed for the next
2143 2137 commit and a commit must be performed before any further updates
2144 2138 are allowed.
2145 2139
2146 2140 By default, update will refuse to run if doing so would require
2147 2141 merging or discarding local changes.
2148 2142 """
2149 2143 if branch:
2150 2144 br = repo.branchlookup(branch=branch)
2151 2145 found = []
2152 2146 for x in br:
2153 2147 if branch in br[x]:
2154 2148 found.append(x)
2155 2149 if len(found) > 1:
2156 2150 ui.warn(_("Found multiple heads for %s\n") % branch)
2157 2151 for x in found:
2158 2152 show_changeset(ui, repo, changenode=x, brinfo=br)
2159 2153 return 1
2160 2154 if len(found) == 1:
2161 2155 node = found[0]
2162 2156 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2163 2157 else:
2164 2158 ui.warn(_("branch %s not found\n") % (branch))
2165 2159 return 1
2166 2160 else:
2167 2161 node = node and repo.lookup(node) or repo.changelog.tip()
2168 2162 return repo.update(node, allow=merge, force=clean)
2169 2163
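# editor's annotation: with -b/--branch, update resolves the head(s) carrying that
# branch name via branchlookup; if several heads match, it lists them and returns
# an error instead of picking one.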
2170 2164 def verify(ui, repo):
2171 2165 """verify the integrity of the repository
2172 2166
2173 2167 Verify the integrity of the current repository.
2174 2168
2175 2169 This will perform an extensive check of the repository's
2176 2170 integrity, validating the hashes and checksums of each entry in
2177 2171 the changelog, manifest, and tracked files, as well as the
2178 2172 integrity of their crosslinks and indices.
2179 2173 """
2180 2174 return repo.verify()
2181 2175
2182 2176 # Command options and aliases are listed here, alphabetically
2183 2177
2184 2178 table = {
2185 2179 "^add":
2186 2180 (add,
2187 2181 [('I', 'include', [], _('include names matching the given patterns')),
2188 2182 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2189 2183 "hg add [OPTION]... [FILE]..."),
2190 2184 "addremove":
2191 2185 (addremove,
2192 2186 [('I', 'include', [], _('include names matching the given patterns')),
2193 2187 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2194 2188 "hg addremove [OPTION]... [FILE]..."),
2195 2189 "^annotate":
2196 2190 (annotate,
2197 2191 [('r', 'rev', '', _('annotate the specified revision')),
2198 2192 ('a', 'text', None, _('treat all files as text')),
2199 2193 ('u', 'user', None, _('list the author')),
2200 2194 ('d', 'date', None, _('list the date')),
2201 2195 ('n', 'number', None, _('list the revision number (default)')),
2202 2196 ('c', 'changeset', None, _('list the changeset')),
2203 2197 ('I', 'include', [], _('include names matching the given patterns')),
2204 2198 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2205 2199 _('hg annotate [OPTION]... FILE...')),
2206 2200 "bundle":
2207 2201 (bundle,
2208 2202 [],
2209 2203 _('hg bundle FILE DEST')),
2210 2204 "cat":
2211 2205 (cat,
2212 2206 [('I', 'include', [], _('include names matching the given patterns')),
2213 2207 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2214 2208 ('o', 'output', "", _('print output to file with formatted name')),
2215 2209 ('r', 'rev', '', _('print the given revision'))],
2216 2210 _('hg cat [OPTION]... FILE...')),
2217 2211 "^clone":
2218 2212 (clone,
2219 2213 [('U', 'noupdate', None, _('do not update the new working directory')),
2220 2214 ('e', 'ssh', "", _('specify ssh command to use')),
2221 2215 ('', 'pull', None, _('use pull protocol to copy metadata')),
2222 2216 ('r', 'rev', [], _('a changeset you would like to have after cloning')),
2223 2217 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2224 2218 _('hg clone [OPTION]... SOURCE [DEST]')),
2225 2219 "^commit|ci":
2226 2220 (commit,
2227 2221 [('A', 'addremove', None, _('run addremove during commit')),
2228 2222 ('I', 'include', [], _('include names matching the given patterns')),
2229 2223 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2230 2224 ('m', 'message', "", _('use <text> as commit message')),
2231 2225 ('l', 'logfile', "", _('read the commit message from <file>')),
2232 2226 ('d', 'date', "", _('record datecode as commit date')),
2233 2227 ('u', 'user', "", _('record user as committer'))],
2234 2228 _('hg commit [OPTION]... [FILE]...')),
2235 2229 "copy|cp": (copy,
2236 2230 [('I', 'include', [], _('include names matching the given patterns')),
2237 2231 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2238 2232 ('A', 'after', None, _('record a copy that has already occurred')),
2239 2233 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2240 2234 _('hg copy [OPTION]... [SOURCE]... DEST')),
2241 2235 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2242 2236 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2243 2237 "debugconfig": (debugconfig, [], _('debugconfig')),
2244 2238 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2245 2239 "debugstate": (debugstate, [], _('debugstate')),
2246 2240 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2247 2241 "debugindex": (debugindex, [], _('debugindex FILE')),
2248 2242 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2249 2243 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2250 2244 "debugwalk":
2251 2245 (debugwalk,
2252 2246 [('I', 'include', [], _('include names matching the given patterns')),
2253 2247 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2254 2248 _('debugwalk [OPTION]... [FILE]...')),
2255 2249 "^diff":
2256 2250 (diff,
2257 2251 [('r', 'rev', [], _('revision')),
2258 2252 ('a', 'text', None, _('treat all files as text')),
2259 2253 ('I', 'include', [], _('include names matching the given patterns')),
2260 2254 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2261 2255 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2262 2256 "^export":
2263 2257 (export,
2264 2258 [('o', 'output', "", _('print output to file with formatted name')),
2265 2259 ('a', 'text', None, _('treat all files as text'))],
2266 2260 "hg export [-a] [-o OUTFILE] REV..."),
2267 2261 "forget":
2268 2262 (forget,
2269 2263 [('I', 'include', [], _('include names matching the given patterns')),
2270 2264 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2271 2265 "hg forget [OPTION]... FILE..."),
2272 2266 "grep":
2273 2267 (grep,
2274 2268 [('0', 'print0', None, _('end fields with NUL')),
2275 2269 ('I', 'include', [], _('include names matching the given patterns')),
2276 2270 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2277 2271 ('', 'all', None, _('print all revisions that match')),
2278 2272 ('i', 'ignore-case', None, _('ignore case when matching')),
2279 2273 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2280 2274 ('n', 'line-number', None, _('print matching line numbers')),
2281 2275 ('r', 'rev', [], _('search in given revision range')),
2282 2276 ('u', 'user', None, _('print user who committed change'))],
2283 2277 "hg grep [OPTION]... PATTERN [FILE]..."),
2284 2278 "heads":
2285 2279 (heads,
2286 2280 [('b', 'branches', None, _('find branch info')),
2287 2281 ('r', 'rev', "", _('show only heads which are descendants of rev'))],
2288 2282 _('hg heads [-b] [-r <rev>]')),
2289 2283 "help": (help_, [], _('hg help [COMMAND]')),
2290 2284 "identify|id": (identify, [], _('hg identify')),
2291 2285 "import|patch":
2292 2286 (import_,
2293 2287 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2294 2288 _('meaning as the corresponding patch option')),
2295 2289 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2296 2290 ('b', 'base', "", _('base path'))],
2297 2291 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2298 2292 "incoming|in": (incoming,
2299 2293 [('M', 'no-merges', None, _("do not show merges")),
2300 2294 ('p', 'patch', None, _('show patch')),
2301 2295 ('n', 'newest-first', None, _('show newest record first'))],
2302 2296 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2303 2297 "^init": (init, [], _('hg init [DEST]')),
2304 2298 "locate":
2305 2299 (locate,
2306 2300 [('r', 'rev', '', _('search the repository as it stood at rev')),
2307 2301 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2308 2302 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2309 2303 ('I', 'include', [], _('include names matching the given patterns')),
2310 2304 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2311 2305 _('hg locate [OPTION]... [PATTERN]...')),
2312 2306 "^log|history":
2313 2307 (log,
2314 2308 [('I', 'include', [], _('include names matching the given patterns')),
2315 2309 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2316 2310 ('b', 'branch', None, _('show branches')),
2317 2311 ('k', 'keyword', [], _('search for a keyword')),
2318 2312 ('r', 'rev', [], _('show the specified revision or range')),
2319 2313 ('M', 'no-merges', None, _("do not show merges")),
2320 2314 ('m', 'only-merges', None, _("show only merges")),
2321 2315 ('p', 'patch', None, _('show patch'))],
2322 2316 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2323 2317 "manifest": (manifest, [], _('hg manifest [REV]')),
2324 2318 "outgoing|out": (outgoing,
2325 2319 [('M', 'no-merges', None, _("do not show merges")),
2326 2320 ('p', 'patch', None, _('show patch')),
2327 2321 ('n', 'newest-first', None, _('show newest record first'))],
2328 2322 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2329 2323 "^parents": (parents, [], _('hg parents [REV]')),
2330 2324 "paths": (paths, [], _('hg paths [NAME]')),
2331 2325 "^pull":
2332 2326 (pull,
2333 2327 [('u', 'update', None, _('update the working directory to tip after pull')),
2334 2328 ('e', 'ssh', "", _('specify ssh command to use')),
2335 2329 ('r', 'rev', [], _('a specific revision you would like to pull')),
2336 2330 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2337 2331 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2338 2332 "^push":
2339 2333 (push,
2340 2334 [('f', 'force', None, _('force push')),
2341 2335 ('e', 'ssh', "", _('specify ssh command to use')),
2342 2336 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2343 2337 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2344 2338 "rawcommit":
2345 2339 (rawcommit,
2346 2340 [('p', 'parent', [], _('parent')),
2347 2341 ('d', 'date', "", _('date code')),
2348 2342 ('u', 'user', "", _('user')),
2349 2343 ('F', 'files', "", _('file list')),
2350 2344 ('m', 'message', "", _('commit message')),
2351 2345 ('l', 'logfile', "", _('commit message file'))],
2352 2346 _('hg rawcommit [OPTION]... [FILE]...')),
2353 2347 "recover": (recover, [], _("hg recover")),
2354 2348 "^remove|rm": (remove,
2355 2349 [('I', 'include', [], _('include names matching the given patterns')),
2356 2350 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2357 2351 _("hg remove [OPTION]... FILE...")),
2358 2352 "rename|mv": (rename,
2359 2353 [('I', 'include', [], _('include names matching the given patterns')),
2360 2354 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2361 2355 ('A', 'after', None, _('record a rename that has already occurred')),
2362 2356 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2363 2357 _('hg rename [OPTION]... [SOURCE]... DEST')),
2364 2358 "^revert":
2365 2359 (revert,
2366 2360 [('I', 'include', [], _('include names matching the given patterns')),
2367 2361 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2368 2362 ("r", "rev", "", _("revision to revert to"))],
2369 2363 _("hg revert [-r REV] [NAME]...")),
2370 2364 "root": (root, [], _("hg root")),
2371 2365 "^serve":
2372 2366 (serve,
2373 2367 [('A', 'accesslog', '', _('name of access log file to write to')),
2374 2368 ('E', 'errorlog', '', _('name of error log file to write to')),
2375 2369 ('p', 'port', 0, _('port to use (default: 8000)')),
2376 2370 ('a', 'address', '', _('address to use')),
2377 2371 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2378 2372 ('', 'stdio', None, _('for remote clients')),
2379 2373 ('t', 'templates', "", _('web templates to use')),
2380 2374 ('', 'style', "", _('template style to use')),
2381 2375 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2382 2376 _("hg serve [OPTION]...")),
2383 2377 "^status|st":
2384 2378 (status,
2385 2379 [('m', 'modified', None, _('show only modified files')),
2386 2380 ('a', 'added', None, _('show only added files')),
2387 2381 ('r', 'removed', None, _('show only removed files')),
2388 2382 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2389 2383 ('n', 'no-status', None, _('hide status prefix')),
2390 2384 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2391 2385 ('I', 'include', [], _('include names matching the given patterns')),
2392 2386 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2393 2387 _("hg status [OPTION]... [FILE]...")),
2394 2388 "tag":
2395 2389 (tag,
2396 2390 [('l', 'local', None, _('make the tag local')),
2397 2391 ('m', 'message', "", _('message for tag commit log entry')),
2398 2392 ('d', 'date', "", _('record datecode as commit date')),
2399 2393 ('u', 'user', "", _('record user as committer')),
2400 2394 ('r', 'rev', "", _('revision to tag'))],
2401 2395 _('hg tag [OPTION]... NAME [REV]')),
2402 2396 "tags": (tags, [], _('hg tags')),
2403 2397 "tip": (tip, [], _('hg tip')),
2404 2398 "unbundle":
2405 2399 (unbundle,
2406 2400 [],
2407 2401 _('hg unbundle FILE')),
2408 2402 "undo": (undo, [], _('hg undo')),
2409 2403 "^update|up|checkout|co":
2410 2404 (update,
2411 2405 [('b', 'branch', "", _('checkout the head of a specific branch')),
2412 2406 ('m', 'merge', None, _('allow merging of branches')),
2413 2407 ('C', 'clean', None, _('overwrite locally modified files'))],
2414 2408 _('hg update [-b TAG] [-m] [-C] [REV]')),
2415 2409 "verify": (verify, [], _('hg verify')),
2416 2410 "version": (show_version, [], _('hg version')),
2417 2411 }
2418 2412
2419 2413 globalopts = [
2420 2414 ('R', 'repository', "", _("repository root directory")),
2421 2415 ('', 'cwd', '', _("change working directory")),
2422 2416 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2423 2417 ('q', 'quiet', None, _("suppress output")),
2424 2418 ('v', 'verbose', None, _("enable additional output")),
2425 2419 ('', 'debug', None, _("enable debugging output")),
2426 2420 ('', 'debugger', None, _("start debugger")),
2427 2421 ('', 'traceback', None, _("print traceback on exception")),
2428 2422 ('', 'time', None, _("time how long the command takes")),
2429 2423 ('', 'profile', None, _("print command execution profile")),
2430 2424 ('', 'version', None, _("output version information and exit")),
2431 2425 ('h', 'help', None, _("display help and exit")),
2432 2426 ]
2433 2427
2434 2428 norepo = ("clone init version help debugancestor debugconfig debugdata"
2435 2429 " debugindex debugindexdot paths")
2436 2430
2437 2431 def find(cmd):
2438 2432 """Return (aliases, command table entry) for command string."""
2439 2433 choice = None
2440 2434 for e in table.keys():
2441 2435 aliases = e.lstrip("^").split("|")
2442 2436 if cmd in aliases:
2443 2437 return aliases, table[e]
2444 2438 for a in aliases:
2445 2439 if a.startswith(cmd):
2446 2440 if choice:
2447 2441 raise AmbiguousCommand(cmd)
2448 2442 else:
2449 2443 choice = aliases, table[e]
2450 2444 break
2451 2445 if choice:
2452 2446 return choice
2453 2447
2454 2448 raise UnknownCommand(cmd)
2455 2449
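# editor's annotation: with the table above, "stat" resolves to status as an
# unambiguous prefix, while a prefix such as "de" matches several debug* commands
# and raises AmbiguousCommand.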
2456 2450 class SignalInterrupt(Exception):
2457 2451 """Exception raised on SIGTERM and SIGHUP."""
2458 2452
2459 2453 def catchterm(*args):
2460 2454 raise SignalInterrupt
2461 2455
2462 2456 def run():
2463 2457 sys.exit(dispatch(sys.argv[1:]))
2464 2458
2465 2459 class ParseError(Exception):
2466 2460 """Exception raised on errors in parsing the command line."""
2467 2461
2468 2462 def parse(ui, args):
2469 2463 options = {}
2470 2464 cmdoptions = {}
2471 2465
2472 2466 try:
2473 2467 args = fancyopts.fancyopts(args, globalopts, options)
2474 2468 except fancyopts.getopt.GetoptError, inst:
2475 2469 raise ParseError(None, inst)
2476 2470
2477 2471 if args:
2478 2472 cmd, args = args[0], args[1:]
2479 2473 aliases, i = find(cmd)
2480 2474 cmd = aliases[0]
2481 2475 defaults = ui.config("defaults", cmd)
2482 2476 if defaults:
2483 2477 args = defaults.split() + args
2484 2478 c = list(i[1])
2485 2479 else:
2486 2480 cmd = None
2487 2481 c = []
2488 2482
2489 2483 # combine global options into local
2490 2484 for o in globalopts:
2491 2485 c.append((o[0], o[1], options[o[1]], o[3]))
2492 2486
2493 2487 try:
2494 2488 args = fancyopts.fancyopts(args, c, cmdoptions)
2495 2489 except fancyopts.getopt.GetoptError, inst:
2496 2490 raise ParseError(cmd, inst)
2497 2491
2498 2492 # separate global options back out
2499 2493 for o in globalopts:
2500 2494 n = o[1]
2501 2495 options[n] = cmdoptions[n]
2502 2496 del cmdoptions[n]
2503 2497
2504 2498 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2505 2499
2506 2500 def dispatch(args):
2507 2501 signal.signal(signal.SIGTERM, catchterm)
2508 2502 try:
2509 2503 signal.signal(signal.SIGHUP, catchterm)
2510 2504 except AttributeError:
2511 2505 pass
2512 2506
2513 2507 try:
2514 2508 u = ui.ui()
2515 2509 except util.Abort, inst:
2516 2510 sys.stderr.write(_("abort: %s\n") % inst)
2517 2511 sys.exit(1)
2518 2512
2519 2513 external = []
2520 2514 for x in u.extensions():
2521 2515 def on_exception(exc, inst):
2522 2516 u.warn(_("*** failed to import extension %s\n") % x[1])
2523 2517 u.warn("%s\n" % inst)
2524 2518 if "--traceback" in sys.argv[1:]:
2525 2519 traceback.print_exc()
2526 2520 if x[1]:
2527 2521 try:
2528 2522 mod = imp.load_source(x[0], x[1])
2529 2523 except Exception, inst:
2530 2524 on_exception(Exception, inst)
2531 2525 continue
2532 2526 else:
2533 2527 def importh(name):
2534 2528 mod = __import__(name)
2535 2529 components = name.split('.')
2536 2530 for comp in components[1:]:
2537 2531 mod = getattr(mod, comp)
2538 2532 return mod
2539 2533 try:
2540 2534 mod = importh(x[0])
2541 2535 except Exception, inst:
2542 2536 on_exception(Exception, inst)
2543 2537 continue
2544 2538
2545 2539 external.append(mod)
2546 2540 for x in external:
2547 2541 cmdtable = getattr(x, 'cmdtable', {})
2548 2542 for t in cmdtable:
2549 2543 if t in table:
2550 2544 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2551 2545 table.update(cmdtable)
2552 2546
2553 2547 try:
2554 2548 cmd, func, args, options, cmdoptions = parse(u, args)
2555 2549 except ParseError, inst:
2556 2550 if inst.args[0]:
2557 2551 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2558 2552 help_(u, inst.args[0])
2559 2553 else:
2560 2554 u.warn(_("hg: %s\n") % inst.args[1])
2561 2555 help_(u, 'shortlist')
2562 2556 sys.exit(-1)
2563 2557 except AmbiguousCommand, inst:
2564 2558 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2565 2559 sys.exit(1)
2566 2560 except UnknownCommand, inst:
2567 2561 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2568 2562 help_(u, 'shortlist')
2569 2563 sys.exit(1)
2570 2564
2571 2565 if options["time"]:
2572 2566 def get_times():
2573 2567 t = os.times()
2574 2568 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2575 2569 t = (t[0], t[1], t[2], t[3], time.clock())
2576 2570 return t
2577 2571 s = get_times()
2578 2572 def print_time():
2579 2573 t = get_times()
2580 2574 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2581 2575 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2582 2576 atexit.register(print_time)
2583 2577
2584 2578 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2585 2579 not options["noninteractive"])
2586 2580
2587 2581 # enter the debugger before command execution
2588 2582 if options['debugger']:
2589 2583 pdb.set_trace()
2590 2584
2591 2585 try:
2592 2586 try:
2593 2587 if options['help']:
2594 2588 help_(u, cmd, options['version'])
2595 2589 sys.exit(0)
2596 2590 elif options['version']:
2597 2591 show_version(u)
2598 2592 sys.exit(0)
2599 2593 elif not cmd:
2600 2594 help_(u, 'shortlist')
2601 2595 sys.exit(0)
2602 2596
2603 2597 if options['cwd']:
2604 2598 try:
2605 2599 os.chdir(options['cwd'])
2606 2600 except OSError, inst:
2607 2601 raise util.Abort('%s: %s' %
2608 2602 (options['cwd'], inst.strerror))
2609 2603
2610 2604 if cmd not in norepo.split():
2611 2605 path = options["repository"] or ""
2612 2606 repo = hg.repository(ui=u, path=path)
2613 2607 for x in external:
2614 2608 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2615 2609 d = lambda: func(u, repo, *args, **cmdoptions)
2616 2610 else:
2617 2611 d = lambda: func(u, *args, **cmdoptions)
2618 2612
2619 2613 if options['profile']:
2620 2614 import hotshot, hotshot.stats
2621 2615 prof = hotshot.Profile("hg.prof")
2622 2616 r = prof.runcall(d)
2623 2617 prof.close()
2624 2618 stats = hotshot.stats.load("hg.prof")
2625 2619 stats.strip_dirs()
2626 2620 stats.sort_stats('time', 'calls')
2627 2621 stats.print_stats(40)
2628 2622 return r
2629 2623 else:
2630 2624 return d()
2631 2625 except:
2632 2626 # enter the debugger when we hit an exception
2633 2627 if options['debugger']:
2634 2628 pdb.post_mortem(sys.exc_info()[2])
2635 2629 if options['traceback']:
2636 2630 traceback.print_exc()
2637 2631 raise
2638 2632 except hg.RepoError, inst:
2639 2633 u.warn(_("abort: "), inst, "!\n")
2640 2634 except revlog.RevlogError, inst:
2641 2635 u.warn(_("abort: "), inst, "!\n")
2642 2636 except SignalInterrupt:
2643 2637 u.warn(_("killed!\n"))
2644 2638 except KeyboardInterrupt:
2645 2639 try:
2646 2640 u.warn(_("interrupted!\n"))
2647 2641 except IOError, inst:
2648 2642 if inst.errno == errno.EPIPE:
2649 2643 if u.debugflag:
2650 2644 u.warn(_("\nbroken pipe\n"))
2651 2645 else:
2652 2646 raise
2653 2647 except IOError, inst:
2654 2648 if hasattr(inst, "code"):
2655 2649 u.warn(_("abort: %s\n") % inst)
2656 2650 elif hasattr(inst, "reason"):
2657 2651 u.warn(_("abort: error: %s\n") % inst.reason[1])
2658 2652 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2659 2653 if u.debugflag:
2660 2654 u.warn(_("broken pipe\n"))
2661 2655 elif getattr(inst, "strerror", None):
2662 2656 if getattr(inst, "filename", None):
2663 2657 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2664 2658 else:
2665 2659 u.warn(_("abort: %s\n") % inst.strerror)
2666 2660 else:
2667 2661 raise
2668 2662 except OSError, inst:
2669 2663 if hasattr(inst, "filename"):
2670 2664 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2671 2665 else:
2672 2666 u.warn(_("abort: %s\n") % inst.strerror)
2673 2667 except util.Abort, inst:
2674 2668 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2675 2669 sys.exit(1)
2676 2670 except TypeError, inst:
2677 2671 # was this an argument error?
2678 2672 tb = traceback.extract_tb(sys.exc_info()[2])
2679 2673 if len(tb) > 2: # no
2680 2674 raise
2681 2675 u.debug(inst, "\n")
2682 2676 u.warn(_("%s: invalid arguments\n") % cmd)
2683 2677 help_(u, cmd)
2684 2678 except AmbiguousCommand, inst:
2685 2679 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2686 2680 help_(u, 'shortlist')
2687 2681 except UnknownCommand, inst:
2688 2682 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2689 2683 help_(u, 'shortlist')
2690 2684 except SystemExit:
2691 2685 # don't catch this in the catch-all below
2692 2686 raise
2693 2687 except:
2694 2688 u.warn(_("** unknown exception encountered, details follow\n"))
2695 2689 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2696 2690 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2697 2691 % version.get_version())
2698 2692 raise
2699 2693
2700 2694 sys.exit(-1)
@@ -1,1780 +1,1786 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14 14
15 15 class localrepository(object):
16 16 def __init__(self, ui, path=None, create=0):
17 17 if not path:
18 18 p = os.getcwd()
19 19 while not os.path.isdir(os.path.join(p, ".hg")):
20 20 oldp = p
21 21 p = os.path.dirname(p)
22 22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 23 path = p
24 24 self.path = os.path.join(path, ".hg")
25 25
26 26 if not create and not os.path.isdir(self.path):
27 27 raise repo.RepoError(_("repository %s not found") % self.path)
28 28
29 29 self.root = os.path.abspath(path)
30 30 self.ui = ui
31 31 self.opener = util.opener(self.path)
32 32 self.wopener = util.opener(self.root)
33 33 self.manifest = manifest.manifest(self.opener)
34 34 self.changelog = changelog.changelog(self.opener)
35 35 self.tagscache = None
36 36 self.nodetagscache = None
37 37 self.encodepats = None
38 38 self.decodepats = None
39 39
40 40 if create:
41 41 os.mkdir(self.path)
42 42 os.mkdir(self.join("data"))
43 43
44 44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 45 try:
46 46 self.ui.readconfig(self.join("hgrc"))
47 47 except IOError: pass
48 48
49 49 def hook(self, name, **args):
50 50 def runhook(name, cmd):
51 51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
52 52 old = {}
53 53 for k, v in args.items():
54 54 k = k.upper()
55 55 old[k] = os.environ.get(k, None)
56 56 os.environ[k] = v
57 57
58 58 # Hooks run in the repository root
59 59 olddir = os.getcwd()
60 60 os.chdir(self.root)
61 61 r = os.system(cmd)
62 62 os.chdir(olddir)
63 63
64 64 for k, v in old.items():
65 65 if v != None:
66 66 os.environ[k] = v
67 67 else:
68 68 del os.environ[k]
69 69
70 70 if r:
71 71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 72 (name, r))
73 73 return False
74 74 return True
75 75
76 76 r = True
77 77 for hname, cmd in self.ui.configitems("hooks"):
78 78 s = hname.split(".")
79 79 if s[0] == name and cmd:
80 80 r = runhook(hname, cmd) and r
81 81 return r
82 82
83 83 def tags(self):
84 84 '''return a mapping of tag to node'''
85 85 if not self.tagscache:
86 86 self.tagscache = {}
87 87 def addtag(self, k, n):
88 88 try:
89 89 bin_n = bin(n)
90 90 except TypeError:
91 91 bin_n = ''
92 92 self.tagscache[k.strip()] = bin_n
93 93
94 94 try:
95 95 # read each head of the tags file, ending with the tip
96 96 # and add each tag found to the map, with "newer" ones
97 97 # taking precedence
98 98 fl = self.file(".hgtags")
99 99 h = fl.heads()
100 100 h.reverse()
101 101 for r in h:
102 102 for l in fl.read(r).splitlines():
103 103 if l:
104 104 n, k = l.split(" ", 1)
105 105 addtag(self, k, n)
106 106 except KeyError:
107 107 pass
108 108
109 109 try:
110 110 f = self.opener("localtags")
111 111 for l in f:
112 112 n, k = l.split(" ", 1)
113 113 addtag(self, k, n)
114 114 except IOError:
115 115 pass
116 116
117 117 self.tagscache['tip'] = self.changelog.tip()
118 118
119 119 return self.tagscache
120 120
121 121 def tagslist(self):
122 122 '''return a list of tags ordered by revision'''
123 123 l = []
124 124 for t, n in self.tags().items():
125 125 try:
126 126 r = self.changelog.rev(n)
127 127 except:
128 128 r = -2 # sort to the beginning of the list if unknown
129 129 l.append((r,t,n))
130 130 l.sort()
131 131 return [(t,n) for r,t,n in l]
132 132
133 133 def nodetags(self, node):
134 134 '''return the tags associated with a node'''
135 135 if not self.nodetagscache:
136 136 self.nodetagscache = {}
137 137 for t,n in self.tags().items():
138 138 self.nodetagscache.setdefault(n,[]).append(t)
139 139 return self.nodetagscache.get(node, [])
140 140
141 141 def lookup(self, key):
142 142 try:
143 143 return self.tags()[key]
144 144 except KeyError:
145 145 try:
146 146 return self.changelog.lookup(key)
147 147 except:
148 148 raise repo.RepoError(_("unknown revision '%s'") % key)
149 149
150 150 def dev(self):
151 151 return os.stat(self.path).st_dev
152 152
153 153 def local(self):
154 154 return True
155 155
156 156 def join(self, f):
157 157 return os.path.join(self.path, f)
158 158
159 159 def wjoin(self, f):
160 160 return os.path.join(self.root, f)
161 161
162 162 def file(self, f):
163 163 if f[0] == '/': f = f[1:]
164 164 return filelog.filelog(self.opener, f)
165 165
166 166 def getcwd(self):
167 167 return self.dirstate.getcwd()
168 168
169 169 def wfile(self, f, mode='r'):
170 170 return self.wopener(f, mode)
171 171
172 172 def wread(self, filename):
173 173 if self.encodepats == None:
174 174 l = []
175 175 for pat, cmd in self.ui.configitems("encode"):
176 176 mf = util.matcher("", "/", [pat], [], [])[1]
177 177 l.append((mf, cmd))
178 178 self.encodepats = l
179 179
180 180 data = self.wopener(filename, 'r').read()
181 181
182 182 for mf, cmd in self.encodepats:
183 183 if mf(filename):
184 184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
185 185 data = util.filter(data, cmd)
186 186 break
187 187
188 188 return data
189 189
190 190 def wwrite(self, filename, data, fd=None):
191 191 if self.decodepats == None:
192 192 l = []
193 193 for pat, cmd in self.ui.configitems("decode"):
194 194 mf = util.matcher("", "/", [pat], [], [])[1]
195 195 l.append((mf, cmd))
196 196 self.decodepats = l
197 197
198 198 for mf, cmd in self.decodepats:
199 199 if mf(filename):
200 200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
201 201 data = util.filter(data, cmd)
202 202 break
203 203
204 204 if fd:
205 205 return fd.write(data)
206 206 return self.wopener(filename, 'w').write(data)
207 207
208 208 def transaction(self):
209 209 # save dirstate for undo
210 210 try:
211 211 ds = self.opener("dirstate").read()
212 212 except IOError:
213 213 ds = ""
214 214 self.opener("journal.dirstate", "w").write(ds)
215 215
216 216 def after():
217 217 util.rename(self.join("journal"), self.join("undo"))
218 218 util.rename(self.join("journal.dirstate"),
219 219 self.join("undo.dirstate"))
220 220
221 221 return transaction.transaction(self.ui.warn, self.opener,
222 222 self.join("journal"), after)
223 223
224 224 def recover(self):
225 225 lock = self.lock()
226 226 if os.path.exists(self.join("journal")):
227 227 self.ui.status(_("rolling back interrupted transaction\n"))
228 228 transaction.rollback(self.opener, self.join("journal"))
229 229 return True
230 230 else:
231 231 self.ui.warn(_("no interrupted transaction available\n"))
232 232 return False
233 233
234 234 def undo(self):
235 235 wlock = self.wlock()
236 236 lock = self.lock()
237 237 if os.path.exists(self.join("undo")):
238 238 self.ui.status(_("rolling back last transaction\n"))
239 239 transaction.rollback(self.opener, self.join("undo"))
240 240 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
241 241 self.dirstate.read()
242 242 else:
243 243 self.ui.warn(_("no undo information available\n"))
244 244
245 245 def lock(self, wait=1):
246 246 try:
247 247 return lock.lock(self.join("lock"), 0)
248 248 except lock.LockHeld, inst:
249 249 if wait:
250 250 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
251 251 return lock.lock(self.join("lock"), wait)
252 252 raise inst
253 253
254 254 def wlock(self, wait=1):
255 255 try:
256 256 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
257 257 except lock.LockHeld, inst:
258 258 if not wait:
259 259 raise inst
260 260 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
261 261 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
262 262 self.dirstate.read()
263 263 return wlock
264 264
265 265 def rawcommit(self, files, text, user, date, p1=None, p2=None):
266 266 orig_parent = self.dirstate.parents()[0] or nullid
267 267 p1 = p1 or self.dirstate.parents()[0] or nullid
268 268 p2 = p2 or self.dirstate.parents()[1] or nullid
269 269 c1 = self.changelog.read(p1)
270 270 c2 = self.changelog.read(p2)
271 271 m1 = self.manifest.read(c1[0])
272 272 mf1 = self.manifest.readflags(c1[0])
273 273 m2 = self.manifest.read(c2[0])
274 274 changed = []
275 275
276 276 if orig_parent == p1:
277 277 update_dirstate = 1
278 278 else:
279 279 update_dirstate = 0
280 280
281 281 wlock = self.wlock()
282 282 lock = self.lock()
283 283 tr = self.transaction()
284 284 mm = m1.copy()
285 285 mfm = mf1.copy()
286 286 linkrev = self.changelog.count()
287 287 for f in files:
288 288 try:
289 289 t = self.wread(f)
290 290 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
291 291 r = self.file(f)
292 292 mfm[f] = tm
293 293
294 294 fp1 = m1.get(f, nullid)
295 295 fp2 = m2.get(f, nullid)
296 296
297 297 # is the same revision on two branches of a merge?
298 298 if fp2 == fp1:
299 299 fp2 = nullid
300 300
301 301 if fp2 != nullid:
302 302 # is one parent an ancestor of the other?
303 303 fpa = r.ancestor(fp1, fp2)
304 304 if fpa == fp1:
305 305 fp1, fp2 = fp2, nullid
306 306 elif fpa == fp2:
307 307 fp2 = nullid
308 308
309 309 # is the file unmodified from the parent?
310 310 if t == r.read(fp1):
311 311 # record the proper existing parent in manifest
312 312 # no need to add a revision
313 313 mm[f] = fp1
314 314 continue
315 315
316 316 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
317 317 changed.append(f)
318 318 if update_dirstate:
319 319 self.dirstate.update([f], "n")
320 320 except IOError:
321 321 try:
322 322 del mm[f]
323 323 del mfm[f]
324 324 if update_dirstate:
325 325 self.dirstate.forget([f])
326 326 except:
327 327 # deleted from p2?
328 328 pass
329 329
330 330 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
331 331 user = user or self.ui.username()
332 332 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
333 333 tr.close()
334 334 if update_dirstate:
335 335 self.dirstate.setparents(n, nullid)
336 336
337 337 def commit(self, files = None, text = "", user = None, date = None,
338 338 match = util.always, force=False):
339 339 commit = []
340 340 remove = []
341 341 changed = []
342 342
343 343 if files:
344 344 for f in files:
345 345 s = self.dirstate.state(f)
346 346 if s in 'nmai':
347 347 commit.append(f)
348 348 elif s == 'r':
349 349 remove.append(f)
350 350 else:
351 351 self.ui.warn(_("%s not tracked!\n") % f)
352 352 else:
353 353 (c, a, d, u) = self.changes(match=match)
354 354 commit = c + a
355 355 remove = d
356 356
357 357 p1, p2 = self.dirstate.parents()
358 358 c1 = self.changelog.read(p1)
359 359 c2 = self.changelog.read(p2)
360 360 m1 = self.manifest.read(c1[0])
361 361 mf1 = self.manifest.readflags(c1[0])
362 362 m2 = self.manifest.read(c2[0])
363 363
364 364 if not commit and not remove and not force and p2 == nullid:
365 365 self.ui.status(_("nothing changed\n"))
366 366 return None
367 367
368 368 if not self.hook("precommit"):
369 369 return None
370 370
371 371 wlock = self.wlock()
372 372 lock = self.lock()
373 373 tr = self.transaction()
374 374
375 375 # check in files
376 376 new = {}
377 377 linkrev = self.changelog.count()
378 378 commit.sort()
379 379 for f in commit:
380 380 self.ui.note(f + "\n")
381 381 try:
382 382 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
383 383 t = self.wread(f)
384 384 except IOError:
385 385 self.ui.warn(_("trouble committing %s!\n") % f)
386 386 raise
387 387
388 388 r = self.file(f)
389 389
390 390 meta = {}
391 391 cp = self.dirstate.copied(f)
392 392 if cp:
393 393 meta["copy"] = cp
394 394 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
395 395 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
396 396 fp1, fp2 = nullid, nullid
397 397 else:
398 398 fp1 = m1.get(f, nullid)
399 399 fp2 = m2.get(f, nullid)
400 400
401 401 # is the same revision on two branches of a merge?
402 402 if fp2 == fp1:
403 403 fp2 = nullid
404 404
405 405 if fp2 != nullid:
406 406 # is one parent an ancestor of the other?
407 407 fpa = r.ancestor(fp1, fp2)
408 408 if fpa == fp1:
409 409 fp1, fp2 = fp2, nullid
410 410 elif fpa == fp2:
411 411 fp2 = nullid
412 412
413 413 # is the file unmodified from the parent?
414 414 if not meta and t == r.read(fp1):
415 415 # record the proper existing parent in manifest
416 416 # no need to add a revision
417 417 new[f] = fp1
418 418 continue
419 419
420 420 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
421 421 # remember what we've added so that we can later calculate
422 422 # the files to pull from a set of changesets
423 423 changed.append(f)
424 424
425 425 # update manifest
426 426 m1.update(new)
427 427 for f in remove:
428 428 if f in m1:
429 429 del m1[f]
430 430 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
431 431 (new, remove))
432 432
433 433 # add changeset
434 434 new = new.keys()
435 435 new.sort()
436 436
437 437 if not text:
438 438 edittext = ""
439 439 if p2 != nullid:
440 440 edittext += "HG: branch merge\n"
441 441 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
442 442 edittext += "".join(["HG: changed %s\n" % f for f in changed])
443 443 edittext += "".join(["HG: removed %s\n" % f for f in remove])
444 444 if not changed and not remove:
445 445 edittext += "HG: no files changed\n"
446 446 edittext = self.ui.edit(edittext)
447 447 if not edittext.rstrip():
448 448 return None
449 449 text = edittext
450 450
451 451 user = user or self.ui.username()
452 452 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
453 453 tr.close()
454 454
455 455 self.dirstate.setparents(n)
456 456 self.dirstate.update(new, "n")
457 457 self.dirstate.forget(remove)
458 458
459 459 if not self.hook("commit", node=hex(n)):
460 460 return None
461 461 return n
462 462
463 463 def walk(self, node=None, files=[], match=util.always):
464 464 if node:
465 fdict = dict.fromkeys(files)
465 466 for fn in self.manifest.read(self.changelog.read(node)[0]):
466 if match(fn): yield 'm', fn
467 fdict.pop(fn, None)
468 if match(fn):
469 yield 'm', fn
470 for fn in fdict:
471 self.ui.warn(_('%s: No such file in rev %s\n') % (
472 util.pathto(self.getcwd(), fn), short(node)))
467 473 else:
468 474 for src, fn in self.dirstate.walk(files, match):
469 475 yield src, fn
470 476
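# Editorial note (not part of this changeset): with the fdict bookkeeping
# added above, walk() at a specific node now warns about files that were
# explicitly requested but are absent from that revision's manifest, instead
# of silently skipping them.  A minimal sketch of the same pattern, using
# plain stand-in data rather than repository objects:
#
#   requested = dict.fromkeys(['a', 'b'])
#   manifest = ['b']                        # 'a' does not exist in this rev
#   for fn in manifest:
#       requested.pop(fn, None)             # seen it, drop it from the pending set
#   for fn in requested:
#       print '%s: No such file in rev' % fn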
471 477 def changes(self, node1 = None, node2 = None, files = [],
472 478 match = util.always):
473 479 mf2, u = None, []
474 480
475 481 def fcmp(fn, mf):
476 482 t1 = self.wread(fn)
477 483 t2 = self.file(fn).read(mf.get(fn, nullid))
478 484 return cmp(t1, t2)
479 485
480 486 def mfmatches(node):
481 487 mf = dict(self.manifest.read(node))
482 488 for fn in mf.keys():
483 489 if not match(fn):
484 490 del mf[fn]
485 491 return mf
486 492
487 493 # are we comparing the working directory?
488 494 if not node2:
489 495 try:
490 496 wlock = self.wlock(wait=0)
491 497 except lock.LockHeld:
492 498 wlock = None
493 499 l, c, a, d, u = self.dirstate.changes(files, match)
494 500
495 501 # are we comparing working dir against its parent?
496 502 if not node1:
497 503 if l:
498 504 # do a full compare of any files that might have changed
499 505 change = self.changelog.read(self.dirstate.parents()[0])
500 506 mf2 = mfmatches(change[0])
501 507 for f in l:
502 508 if fcmp(f, mf2):
503 509 c.append(f)
504 510 elif wlock is not None:
505 511 self.dirstate.update([f], "n")
506 512
507 513 for l in c, a, d, u:
508 514 l.sort()
509 515
510 516 return (c, a, d, u)
511 517
512 518 # are we comparing working dir against non-tip?
513 519 # generate a pseudo-manifest for the working dir
514 520 if not node2:
515 521 if not mf2:
516 522 change = self.changelog.read(self.dirstate.parents()[0])
517 523 mf2 = mfmatches(change[0])
518 524 for f in a + c + l:
519 525 mf2[f] = ""
520 526 for f in d:
521 527 if f in mf2: del mf2[f]
522 528 else:
523 529 change = self.changelog.read(node2)
524 530 mf2 = mfmatches(change[0])
525 531
526 532 # flush lists from dirstate before comparing manifests
527 533 c, a = [], []
528 534
529 535 change = self.changelog.read(node1)
530 536 mf1 = mfmatches(change[0])
531 537
532 538 for fn in mf2:
533 539 if mf1.has_key(fn):
534 540 if mf1[fn] != mf2[fn]:
535 541 if mf2[fn] != "" or fcmp(fn, mf1):
536 542 c.append(fn)
537 543 del mf1[fn]
538 544 else:
539 545 a.append(fn)
540 546
541 547 d = mf1.keys()
542 548
543 549 for l in c, a, d, u:
544 550 l.sort()
545 551
546 552 return (c, a, d, u)
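# Editorial note (not part of this changeset): the (c, a, d, u) tuple returned
# above is (changed, added, deleted, unknown); the extra 'l' list that
# dirstate.changes() yields holds files whose status could not be decided from
# stat data alone, which is why they get the full fcmp() content comparison.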
547 553
548 554 def add(self, list):
549 555 wlock = self.wlock()
550 556 for f in list:
551 557 p = self.wjoin(f)
552 558 if not os.path.exists(p):
553 559 self.ui.warn(_("%s does not exist!\n") % f)
554 560 elif not os.path.isfile(p):
555 561 self.ui.warn(_("%s not added: only files supported currently\n") % f)
556 562 elif self.dirstate.state(f) in 'an':
557 563 self.ui.warn(_("%s already tracked!\n") % f)
558 564 else:
559 565 self.dirstate.update([f], "a")
560 566
561 567 def forget(self, list):
562 568 wlock = self.wlock()
563 569 for f in list:
564 570 if self.dirstate.state(f) not in 'ai':
565 571 self.ui.warn(_("%s not added!\n") % f)
566 572 else:
567 573 self.dirstate.forget([f])
568 574
569 575 def remove(self, list, unlink=False):
570 576 if unlink:
571 577 for f in list:
572 578 try:
573 579 util.unlink(self.wjoin(f))
574 580 except OSError, inst:
575 581 if inst.errno != errno.ENOENT: raise
576 582 wlock = self.wlock()
577 583 for f in list:
578 584 p = self.wjoin(f)
579 585 if os.path.exists(p):
580 586 self.ui.warn(_("%s still exists!\n") % f)
581 587 elif self.dirstate.state(f) == 'a':
582 588 self.ui.warn(_("%s never committed!\n") % f)
583 589 self.dirstate.forget([f])
584 590 elif f not in self.dirstate:
585 591 self.ui.warn(_("%s not tracked!\n") % f)
586 592 else:
587 593 self.dirstate.update([f], "r")
588 594
589 595 def undelete(self, list):
590 596 p = self.dirstate.parents()[0]
591 597 mn = self.changelog.read(p)[0]
592 598 mf = self.manifest.readflags(mn)
593 599 m = self.manifest.read(mn)
594 600 wlock = self.wlock()
595 601 for f in list:
596 602 if self.dirstate.state(f) not in "r":
597 603 self.ui.warn(_("%s not removed!\n") % f)
598 604 else:
599 605 t = self.file(f).read(m[f])
600 606 self.wwrite(f, t)
601 607 util.set_exec(self.wjoin(f), mf[f])
602 608 self.dirstate.update([f], "n")
603 609
604 610 def copy(self, source, dest):
605 611 p = self.wjoin(dest)
606 612 if not os.path.exists(p):
607 613 self.ui.warn(_("%s does not exist!\n") % dest)
608 614 elif not os.path.isfile(p):
609 615 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
610 616 else:
611 617 wlock = self.wlock()
612 618 if self.dirstate.state(dest) == '?':
613 619 self.dirstate.update([dest], "a")
614 620 self.dirstate.copy(source, dest)
615 621
616 622 def heads(self, start=None):
617 623 heads = self.changelog.heads(start)
618 624 # sort the output in rev descending order
619 625 heads = [(-self.changelog.rev(h), h) for h in heads]
620 626 heads.sort()
621 627 return [n for (r, n) in heads]
622 628
623 629 # branchlookup returns a dict giving a list of branches for
624 630 # each head. A branch is defined as the tag of a node or
625 631 # the branch of the node's parents. If a node has multiple
626 632 # branch tags, tags are eliminated if they are visible from other
627 633 # branch tags.
628 634 #
629 635 # So, for this graph: a->b->c->d->e
630 636 # \ /
631 637 # aa -----/
632 638 # a has tag 2.6.12
633 639 # d has tag 2.6.13
634 640 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
635 641 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
636 642 # from the list.
637 643 #
638 644 # It is possible that more than one head will have the same branch tag.
639 645 # callers need to check the result for multiple heads under the same
640 646 # branch tag if that is a problem for them (ie checkout of a specific
641 647 # branch).
642 648 #
643 649 # passing in a specific branch will limit the depth of the search
644 650 # through the parents. It won't limit the branches returned in the
645 651 # result though.
646 652 def branchlookup(self, heads=None, branch=None):
647 653 if not heads:
648 654 heads = self.heads()
649 655 headt = [ h for h in heads ]
650 656 chlog = self.changelog
651 657 branches = {}
652 658 merges = []
653 659 seenmerge = {}
654 660
655 661 # traverse the tree once for each head, recording in the branches
656 662 # dict which tags are visible from this head. The branches
657 663 # dict also records which tags are visible from each tag
658 664 # while we traverse.
659 665 while headt or merges:
660 666 if merges:
661 667 n, found = merges.pop()
662 668 visit = [n]
663 669 else:
664 670 h = headt.pop()
665 671 visit = [h]
666 672 found = [h]
667 673 seen = {}
668 674 while visit:
669 675 n = visit.pop()
670 676 if n in seen:
671 677 continue
672 678 pp = chlog.parents(n)
673 679 tags = self.nodetags(n)
674 680 if tags:
675 681 for x in tags:
676 682 if x == 'tip':
677 683 continue
678 684 for f in found:
679 685 branches.setdefault(f, {})[n] = 1
680 686 branches.setdefault(n, {})[n] = 1
681 687 break
682 688 if n not in found:
683 689 found.append(n)
684 690 if branch in tags:
685 691 continue
686 692 seen[n] = 1
687 693 if pp[1] != nullid and n not in seenmerge:
688 694 merges.append((pp[1], [x for x in found]))
689 695 seenmerge[n] = 1
690 696 if pp[0] != nullid:
691 697 visit.append(pp[0])
692 698 # traverse the branches dict, eliminating branch tags from each
693 699 # head that are visible from another branch tag for that head.
694 700 out = {}
695 701 viscache = {}
696 702 for h in heads:
697 703 def visible(node):
698 704 if node in viscache:
699 705 return viscache[node]
700 706 ret = {}
701 707 visit = [node]
702 708 while visit:
703 709 x = visit.pop()
704 710 if x in viscache:
705 711 ret.update(viscache[x])
706 712 elif x not in ret:
707 713 ret[x] = 1
708 714 if x in branches:
709 715 visit[len(visit):] = branches[x].keys()
710 716 viscache[node] = ret
711 717 return ret
712 718 if h not in branches:
713 719 continue
714 720 # O(n^2), but somewhat limited. This only searches the
715 721 # tags visible from a specific head, not all the tags in the
716 722 # whole repo.
717 723 for b in branches[h]:
718 724 vis = False
719 725 for bb in branches[h].keys():
720 726 if b != bb:
721 727 if b in visible(bb):
722 728 vis = True
723 729 break
724 730 if not vis:
725 731 l = out.setdefault(h, [])
726 732 l[len(l):] = self.nodetags(b)
727 733 return out
728 734
729 735 def branches(self, nodes):
730 736 if not nodes: nodes = [self.changelog.tip()]
731 737 b = []
732 738 for n in nodes:
733 739 t = n
734 740 while n:
735 741 p = self.changelog.parents(n)
736 742 if p[1] != nullid or p[0] == nullid:
737 743 b.append((t, n, p[0], p[1]))
738 744 break
739 745 n = p[0]
740 746 return b
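# Editorial note (not part of this changeset): branches() follows first
# parents from each starting node until it hits a merge or a root, so every
# (t, n, p[0], p[1]) tuple describes one linear segment of history -- the
# "branch" that findincoming() below walks and narrows via between().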
741 747
742 748 def between(self, pairs):
743 749 r = []
744 750
745 751 for top, bottom in pairs:
746 752 n, l, i = top, [], 0
747 753 f = 1
748 754
749 755 while n != bottom:
750 756 p = self.changelog.parents(n)[0]
751 757 if i == f:
752 758 l.append(n)
753 759 f = f * 2
754 760 n = p
755 761 i += 1
756 762
757 763 r.append(l)
758 764
759 765 return r
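# Editorial sketch (not part of this changeset): between() samples each
# top-to-bottom segment at exponentially growing distances from the top
# (1, 2, 4, 8, ... steps), which is what the "binary search" loop in
# findincoming() relies on to narrow a branch in few round trips.  The same
# sampling over a toy history where parent(n) = n - 1:
#
#   def sample(top, bottom):
#       n, l, i, f = top, [], 0, 1
#       while n != bottom:
#           p = n - 1                  # stand-in for changelog.parents(n)[0]
#           if i == f:
#               l.append(n)            # 1, 2, 4, 8, ... steps below the top
#               f *= 2
#           n = p
#           i += 1
#       return l
#
#   print sample(10, 0)                # -> [9, 8, 6, 2]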
760 766
761 767 def findincoming(self, remote, base=None, heads=None):
762 768 m = self.changelog.nodemap
763 769 search = []
764 770 fetch = {}
765 771 seen = {}
766 772 seenbranch = {}
767 773 if base == None:
768 774 base = {}
769 775
770 776 # assume we're closer to the tip than the root
771 777 # and start by examining the heads
772 778 self.ui.status(_("searching for changes\n"))
773 779
774 780 if not heads:
775 781 heads = remote.heads()
776 782
777 783 unknown = []
778 784 for h in heads:
779 785 if h not in m:
780 786 unknown.append(h)
781 787 else:
782 788 base[h] = 1
783 789
784 790 if not unknown:
785 791 return None
786 792
787 793 rep = {}
788 794 reqcnt = 0
789 795
790 796 # search through remote branches
791 797 # a 'branch' here is a linear segment of history, with four parts:
792 798 # head, root, first parent, second parent
793 799 # (a branch always has two parents (or none) by definition)
794 800 unknown = remote.branches(unknown)
795 801 while unknown:
796 802 r = []
797 803 while unknown:
798 804 n = unknown.pop(0)
799 805 if n[0] in seen:
800 806 continue
801 807
802 808 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
803 809 if n[0] == nullid:
804 810 break
805 811 if n in seenbranch:
806 812 self.ui.debug(_("branch already found\n"))
807 813 continue
808 814 if n[1] and n[1] in m: # do we know the base?
809 815 self.ui.debug(_("found incomplete branch %s:%s\n")
810 816 % (short(n[0]), short(n[1])))
811 817 search.append(n) # schedule branch range for scanning
812 818 seenbranch[n] = 1
813 819 else:
814 820 if n[1] not in seen and n[1] not in fetch:
815 821 if n[2] in m and n[3] in m:
816 822 self.ui.debug(_("found new changeset %s\n") %
817 823 short(n[1]))
818 824 fetch[n[1]] = 1 # earliest unknown
819 825 base[n[2]] = 1 # latest known
820 826 continue
821 827
822 828 for a in n[2:4]:
823 829 if a not in rep:
824 830 r.append(a)
825 831 rep[a] = 1
826 832
827 833 seen[n[0]] = 1
828 834
829 835 if r:
830 836 reqcnt += 1
831 837 self.ui.debug(_("request %d: %s\n") %
832 838 (reqcnt, " ".join(map(short, r))))
833 839 for p in range(0, len(r), 10):
834 840 for b in remote.branches(r[p:p+10]):
835 841 self.ui.debug(_("received %s:%s\n") %
836 842 (short(b[0]), short(b[1])))
837 843 if b[0] in m:
838 844 self.ui.debug(_("found base node %s\n") % short(b[0]))
839 845 base[b[0]] = 1
840 846 elif b[0] not in seen:
841 847 unknown.append(b)
842 848
843 849 # do binary search on the branches we found
844 850 while search:
845 851 n = search.pop(0)
846 852 reqcnt += 1
847 853 l = remote.between([(n[0], n[1])])[0]
848 854 l.append(n[1])
849 855 p = n[0]
850 856 f = 1
851 857 for i in l:
852 858 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
853 859 if i in m:
854 860 if f <= 2:
855 861 self.ui.debug(_("found new branch changeset %s\n") %
856 862 short(p))
857 863 fetch[p] = 1
858 864 base[i] = 1
859 865 else:
860 866 self.ui.debug(_("narrowed branch search to %s:%s\n")
861 867 % (short(p), short(i)))
862 868 search.append((p, i))
863 869 break
864 870 p, f = i, f * 2
865 871
866 872 # sanity check our fetch list
867 873 for f in fetch.keys():
868 874 if f in m:
869 875 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
870 876
871 877 if base.keys() == [nullid]:
872 878 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
873 879
874 880 self.ui.note(_("found new changesets starting at ") +
875 881 " ".join([short(f) for f in fetch]) + "\n")
876 882
877 883 self.ui.debug(_("%d total queries\n") % reqcnt)
878 884
879 885 return fetch.keys()
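# Editorial note (not part of this changeset): findincoming() returns the
# "earliest unknown" changesets -- the roots of the part of remote history we
# are missing -- and fills the optional base dict with nodes found to be known
# on both sides; findoutgoing() and pull() below build directly on that.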
880 886
881 887 def findoutgoing(self, remote, base=None, heads=None):
882 888 if base == None:
883 889 base = {}
884 890 self.findincoming(remote, base, heads)
885 891
886 892 self.ui.debug(_("common changesets up to ")
887 893 + " ".join(map(short, base.keys())) + "\n")
888 894
889 895 remain = dict.fromkeys(self.changelog.nodemap)
890 896
891 897 # prune everything remote has from the tree
892 898 del remain[nullid]
893 899 remove = base.keys()
894 900 while remove:
895 901 n = remove.pop(0)
896 902 if n in remain:
897 903 del remain[n]
898 904 for p in self.changelog.parents(n):
899 905 remove.append(p)
900 906
901 907 # find every node whose parents have been pruned
902 908 subset = []
903 909 for n in remain:
904 910 p1, p2 = self.changelog.parents(n)
905 911 if p1 not in remain and p2 not in remain:
906 912 subset.append(n)
907 913
908 914 # this is the set of all roots we have to push
909 915 return subset
910 916
911 917 def pull(self, remote, heads = None):
912 918 lock = self.lock()
913 919
914 920 # if we have an empty repo, fetch everything
915 921 if self.changelog.tip() == nullid:
916 922 self.ui.status(_("requesting all changes\n"))
917 923 fetch = [nullid]
918 924 else:
919 925 fetch = self.findincoming(remote)
920 926
921 927 if not fetch:
922 928 self.ui.status(_("no changes found\n"))
923 929 return 1
924 930
925 931 if heads is None:
926 932 cg = remote.changegroup(fetch)
927 933 else:
928 934 cg = remote.changegroupsubset(fetch, heads)
929 935 return self.addchangegroup(cg)
930 936
931 937 def push(self, remote, force=False):
932 938 lock = remote.lock()
933 939
934 940 base = {}
935 941 heads = remote.heads()
936 942 inc = self.findincoming(remote, base, heads)
937 943 if not force and inc:
938 944 self.ui.warn(_("abort: unsynced remote changes!\n"))
939 945 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
940 946 return 1
941 947
942 948 update = self.findoutgoing(remote, base)
943 949 if not update:
944 950 self.ui.status(_("no changes found\n"))
945 951 return 1
946 952 elif not force:
947 953 if len(heads) < len(self.changelog.heads()):
948 954 self.ui.warn(_("abort: push creates new remote branches!\n"))
949 955 self.ui.status(_("(did you forget to merge?"
950 956 " use push -f to force)\n"))
951 957 return 1
952 958
953 959 cg = self.changegroup(update)
954 960 return remote.addchangegroup(cg)
955 961
956 962 def changegroupsubset(self, bases, heads):
957 963 """This function generates a changegroup consisting of all the nodes
958 964 that are descendants of any of the bases, and ancestors of any of
959 965 the heads.
960 966
961 967 It is fairly complex as determining which filenodes and which
962 968 manifest nodes need to be included for the changeset to be complete
963 969 is non-trivial.
964 970
965 971 Another wrinkle is doing the reverse, figuring out which changeset in
966 972 the changegroup a particular filenode or manifestnode belongs to."""
967 973
968 974 # Set up some initial variables
969 975 # Make it easy to refer to self.changelog
970 976 cl = self.changelog
971 977 # msng is short for missing - compute the list of changesets in this
972 978 # changegroup.
973 979 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
974 980 # Some bases may turn out to be superfluous, and some heads may be
975 981 # too. nodesbetween will return the minimal set of bases and heads
976 982 # necessary to re-create the changegroup.
977 983
978 984 # Known heads are the list of heads that it is assumed the recipient
979 985 # of this changegroup will know about.
980 986 knownheads = {}
981 987 # We assume that all parents of bases are known heads.
982 988 for n in bases:
983 989 for p in cl.parents(n):
984 990 if p != nullid:
985 991 knownheads[p] = 1
986 992 knownheads = knownheads.keys()
987 993 if knownheads:
988 994 # Now that we know what heads are known, we can compute which
989 995 # changesets are known. The recipient must know about all
990 996 # changesets required to reach the known heads from the null
991 997 # changeset.
992 998 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
993 999 junk = None
994 1000 # Transform the list into an ersatz set.
995 1001 has_cl_set = dict.fromkeys(has_cl_set)
996 1002 else:
997 1003 # If there were no known heads, the recipient cannot be assumed to
998 1004 # know about any changesets.
999 1005 has_cl_set = {}
1000 1006
1001 1007 # Make it easy to refer to self.manifest
1002 1008 mnfst = self.manifest
1003 1009 # We don't know which manifests are missing yet
1004 1010 msng_mnfst_set = {}
1005 1011 # Nor do we know which filenodes are missing.
1006 1012 msng_filenode_set = {}
1007 1013
1008 1014 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1009 1015 junk = None
1010 1016
1011 1017 # A changeset always belongs to itself, so the changenode lookup
1012 1018 # function for a changenode is identity.
1013 1019 def identity(x):
1014 1020 return x
1015 1021
1016 1022 # A function generating function. Sets up an environment for the
1017 1023 # inner function.
1018 1024 def cmp_by_rev_func(revlog):
1019 1025 # Compare two nodes by their revision number in the environment's
1020 1026 # revision history. Since the revision number both represents the
1021 1027 # most efficient order to read the nodes in, and represents a
1022 1028 # topological sorting of the nodes, this function is often useful.
1023 1029 def cmp_by_rev(a, b):
1024 1030 return cmp(revlog.rev(a), revlog.rev(b))
1025 1031 return cmp_by_rev
1026 1032
1027 1033 # If we determine that a particular file or manifest node must be a
1028 1034 # node that the recipient of the changegroup will already have, we can
1029 1035 # also assume the recipient will have all the parents. This function
1030 1036 # prunes them from the set of missing nodes.
1031 1037 def prune_parents(revlog, hasset, msngset):
1032 1038 haslst = hasset.keys()
1033 1039 haslst.sort(cmp_by_rev_func(revlog))
1034 1040 for node in haslst:
1035 1041 parentlst = [p for p in revlog.parents(node) if p != nullid]
1036 1042 while parentlst:
1037 1043 n = parentlst.pop()
1038 1044 if n not in hasset:
1039 1045 hasset[n] = 1
1040 1046 p = [p for p in revlog.parents(n) if p != nullid]
1041 1047 parentlst.extend(p)
1042 1048 for n in hasset:
1043 1049 msngset.pop(n, None)
1044 1050
1045 1051 # This is a function generating function used to set up an environment
1046 1052 # for the inner function to execute in.
1047 1053 def manifest_and_file_collector(changedfileset):
1048 1054 # This is an information gathering function that gathers
1049 1055 # information from each changeset node that goes out as part of
1050 1056 # the changegroup. The information gathered is a list of which
1051 1057 # manifest nodes are potentially required (the recipient may
1052 1058 # already have them) and total list of all files which were
1053 1059 # changed in any changeset in the changegroup.
1054 1060 #
1055 1061 # We also remember the first changenode each manifest was seen
1056 1062 # referenced by, so we can later determine which changenode 'owns'
1057 1063 # the manifest.
1058 1064 def collect_manifests_and_files(clnode):
1059 1065 c = cl.read(clnode)
1060 1066 for f in c[3]:
1061 1067 # This is to make sure we only have one instance of each
1062 1068 # filename string for each filename.
1063 1069 changedfileset.setdefault(f, f)
1064 1070 msng_mnfst_set.setdefault(c[0], clnode)
1065 1071 return collect_manifests_and_files
1066 1072
1067 1073 # Figure out which manifest nodes (of the ones we think might be part
1068 1074 # of the changegroup) the recipient must know about and remove them
1069 1075 # from the changegroup.
1070 1076 def prune_manifests():
1071 1077 has_mnfst_set = {}
1072 1078 for n in msng_mnfst_set:
1073 1079 # If a 'missing' manifest thinks it belongs to a changenode
1074 1080 # the recipient is assumed to have, obviously the recipient
1075 1081 # must have that manifest.
1076 1082 linknode = cl.node(mnfst.linkrev(n))
1077 1083 if linknode in has_cl_set:
1078 1084 has_mnfst_set[n] = 1
1079 1085 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1080 1086
1081 1087 # Use the information collected in collect_manifests_and_files to say
1082 1088 # which changenode any manifestnode belongs to.
1083 1089 def lookup_manifest_link(mnfstnode):
1084 1090 return msng_mnfst_set[mnfstnode]
1085 1091
1086 1092 # A function generating function that sets up the initial environment
1087 1093 # for the inner function.
1088 1094 def filenode_collector(changedfiles):
1089 1095 next_rev = [0]
1090 1096 # This gathers information from each manifestnode included in the
1091 1097 # changegroup about which filenodes the manifest node references
1092 1098 # so we can include those in the changegroup too.
1093 1099 #
1094 1100 # It also remembers which changenode each filenode belongs to. It
1095 1101 # does this by assuming that a filenode belongs to the changenode
1096 1102 # that the first manifest referencing it belongs to.
1097 1103 def collect_msng_filenodes(mnfstnode):
1098 1104 r = mnfst.rev(mnfstnode)
1099 1105 if r == next_rev[0]:
1100 1106 # If the last rev we looked at was the one just previous,
1101 1107 # we only need to see a diff.
1102 1108 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1103 1109 # For each line in the delta
1104 1110 for dline in delta.splitlines():
1105 1111 # get the filename and filenode for that line
1106 1112 f, fnode = dline.split('\0')
1107 1113 fnode = bin(fnode[:40])
1108 1114 f = changedfiles.get(f, None)
1109 1115 # And if the file is in the list of files we care
1110 1116 # about.
1111 1117 if f is not None:
1112 1118 # Get the changenode this manifest belongs to
1113 1119 clnode = msng_mnfst_set[mnfstnode]
1114 1120 # Create the set of filenodes for the file if
1115 1121 # there isn't one already.
1116 1122 ndset = msng_filenode_set.setdefault(f, {})
1117 1123 # And set the filenode's changelog node to the
1118 1124 # manifest's if it hasn't been set already.
1119 1125 ndset.setdefault(fnode, clnode)
1120 1126 else:
1121 1127 # Otherwise we need a full manifest.
1122 1128 m = mnfst.read(mnfstnode)
1123 1129 # For every file we care about.
1124 1130 for f in changedfiles:
1125 1131 fnode = m.get(f, None)
1126 1132 # If it's in the manifest
1127 1133 if fnode is not None:
1128 1134 # See comments above.
1129 1135 clnode = msng_mnfst_set[mnfstnode]
1130 1136 ndset = msng_filenode_set.setdefault(f, {})
1131 1137 ndset.setdefault(fnode, clnode)
1132 1138 # Remember the revision we hope to see next.
1133 1139 next_rev[0] = r + 1
1134 1140 return collect_msng_filenodes
1135 1141
1136 1142 # We have a list of filenodes we think we need for a file; let's remove
1137 1143 # all those we know the recipient must have.
1138 1144 def prune_filenodes(f, filerevlog):
1139 1145 msngset = msng_filenode_set[f]
1140 1146 hasset = {}
1141 1147 # If a 'missing' filenode thinks it belongs to a changenode we
1142 1148 # assume the recipient must have, then the recipient must have
1143 1149 # that filenode.
1144 1150 for n in msngset:
1145 1151 clnode = cl.node(filerevlog.linkrev(n))
1146 1152 if clnode in has_cl_set:
1147 1153 hasset[n] = 1
1148 1154 prune_parents(filerevlog, hasset, msngset)
1149 1155
1150 1156 # A function generating function that sets up a context for the
1151 1157 # inner function.
1152 1158 def lookup_filenode_link_func(fname):
1153 1159 msngset = msng_filenode_set[fname]
1154 1160 # Lookup the changenode the filenode belongs to.
1155 1161 def lookup_filenode_link(fnode):
1156 1162 return msngset[fnode]
1157 1163 return lookup_filenode_link
1158 1164
1159 1165 # Now that we have all these utility functions to help out and
1160 1166 # logically divide up the task, generate the group.
1161 1167 def gengroup():
1162 1168 # The set of changed files starts empty.
1163 1169 changedfiles = {}
1164 1170 # Create a changenode group generator that will call our functions
1165 1171 # back to lookup the owning changenode and collect information.
1166 1172 group = cl.group(msng_cl_lst, identity,
1167 1173 manifest_and_file_collector(changedfiles))
1168 1174 for chnk in group:
1169 1175 yield chnk
1170 1176
1171 1177 # The list of manifests has been collected by the generator
1172 1178 # calling our functions back.
1173 1179 prune_manifests()
1174 1180 msng_mnfst_lst = msng_mnfst_set.keys()
1175 1181 # Sort the manifestnodes by revision number.
1176 1182 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1177 1183 # Create a generator for the manifestnodes that calls our lookup
1178 1184 # and data collection functions back.
1179 1185 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1180 1186 filenode_collector(changedfiles))
1181 1187 for chnk in group:
1182 1188 yield chnk
1183 1189
1184 1190 # These are no longer needed, dereference and toss the memory for
1185 1191 # them.
1186 1192 msng_mnfst_lst = None
1187 1193 msng_mnfst_set.clear()
1188 1194
1189 1195 changedfiles = changedfiles.keys()
1190 1196 changedfiles.sort()
1191 1197 # Go through all our files in order sorted by name.
1192 1198 for fname in changedfiles:
1193 1199 filerevlog = self.file(fname)
1194 1200 # Toss out the filenodes that the recipient isn't really
1195 1201 # missing.
1196 1202 prune_filenodes(fname, filerevlog)
1197 1203 msng_filenode_lst = msng_filenode_set[fname].keys()
1198 1204 # If any filenodes are left, generate the group for them,
1199 1205 # otherwise don't bother.
1200 1206 if len(msng_filenode_lst) > 0:
1201 1207 yield struct.pack(">l", len(fname) + 4) + fname
1202 1208 # Sort the filenodes by their revision #
1203 1209 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1204 1210 # Create a group generator and only pass in a changenode
1205 1211 # lookup function as we need to collect no information
1206 1212 # from filenodes.
1207 1213 group = filerevlog.group(msng_filenode_lst,
1208 1214 lookup_filenode_link_func(fname))
1209 1215 for chnk in group:
1210 1216 yield chnk
1211 1217 # Don't need this anymore, toss it to free memory.
1212 1218 del msng_filenode_set[fname]
1213 1219 # Signal that no more groups are left.
1214 1220 yield struct.pack(">l", 0)
1215 1221
1216 1222 return util.chunkbuffer(gengroup())
1217 1223
1218 1224 def changegroup(self, basenodes):
1219 1225 """Generate a changegroup of all nodes that we have that a recipient
1220 1226 doesn't.
1221 1227
1222 1228 This is much easier than the previous function as we can assume that
1223 1229 the recipient has any changenode we aren't sending them."""
1224 1230 cl = self.changelog
1225 1231 nodes = cl.nodesbetween(basenodes, None)[0]
1226 1232 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1227 1233
1228 1234 def identity(x):
1229 1235 return x
1230 1236
1231 1237 def gennodelst(revlog):
1232 1238 for r in xrange(0, revlog.count()):
1233 1239 n = revlog.node(r)
1234 1240 if revlog.linkrev(n) in revset:
1235 1241 yield n
1236 1242
1237 1243 def changed_file_collector(changedfileset):
1238 1244 def collect_changed_files(clnode):
1239 1245 c = cl.read(clnode)
1240 1246 for fname in c[3]:
1241 1247 changedfileset[fname] = 1
1242 1248 return collect_changed_files
1243 1249
1244 1250 def lookuprevlink_func(revlog):
1245 1251 def lookuprevlink(n):
1246 1252 return cl.node(revlog.linkrev(n))
1247 1253 return lookuprevlink
1248 1254
1249 1255 def gengroup():
1250 1256 # construct a list of all changed files
1251 1257 changedfiles = {}
1252 1258
1253 1259 for chnk in cl.group(nodes, identity,
1254 1260 changed_file_collector(changedfiles)):
1255 1261 yield chnk
1256 1262 changedfiles = changedfiles.keys()
1257 1263 changedfiles.sort()
1258 1264
1259 1265 mnfst = self.manifest
1260 1266 nodeiter = gennodelst(mnfst)
1261 1267 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1262 1268 yield chnk
1263 1269
1264 1270 for fname in changedfiles:
1265 1271 filerevlog = self.file(fname)
1266 1272 nodeiter = gennodelst(filerevlog)
1267 1273 nodeiter = list(nodeiter)
1268 1274 if nodeiter:
1269 1275 yield struct.pack(">l", len(fname) + 4) + fname
1270 1276 lookup = lookuprevlink_func(filerevlog)
1271 1277 for chnk in filerevlog.group(nodeiter, lookup):
1272 1278 yield chnk
1273 1279
1274 1280 yield struct.pack(">l", 0)
1275 1281
1276 1282 return util.chunkbuffer(gengroup())
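# Editorial note (not part of this changeset): both gengroup() generators
# above emit the same stream layout -- a changelog group, then a manifest
# group, then one group per changed file prefixed by a length-framed filename
# chunk, with a final zero-length chunk closing the stream; addchangegroup()
# below consumes it in exactly that order.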
1277 1283
1278 1284 def addchangegroup(self, source):
1279 1285
1280 1286 def getchunk():
1281 1287 d = source.read(4)
1282 1288 if not d: return ""
1283 1289 l = struct.unpack(">l", d)[0]
1284 1290 if l <= 4: return ""
1285 1291 d = source.read(l - 4)
1286 1292 if len(d) < l - 4:
1287 1293 raise repo.RepoError(_("premature EOF reading chunk"
1288 1294 " (got %d bytes, expected %d)")
1289 1295 % (len(d), l - 4))
1290 1296 return d
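# Editorial sketch (not part of this changeset): each chunk read above is a
# big-endian 4-byte length that counts itself, followed by the payload; a
# length of zero (anything <= 4) acts as a terminator, matching the
# struct.pack(">l", ...) calls in the generators above.  For example:
#
#   import struct
#   def frame(data):
#       return struct.pack(">l", len(data) + 4) + data
#   stream = frame("hello") + struct.pack(">l", 0)
#   # getchunk() would return "hello", then "" for the terminating chunk.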
1291 1297
1292 1298 def getgroup():
1293 1299 while 1:
1294 1300 c = getchunk()
1295 1301 if not c: break
1296 1302 yield c
1297 1303
1298 1304 def csmap(x):
1299 1305 self.ui.debug(_("add changeset %s\n") % short(x))
1300 1306 return self.changelog.count()
1301 1307
1302 1308 def revmap(x):
1303 1309 return self.changelog.rev(x)
1304 1310
1305 1311 if not source: return
1306 1312 changesets = files = revisions = 0
1307 1313
1308 1314 tr = self.transaction()
1309 1315
1310 1316 oldheads = len(self.changelog.heads())
1311 1317
1312 1318 # pull off the changeset group
1313 1319 self.ui.status(_("adding changesets\n"))
1314 1320 co = self.changelog.tip()
1315 1321 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1316 1322 cnr, cor = map(self.changelog.rev, (cn, co))
1317 1323 if cn == nullid:
1318 1324 cnr = cor
1319 1325 changesets = cnr - cor
1320 1326
1321 1327 # pull off the manifest group
1322 1328 self.ui.status(_("adding manifests\n"))
1323 1329 mm = self.manifest.tip()
1324 1330 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1325 1331
1326 1332 # process the files
1327 1333 self.ui.status(_("adding file changes\n"))
1328 1334 while 1:
1329 1335 f = getchunk()
1330 1336 if not f: break
1331 1337 self.ui.debug(_("adding %s revisions\n") % f)
1332 1338 fl = self.file(f)
1333 1339 o = fl.count()
1334 1340 n = fl.addgroup(getgroup(), revmap, tr)
1335 1341 revisions += fl.count() - o
1336 1342 files += 1
1337 1343
1338 1344 newheads = len(self.changelog.heads())
1339 1345 heads = ""
1340 1346 if oldheads and newheads > oldheads:
1341 1347 heads = _(" (+%d heads)") % (newheads - oldheads)
1342 1348
1343 1349 self.ui.status(_("added %d changesets"
1344 1350 " with %d changes to %d files%s\n")
1345 1351 % (changesets, revisions, files, heads))
1346 1352
1347 1353 tr.close()
1348 1354
1349 1355 if changesets > 0:
1350 1356 if not self.hook("changegroup",
1351 1357 node=hex(self.changelog.node(cor+1))):
1352 1358 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1353 1359 return 1
1354 1360
1355 1361 for i in range(cor + 1, cnr + 1):
1356 1362 self.hook("commit", node=hex(self.changelog.node(i)))
1357 1363
1358 1364 return
1359 1365
1360 1366 def update(self, node, allow=False, force=False, choose=None,
1361 1367 moddirstate=True):
1362 1368 pl = self.dirstate.parents()
1363 1369 if not force and pl[1] != nullid:
1364 1370 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1365 1371 return 1
1366 1372
1367 1373 p1, p2 = pl[0], node
1368 1374 pa = self.changelog.ancestor(p1, p2)
1369 1375 m1n = self.changelog.read(p1)[0]
1370 1376 m2n = self.changelog.read(p2)[0]
1371 1377 man = self.manifest.ancestor(m1n, m2n)
1372 1378 m1 = self.manifest.read(m1n)
1373 1379 mf1 = self.manifest.readflags(m1n)
1374 1380 m2 = self.manifest.read(m2n)
1375 1381 mf2 = self.manifest.readflags(m2n)
1376 1382 ma = self.manifest.read(man)
1377 1383 mfa = self.manifest.readflags(man)
1378 1384
1379 1385 (c, a, d, u) = self.changes()
1380 1386
1381 1387 # is this a jump, or a merge? i.e. is there a linear path
1382 1388 # from p1 to p2?
1383 1389 linear_path = (pa == p1 or pa == p2)
1384 1390
1385 1391 # resolve the manifest to determine which files
1386 1392 # we care about merging
1387 1393 self.ui.note(_("resolving manifests\n"))
1388 1394 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1389 1395 (force, allow, moddirstate, linear_path))
1390 1396 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1391 1397 (short(man), short(m1n), short(m2n)))
1392 1398
1393 1399 merge = {}
1394 1400 get = {}
1395 1401 remove = []
1396 1402
1397 1403 # construct a working dir manifest
1398 1404 mw = m1.copy()
1399 1405 mfw = mf1.copy()
1400 1406 umap = dict.fromkeys(u)
1401 1407
1402 1408 for f in a + c + u:
1403 1409 mw[f] = ""
1404 1410 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1405 1411
1406 1412 if moddirstate:
1407 1413 wlock = self.wlock()
1408 1414
1409 1415 for f in d:
1410 1416 if f in mw: del mw[f]
1411 1417
1412 1418 # If we're jumping between revisions (as opposed to merging),
1413 1419 # and if neither the working directory nor the target rev has
1414 1420 # the file, then we need to remove it from the dirstate, to
1415 1421 # prevent the dirstate from listing the file when it is no
1416 1422 # longer in the manifest.
1417 1423 if moddirstate and linear_path and f not in m2:
1418 1424 self.dirstate.forget((f,))
1419 1425
1420 1426 # Compare manifests
1421 1427 for f, n in mw.iteritems():
1422 1428 if choose and not choose(f): continue
1423 1429 if f in m2:
1424 1430 s = 0
1425 1431
1426 1432 # is the wfile new since m1, and match m2?
1427 1433 if f not in m1:
1428 1434 t1 = self.wread(f)
1429 1435 t2 = self.file(f).read(m2[f])
1430 1436 if cmp(t1, t2) == 0:
1431 1437 n = m2[f]
1432 1438 del t1, t2
1433 1439
1434 1440 # are files different?
1435 1441 if n != m2[f]:
1436 1442 a = ma.get(f, nullid)
1437 1443 # are both different from the ancestor?
1438 1444 if n != a and m2[f] != a:
1439 1445 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1440 1446 # merge executable bits
1441 1447 # "if we changed or they changed, change in merge"
1442 1448 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1443 1449 mode = ((a^b) | (a^c)) ^ a
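# Editorial note (not part of this changeset): with 0/1 flag values this xor
# expression implements "take whichever side changed the exec bit": if only
# the working copy changed it, the result is b; if only the remote did, it is
# c; if neither did, it stays a.  E.g. a=0, b=0, c=1 -> ((0^0)|(0^1))^0 = 1.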
1444 1450 merge[f] = (m1.get(f, nullid), m2[f], mode)
1445 1451 s = 1
1446 1452 # are we clobbering?
1447 1453 # is remote's version newer?
1448 1454 # or are we going back in time?
1449 1455 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1450 1456 self.ui.debug(_(" remote %s is newer, get\n") % f)
1451 1457 get[f] = m2[f]
1452 1458 s = 1
1453 1459 elif f in umap:
1454 1460 # this unknown file is the same as the checkout
1455 1461 get[f] = m2[f]
1456 1462
1457 1463 if not s and mfw[f] != mf2[f]:
1458 1464 if force:
1459 1465 self.ui.debug(_(" updating permissions for %s\n") % f)
1460 1466 util.set_exec(self.wjoin(f), mf2[f])
1461 1467 else:
1462 1468 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1463 1469 mode = ((a^b) | (a^c)) ^ a
1464 1470 if mode != b:
1465 1471 self.ui.debug(_(" updating permissions for %s\n") % f)
1466 1472 util.set_exec(self.wjoin(f), mode)
1467 1473 del m2[f]
1468 1474 elif f in ma:
1469 1475 if n != ma[f]:
1470 1476 r = _("d")
1471 1477 if not force and (linear_path or allow):
1472 1478 r = self.ui.prompt(
1473 1479 (_(" local changed %s which remote deleted\n") % f) +
1474 1480 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1475 1481 if r == _("d"):
1476 1482 remove.append(f)
1477 1483 else:
1478 1484 self.ui.debug(_("other deleted %s\n") % f)
1479 1485 remove.append(f) # other deleted it
1480 1486 else:
1481 1487 # file is created on branch or in working directory
1482 1488 if force and f not in umap:
1483 1489 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1484 1490 remove.append(f)
1485 1491 elif n == m1.get(f, nullid): # same as parent
1486 1492 if p2 == pa: # going backwards?
1487 1493 self.ui.debug(_("remote deleted %s\n") % f)
1488 1494 remove.append(f)
1489 1495 else:
1490 1496 self.ui.debug(_("local modified %s, keeping\n") % f)
1491 1497 else:
1492 1498 self.ui.debug(_("working dir created %s, keeping\n") % f)
1493 1499
1494 1500 for f, n in m2.iteritems():
1495 1501 if choose and not choose(f): continue
1496 1502 if f[0] == "/": continue
1497 1503 if f in ma and n != ma[f]:
1498 1504 r = _("k")
1499 1505 if not force and (linear_path or allow):
1500 1506 r = self.ui.prompt(
1501 1507 (_("remote changed %s which local deleted\n") % f) +
1502 1508 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1503 1509 if r == _("k"): get[f] = n
1504 1510 elif f not in ma:
1505 1511 self.ui.debug(_("remote created %s\n") % f)
1506 1512 get[f] = n
1507 1513 else:
1508 1514 if force or p2 == pa: # going backwards?
1509 1515 self.ui.debug(_("local deleted %s, recreating\n") % f)
1510 1516 get[f] = n
1511 1517 else:
1512 1518 self.ui.debug(_("local deleted %s\n") % f)
1513 1519
1514 1520 del mw, m1, m2, ma
1515 1521
1516 1522 if force:
1517 1523 for f in merge:
1518 1524 get[f] = merge[f][1]
1519 1525 merge = {}
1520 1526
1521 1527 if linear_path or force:
1522 1528 # we don't need to do any magic, just jump to the new rev
1523 1529 branch_merge = False
1524 1530 p1, p2 = p2, nullid
1525 1531 else:
1526 1532 if not allow:
1527 1533 self.ui.status(_("this update spans a branch"
1528 1534 " affecting the following files:\n"))
1529 1535 fl = merge.keys() + get.keys()
1530 1536 fl.sort()
1531 1537 for f in fl:
1532 1538 cf = ""
1533 1539 if f in merge: cf = _(" (resolve)")
1534 1540 self.ui.status(" %s%s\n" % (f, cf))
1535 1541 self.ui.warn(_("aborting update spanning branches!\n"))
1536 1542 self.ui.status(_("(use update -m to merge across branches"
1537 1543 " or -C to lose changes)\n"))
1538 1544 return 1
1539 1545 branch_merge = True
1540 1546
1541 1547 # get the files that need no merging; just fetch their contents
1542 1548 files = get.keys()
1543 1549 files.sort()
1544 1550 for f in files:
1545 1551 if f[0] == "/": continue
1546 1552 self.ui.note(_("getting %s\n") % f)
1547 1553 t = self.file(f).read(get[f])
1548 1554 self.wwrite(f, t)
1549 1555 util.set_exec(self.wjoin(f), mf2[f])
1550 1556 if moddirstate:
1551 1557 if branch_merge:
1552 1558 self.dirstate.update([f], 'n', st_mtime=-1)
1553 1559 else:
1554 1560 self.dirstate.update([f], 'n')
1555 1561
1556 1562 # merge the tricky bits
1557 1563 files = merge.keys()
1558 1564 files.sort()
1559 1565 for f in files:
1560 1566 self.ui.status(_("merging %s\n") % f)
1561 1567 my, other, flag = merge[f]
1562 1568 self.merge3(f, my, other)
1563 1569 util.set_exec(self.wjoin(f), flag)
1564 1570 if moddirstate:
1565 1571 if branch_merge:
1566 1572 # We've done a branch merge, mark this file as merged
1567 1573 # so that we properly record the merger later
1568 1574 self.dirstate.update([f], 'm')
1569 1575 else:
1570 1576 # We've update-merged a locally modified file, so
1571 1577 # we set the dirstate to emulate a normal checkout
1572 1578 # of that file some time in the past. Thus our
1573 1579 # merge will appear as a normal local file
1574 1580 # modification.
1575 1581 f_len = len(self.file(f).read(other))
1576 1582 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1577 1583
1578 1584 remove.sort()
1579 1585 for f in remove:
1580 1586 self.ui.note(_("removing %s\n") % f)
1581 1587 try:
1582 1588 util.unlink(self.wjoin(f))
1583 1589 except OSError, inst:
1584 1590 if inst.errno != errno.ENOENT:
1585 1591 self.ui.warn(_("update failed to remove %s: %s!\n") %
1586 1592 (f, inst.strerror))
1587 1593 if moddirstate:
1588 1594 if branch_merge:
1589 1595 self.dirstate.update(remove, 'r')
1590 1596 else:
1591 1597 self.dirstate.forget(remove)
1592 1598
1593 1599 if moddirstate:
1594 1600 self.dirstate.setparents(p1, p2)
1595 1601
1596 1602 def merge3(self, fn, my, other):
1597 1603 """perform a 3-way merge in the working directory"""
1598 1604
1599 1605 def temp(prefix, node):
1600 1606 pre = "%s~%s." % (os.path.basename(fn), prefix)
1601 1607 (fd, name) = tempfile.mkstemp("", pre)
1602 1608 f = os.fdopen(fd, "wb")
1603 1609 self.wwrite(fn, fl.read(node), f)
1604 1610 f.close()
1605 1611 return name
1606 1612
1607 1613 fl = self.file(fn)
1608 1614 base = fl.ancestor(my, other)
1609 1615 a = self.wjoin(fn)
1610 1616 b = temp("base", base)
1611 1617 c = temp("other", other)
1612 1618
1613 1619 self.ui.note(_("resolving %s\n") % fn)
1614 1620 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1615 1621 (fn, short(my), short(other), short(base)))
1616 1622
1617 1623 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1618 1624 or "hgmerge")
1619 1625 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1620 1626 if r:
1621 1627 self.ui.warn(_("merging %s failed!\n") % fn)
1622 1628
1623 1629 os.unlink(b)
1624 1630 os.unlink(c)
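# Editorial note (not part of this changeset): the merge tool is resolved from
# $HGMERGE, then the ui.merge config entry, falling back to the "hgmerge"
# script; it is run with the local file, the ancestor ("base") temp file and
# the "other" temp file, and any non-zero exit status is reported as a failed
# merge before the temporary files are removed.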
1625 1631
1626 1632 def verify(self):
1627 1633 filelinkrevs = {}
1628 1634 filenodes = {}
1629 1635 changesets = revisions = files = 0
1630 1636 errors = [0]
1631 1637 neededmanifests = {}
1632 1638
1633 1639 def err(msg):
1634 1640 self.ui.warn(msg + "\n")
1635 1641 errors[0] += 1
1636 1642
1637 1643 seen = {}
1638 1644 self.ui.status(_("checking changesets\n"))
1639 1645 d = self.changelog.checksize()
1640 1646 if d:
1641 1647 err(_("changeset data short %d bytes") % d)
1642 1648 for i in range(self.changelog.count()):
1643 1649 changesets += 1
1644 1650 n = self.changelog.node(i)
1645 1651 l = self.changelog.linkrev(n)
1646 1652 if l != i:
1647 1653 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1648 1654 if n in seen:
1649 1655 err(_("duplicate changeset at revision %d") % i)
1650 1656 seen[n] = 1
1651 1657
1652 1658 for p in self.changelog.parents(n):
1653 1659 if p not in self.changelog.nodemap:
1654 1660 err(_("changeset %s has unknown parent %s") %
1655 1661 (short(n), short(p)))
1656 1662 try:
1657 1663 changes = self.changelog.read(n)
1658 1664 except KeyboardInterrupt:
1659 1665 self.ui.warn(_("interrupted"))
1660 1666 raise
1661 1667 except Exception, inst:
1662 1668 err(_("unpacking changeset %s: %s") % (short(n), inst))
1663 1669
1664 1670 neededmanifests[changes[0]] = n
1665 1671
1666 1672 for f in changes[3]:
1667 1673 filelinkrevs.setdefault(f, []).append(i)
1668 1674
1669 1675 seen = {}
1670 1676 self.ui.status(_("checking manifests\n"))
1671 1677 d = self.manifest.checksize()
1672 1678 if d:
1673 1679 err(_("manifest data short %d bytes") % d)
1674 1680 for i in range(self.manifest.count()):
1675 1681 n = self.manifest.node(i)
1676 1682 l = self.manifest.linkrev(n)
1677 1683
1678 1684 if l < 0 or l >= self.changelog.count():
1679 1685 err(_("bad manifest link (%d) at revision %d") % (l, i))
1680 1686
1681 1687 if n in neededmanifests:
1682 1688 del neededmanifests[n]
1683 1689
1684 1690 if n in seen:
1685 1691 err(_("duplicate manifest at revision %d") % i)
1686 1692
1687 1693 seen[n] = 1
1688 1694
1689 1695 for p in self.manifest.parents(n):
1690 1696 if p not in self.manifest.nodemap:
1691 1697 err(_("manifest %s has unknown parent %s") %
1692 1698 (short(n), short(p)))
1693 1699
1694 1700 try:
1695 1701 delta = mdiff.patchtext(self.manifest.delta(n))
1696 1702 except KeyboardInterrupt:
1697 1703 self.ui.warn(_("interrupted"))
1698 1704 raise
1699 1705 except Exception, inst:
1700 1706 err(_("unpacking manifest %s: %s") % (short(n), inst))
1701 1707
1702 1708 ff = [ l.split('\0') for l in delta.splitlines() ]
1703 1709 for f, fn in ff:
1704 1710 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1705 1711
1706 1712 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1707 1713
1708 1714 for m, c in neededmanifests.items():
1709 1715 err(_("Changeset %s refers to unknown manifest %s") %
1710 1716 (short(c), short(m)))
1711 1717 del neededmanifests
1712 1718
1713 1719 for f in filenodes:
1714 1720 if f not in filelinkrevs:
1715 1721 err(_("file %s in manifest but not in changesets") % f)
1716 1722
1717 1723 for f in filelinkrevs:
1718 1724 if f not in filenodes:
1719 1725 err(_("file %s in changeset but not in manifest") % f)
1720 1726
1721 1727 self.ui.status(_("checking files\n"))
1722 1728 ff = filenodes.keys()
1723 1729 ff.sort()
1724 1730 for f in ff:
1725 1731 if f == "/dev/null": continue
1726 1732 files += 1
1727 1733 fl = self.file(f)
1728 1734 d = fl.checksize()
1729 1735 if d:
1730 1736 err(_("%s file data short %d bytes") % (f, d))
1731 1737
1732 1738 nodes = { nullid: 1 }
1733 1739 seen = {}
1734 1740 for i in range(fl.count()):
1735 1741 revisions += 1
1736 1742 n = fl.node(i)
1737 1743
1738 1744 if n in seen:
1739 1745 err(_("%s: duplicate revision %d") % (f, i))
1740 1746 if n not in filenodes[f]:
1741 1747 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1742 1748 else:
1743 1749 del filenodes[f][n]
1744 1750
1745 1751 flr = fl.linkrev(n)
1746 1752 if flr not in filelinkrevs[f]:
1747 1753 err(_("%s:%s points to unexpected changeset %d")
1748 1754 % (f, short(n), flr))
1749 1755 else:
1750 1756 filelinkrevs[f].remove(flr)
1751 1757
1752 1758 # verify contents
1753 1759 try:
1754 1760 t = fl.read(n)
1755 1761 except KeyboardInterrupt:
1756 1762 self.ui.warn(_("interrupted"))
1757 1763 raise
1758 1764 except Exception, inst:
1759 1765 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1760 1766
1761 1767 # verify parents
1762 1768 (p1, p2) = fl.parents(n)
1763 1769 if p1 not in nodes:
1764 1770 err(_("file %s:%s unknown parent 1 %s") %
1765 1771 (f, short(n), short(p1)))
1766 1772 if p2 not in nodes:
1767 1773 err(_("file %s:%s unknown parent 2 %s") %
1768 1774 (f, short(n), short(p2)))
1769 1775 nodes[n] = 1
1770 1776
1771 1777 # cross-check
1772 1778 for node in filenodes[f]:
1773 1779 err(_("node %s in manifests not in %s") % (hex(node), f))
1774 1780
1775 1781 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1776 1782 (files, changesets, revisions))
1777 1783
1778 1784 if errors[0]:
1779 1785 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1780 1786 return 1