refactor some unlink/remove code and make sure we prune empty dir
Benoit Boissinot
r1415:c6e6ca96 default
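This changeset drops the per-command unlink loops from the remove and rename commands (see those hunks below) and lets repo.remove(names, unlink=True) delete the working-copy files itself, pruning directories left empty along the way. The updated localrepository.remove() is not part of this hunk, so the following is only a minimal sketch of the unlink-and-prune idea; remove_and_prune is a hypothetical stand-in, not the actual method:

    # Hypothetical sketch of unlink + empty-directory pruning; not the real
    # localrepository.remove(), which this changeset modifies elsewhere.
    import os, errno

    def remove_and_prune(root, names):
        for name in names:
            path = os.path.join(root, name)
            try:
                os.unlink(path)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise
            # walk back up towards the repository root, dropping any
            # directories that the unlink left empty
            d = os.path.dirname(path)
            while d and os.path.normpath(d) != os.path.normpath(root):
                try:
                    os.rmdir(d)      # raises OSError if the directory is not empty
                except OSError:
                    break
                d = os.path.dirname(d)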
@@ -1,2242 +1,2233 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 14 demandload(globals(), "errno socket version struct atexit sets bz2")
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18
19 19 def filterfiles(filters, files):
20 20 l = [x for x in files if x in filters]
21 21
22 22 for t in filters:
23 23 if t and t[-1] != "/":
24 24 t += "/"
25 25 l += [x for x in files if x.startswith(t)]
26 26 return l
27 27
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
35 35 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
36 36 opts.get('exclude'), head)
37 37
38 38 def makewalk(repo, pats, opts, head=''):
39 39 cwd = repo.getcwd()
40 40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
41 41 exact = dict(zip(files, files))
42 42 def walk():
43 43 for src, fn in repo.walk(files=files, match=matchfn):
44 44 yield src, fn, util.pathto(cwd, fn), fn in exact
45 45 return files, matchfn, walk()
46 46
47 47 def walk(repo, pats, opts, head=''):
48 48 files, matchfn, results = makewalk(repo, pats, opts, head)
49 49 for r in results:
50 50 yield r
51 51
52 52 def walkchangerevs(ui, repo, cwd, pats, opts):
53 53 '''Iterate over files and the revs they changed in.
54 54
55 55 Callers most commonly need to iterate backwards over the history
56 56 they are interested in. Doing so has awful (quadratic-looking)
57 57 performance, so we use iterators in a "windowed" way.
58 58
59 59 We walk a window of revisions in the desired order. Within the
60 60 window, we first walk forwards to gather data, then in the desired
61 61 order (usually backwards) to display it.
62 62
63 63 This function returns an (iterator, getchange) pair. The
64 64 getchange function returns the changelog entry for a numeric
65 65 revision. The iterator yields 3-tuples. They will be of one of
66 66 the following forms:
67 67
68 68 "window", incrementing, lastrev: stepping through a window,
69 69 positive if walking forwards through revs, last rev in the
70 70 sequence iterated over - use to reset state for the current window
71 71
72 72 "add", rev, fns: out-of-order traversal of the given file names
73 73 fns, which changed during revision rev - use to gather data for
74 74 possible display
75 75
76 76 "iter", rev, None: in-order traversal of the revs earlier iterated
77 77 over with "add" - use to display data'''
78 78
79 79 if repo.changelog.count() == 0:
80 80 return [], False
81 81
82 82 cwd = repo.getcwd()
83 83 if not pats and cwd:
84 84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
85 85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
86 86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
87 87 pats, opts)
88 88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
89 89 wanted = {}
90 90 slowpath = anypats
91 91 window = 300
92 92 fncache = {}
93 93
94 94 chcache = {}
95 95 def getchange(rev):
96 96 ch = chcache.get(rev)
97 97 if ch is None:
98 98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
99 99 return ch
100 100
101 101 if not slowpath and not files:
102 102 # No files, no patterns. Display all revs.
103 103 wanted = dict(zip(revs, revs))
104 104 if not slowpath:
105 105 # Only files, no patterns. Check the history of each file.
106 106 def filerevgen(filelog):
107 107 for i in xrange(filelog.count() - 1, -1, -window):
108 108 revs = []
109 109 for j in xrange(max(0, i - window), i + 1):
110 110 revs.append(filelog.linkrev(filelog.node(j)))
111 111 revs.reverse()
112 112 for rev in revs:
113 113 yield rev
114 114
115 115 minrev, maxrev = min(revs), max(revs)
116 116 for file in files:
117 117 filelog = repo.file(file)
118 118 # A zero count may be a directory or deleted file, so
119 119 # try to find matching entries on the slow path.
120 120 if filelog.count() == 0:
121 121 slowpath = True
122 122 break
123 123 for rev in filerevgen(filelog):
124 124 if rev <= maxrev:
125 125 if rev < minrev:
126 126 break
127 127 fncache.setdefault(rev, [])
128 128 fncache[rev].append(file)
129 129 wanted[rev] = 1
130 130 if slowpath:
131 131 # The slow path checks files modified in every changeset.
132 132 def changerevgen():
133 133 for i in xrange(repo.changelog.count() - 1, -1, -window):
134 134 for j in xrange(max(0, i - window), i + 1):
135 135 yield j, getchange(j)[3]
136 136
137 137 for rev, changefiles in changerevgen():
138 138 matches = filter(matchfn, changefiles)
139 139 if matches:
140 140 fncache[rev] = matches
141 141 wanted[rev] = 1
142 142
143 143 def iterate():
144 144 for i in xrange(0, len(revs), window):
145 145 yield 'window', revs[0] < revs[-1], revs[-1]
146 146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
147 147 if rev in wanted]
148 148 srevs = list(nrevs)
149 149 srevs.sort()
150 150 for rev in srevs:
151 151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
152 152 yield 'add', rev, fns
153 153 for rev in nrevs:
154 154 yield 'iter', rev, None
155 155 return iterate(), getchange
156 156
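walkchangerevs is consumed as the (iterator, getchange) pair described in its docstring; the log and grep commands below follow the same window/add/iter pattern. A minimal sketch of a consumer, assuming ui, repo and an opts dict carrying the keys the function reads ('include', 'exclude', 'rev'):

    # Sketch of the windowed protocol: gather on 'add', emit in order on 'iter'.
    changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), [], opts)
    gathered = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            gathered.clear()                   # reset state for the new window
        elif st == 'add':
            gathered[rev] = fns                # out-of-order data gathering
        elif st == 'iter':
            ui.write("%d: %s\n" % (rev, " ".join(gathered[rev])))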
157 157 revrangesep = ':'
158 158
159 159 def revrange(ui, repo, revs, revlog=None):
160 160 """Yield revision as strings from a list of revision specifications."""
161 161 if revlog is None:
162 162 revlog = repo.changelog
163 163 revcount = revlog.count()
164 164 def fix(val, defval):
165 165 if not val:
166 166 return defval
167 167 try:
168 168 num = int(val)
169 169 if str(num) != val:
170 170 raise ValueError
171 171 if num < 0: num += revcount
172 172 if num < 0: num = 0
173 173 elif num >= revcount:
174 174 raise ValueError
175 175 except ValueError:
176 176 try:
177 177 num = repo.changelog.rev(repo.lookup(val))
178 178 except KeyError:
179 179 try:
180 180 num = revlog.rev(revlog.lookup(val))
181 181 except KeyError:
182 182 raise util.Abort(_('invalid revision identifier %s'), val)
183 183 return num
184 184 seen = {}
185 185 for spec in revs:
186 186 if spec.find(revrangesep) >= 0:
187 187 start, end = spec.split(revrangesep, 1)
188 188 start = fix(start, 0)
189 189 end = fix(end, revcount - 1)
190 190 step = start > end and -1 or 1
191 191 for rev in xrange(start, end+step, step):
192 192 if rev in seen: continue
193 193 seen[rev] = 1
194 194 yield str(rev)
195 195 else:
196 196 rev = fix(spec, None)
197 197 if rev in seen: continue
198 198 seen[rev] = 1
199 199 yield str(rev)
200 200
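revrange accepts plain revision numbers, symbolic names and START:END ranges (walked backwards when START > END), suppressing duplicates. Illustrative calls, assuming a repository whose tip is revision 5:

    # Illustrative only; results assume the repository has revisions 0-5.
    list(revrange(ui, repo, ["5:3", "0"]))   # ['5', '4', '3', '0']
    list(revrange(ui, repo, ["tip", "5"]))   # ['5'] - the duplicate is dropped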
201 201 def make_filename(repo, r, pat, node=None,
202 202 total=None, seqno=None, revwidth=None, pathname=None):
203 203 node_expander = {
204 204 'H': lambda: hex(node),
205 205 'R': lambda: str(r.rev(node)),
206 206 'h': lambda: short(node),
207 207 }
208 208 expander = {
209 209 '%': lambda: '%',
210 210 'b': lambda: os.path.basename(repo.root),
211 211 }
212 212
213 213 try:
214 214 if node:
215 215 expander.update(node_expander)
216 216 if node and revwidth is not None:
217 217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
218 218 if total is not None:
219 219 expander['N'] = lambda: str(total)
220 220 if seqno is not None:
221 221 expander['n'] = lambda: str(seqno)
222 222 if total is not None and seqno is not None:
223 223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
224 224 if pathname is not None:
225 225 expander['s'] = lambda: os.path.basename(pathname)
226 226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
227 227 expander['p'] = lambda: pathname
228 228
229 229 newname = []
230 230 patlen = len(pat)
231 231 i = 0
232 232 while i < patlen:
233 233 c = pat[i]
234 234 if c == '%':
235 235 i += 1
236 236 c = pat[i]
237 237 c = expander[c]()
238 238 newname.append(c)
239 239 i += 1
240 240 return ''.join(newname)
241 241 except KeyError, inst:
242 242 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
243 243 inst.args[0])
244 244
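make_filename expands %-escapes in the output pattern: %H/%h/%R for the node, %b for the repository basename, %r for the zero-padded revision, %N and %n for the patch total and sequence number, and %s/%d/%p for parts of a pathname. An illustrative call; node, total, seqno and revwidth here are placeholders for values an export-style caller already has:

    # Illustrative; the resulting name depends on the repository and node.
    name = make_filename(repo, repo.changelog, '%b-r%r-%n-of-%N.patch',
                         node=node, total=total, seqno=seqno, revwidth=3)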
245 245 def make_file(repo, r, pat, node=None,
246 246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
247 247 if not pat or pat == '-':
248 248 return 'w' in mode and sys.stdout or sys.stdin
249 249 if hasattr(pat, 'write') and 'w' in mode:
250 250 return pat
251 251 if hasattr(pat, 'read') and 'r' in mode:
252 252 return pat
253 253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
254 254 pathname),
255 255 mode)
256 256
257 257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
258 258 changes=None, text=False):
259 259 if not changes:
260 260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
261 261 else:
262 262 (c, a, d, u) = changes
263 263 if files:
264 264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
265 265
266 266 if not c and not a and not d:
267 267 return
268 268
269 269 if node2:
270 270 change = repo.changelog.read(node2)
271 271 mmap2 = repo.manifest.read(change[0])
272 272 date2 = util.datestr(change[2])
273 273 def read(f):
274 274 return repo.file(f).read(mmap2[f])
275 275 else:
276 276 date2 = util.datestr()
277 277 if not node1:
278 278 node1 = repo.dirstate.parents()[0]
279 279 def read(f):
280 280 return repo.wfile(f).read()
281 281
282 282 if ui.quiet:
283 283 r = None
284 284 else:
285 285 hexfunc = ui.verbose and hex or short
286 286 r = [hexfunc(node) for node in [node1, node2] if node]
287 287
288 288 change = repo.changelog.read(node1)
289 289 mmap = repo.manifest.read(change[0])
290 290 date1 = util.datestr(change[2])
291 291
292 292 for f in c:
293 293 to = None
294 294 if f in mmap:
295 295 to = repo.file(f).read(mmap[f])
296 296 tn = read(f)
297 297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
298 298 for f in a:
299 299 to = None
300 300 tn = read(f)
301 301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
302 302 for f in d:
303 303 to = repo.file(f).read(mmap[f])
304 304 tn = None
305 305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
306 306
307 307 def trimuser(ui, name, rev, revcache):
308 308 """trim the name of the user who committed a change"""
309 309 user = revcache.get(rev)
310 310 if user is None:
311 311 user = revcache[rev] = ui.shortuser(name)
312 312 return user
313 313
314 314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
315 315 """show a single changeset or file revision"""
316 316 log = repo.changelog
317 317 if changenode is None:
318 318 changenode = log.node(rev)
319 319 elif not rev:
320 320 rev = log.rev(changenode)
321 321
322 322 if ui.quiet:
323 323 ui.write("%d:%s\n" % (rev, short(changenode)))
324 324 return
325 325
326 326 changes = log.read(changenode)
327 327 date = util.datestr(changes[2])
328 328
329 329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
330 330 for p in log.parents(changenode)
331 331 if ui.debugflag or p != nullid]
332 332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
333 333 parents = []
334 334
335 335 if ui.verbose:
336 336 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
337 337 else:
338 338 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
339 339
340 340 for tag in repo.nodetags(changenode):
341 341 ui.status(_("tag: %s\n") % tag)
342 342 for parent in parents:
343 343 ui.write(_("parent: %d:%s\n") % parent)
344 344
345 345 if brinfo and changenode in brinfo:
346 346 br = brinfo[changenode]
347 347 ui.write(_("branch: %s\n") % " ".join(br))
348 348
349 349 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
350 350 hex(changes[0])))
351 351 ui.status(_("user: %s\n") % changes[1])
352 352 ui.status(_("date: %s\n") % date)
353 353
354 354 if ui.debugflag:
355 355 files = repo.changes(log.parents(changenode)[0], changenode)
356 356 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
357 357 if value:
358 358 ui.note("%-12s %s\n" % (key, " ".join(value)))
359 359 else:
360 360 ui.note(_("files: %s\n") % " ".join(changes[3]))
361 361
362 362 description = changes[4].strip()
363 363 if description:
364 364 if ui.verbose:
365 365 ui.status(_("description:\n"))
366 366 ui.status(description)
367 367 ui.status("\n\n")
368 368 else:
369 369 ui.status(_("summary: %s\n") % description.splitlines()[0])
370 370 ui.status("\n")
371 371
372 372 def show_version(ui):
373 373 """output version and copyright information"""
374 374 ui.write(_("Mercurial Distributed SCM (version %s)\n")
375 375 % version.get_version())
376 376 ui.status(_(
377 377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
378 378 "This is free software; see the source for copying conditions. "
379 379 "There is NO\nwarranty; "
380 380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
381 381 ))
382 382
383 383 def help_(ui, cmd=None, with_version=False):
384 384 """show help for a given command or all commands"""
385 385 option_lists = []
386 386 if cmd and cmd != 'shortlist':
387 387 if with_version:
388 388 show_version(ui)
389 389 ui.write('\n')
390 390 key, i = find(cmd)
391 391 # synopsis
392 392 ui.write("%s\n\n" % i[2])
393 393
394 394 # description
395 395 doc = i[0].__doc__
396 396 if ui.quiet:
397 397 doc = doc.splitlines(0)[0]
398 398 ui.write("%s\n" % doc.rstrip())
399 399
400 400 if not ui.quiet:
401 401 # aliases
402 402 aliases = ', '.join(key.split('|')[1:])
403 403 if aliases:
404 404 ui.write(_("\naliases: %s\n") % aliases)
405 405
406 406 # options
407 407 if i[1]:
408 408 option_lists.append(("options", i[1]))
409 409
410 410 else:
411 411 # program name
412 412 if ui.verbose or with_version:
413 413 show_version(ui)
414 414 else:
415 415 ui.status(_("Mercurial Distributed SCM\n"))
416 416 ui.status('\n')
417 417
418 418 # list of commands
419 419 if cmd == "shortlist":
420 420 ui.status(_('basic commands (use "hg help" '
421 421 'for the full list or option "-v" for details):\n\n'))
422 422 elif ui.verbose:
423 423 ui.status(_('list of commands:\n\n'))
424 424 else:
425 425 ui.status(_('list of commands (use "hg help -v" '
426 426 'to show aliases and global options):\n\n'))
427 427
428 428 h = {}
429 429 cmds = {}
430 430 for c, e in table.items():
431 431 f = c.split("|")[0]
432 432 if cmd == "shortlist" and not f.startswith("^"):
433 433 continue
434 434 f = f.lstrip("^")
435 435 if not ui.debugflag and f.startswith("debug"):
436 436 continue
437 437 d = ""
438 438 if e[0].__doc__:
439 439 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 440 h[f] = d
441 441 cmds[f]=c.lstrip("^")
442 442
443 443 fns = h.keys()
444 444 fns.sort()
445 445 m = max(map(len, fns))
446 446 for f in fns:
447 447 if ui.verbose:
448 448 commands = cmds[f].replace("|",", ")
449 449 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 450 else:
451 451 ui.write(' %-*s %s\n' % (m, f, h[f]))
452 452
453 453 # global options
454 454 if ui.verbose:
455 455 option_lists.append(("global options", globalopts))
456 456
457 457 # list all option lists
458 458 opt_output = []
459 459 for title, options in option_lists:
460 460 opt_output.append(("\n%s:\n" % title, None))
461 461 for shortopt, longopt, default, desc in options:
462 462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 463 longopt and " --%s" % longopt),
464 464 "%s%s" % (desc,
465 465 default and _(" (default: %s)") % default
466 466 or "")))
467 467
468 468 if opt_output:
469 469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
470 470 for first, second in opt_output:
471 471 if second:
472 472 ui.write(" %-*s %s\n" % (opts_len, first, second))
473 473 else:
474 474 ui.write("%s\n" % first)
475 475
476 476 # Commands start here, listed alphabetically
477 477
478 478 def add(ui, repo, *pats, **opts):
479 479 '''add the specified files on the next commit'''
480 480 names = []
481 481 for src, abs, rel, exact in walk(repo, pats, opts):
482 482 if exact:
483 483 if ui.verbose: ui.status(_('adding %s\n') % rel)
484 484 names.append(abs)
485 485 elif repo.dirstate.state(abs) == '?':
486 486 ui.status(_('adding %s\n') % rel)
487 487 names.append(abs)
488 488 repo.add(names)
489 489
490 490 def addremove(ui, repo, *pats, **opts):
491 491 """add all new files, delete all missing files"""
492 492 add, remove = [], []
493 493 for src, abs, rel, exact in walk(repo, pats, opts):
494 494 if src == 'f' and repo.dirstate.state(abs) == '?':
495 495 add.append(abs)
496 496 if ui.verbose or not exact:
497 497 ui.status(_('adding %s\n') % rel)
498 498 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
499 499 remove.append(abs)
500 500 if ui.verbose or not exact:
501 501 ui.status(_('removing %s\n') % rel)
502 502 repo.add(add)
503 503 repo.remove(remove)
504 504
505 505 def annotate(ui, repo, *pats, **opts):
506 506 """show changeset information per file line"""
507 507 def getnode(rev):
508 508 return short(repo.changelog.node(rev))
509 509
510 510 ucache = {}
511 511 def getname(rev):
512 512 cl = repo.changelog.read(repo.changelog.node(rev))
513 513 return trimuser(ui, cl[1], rev, ucache)
514 514
515 515 if not pats:
516 516 raise util.Abort(_('at least one file name or pattern required'))
517 517
518 518 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
519 519 if not opts['user'] and not opts['changeset']:
520 520 opts['number'] = 1
521 521
522 522 if opts['rev']:
523 523 node = repo.changelog.lookup(opts['rev'])
524 524 else:
525 525 node = repo.dirstate.parents()[0]
526 526 change = repo.changelog.read(node)
527 527 mmap = repo.manifest.read(change[0])
528 528
529 529 for src, abs, rel, exact in walk(repo, pats, opts):
530 530 if abs not in mmap:
531 531 ui.warn(_("warning: %s is not in the repository!\n") % rel)
532 532 continue
533 533
534 534 f = repo.file(abs)
535 535 if not opts['text'] and util.binary(f.read(mmap[abs])):
536 536 ui.write(_("%s: binary file\n") % rel)
537 537 continue
538 538
539 539 lines = f.annotate(mmap[abs])
540 540 pieces = []
541 541
542 542 for o, f in opmap:
543 543 if opts[o]:
544 544 l = [f(n) for n, dummy in lines]
545 545 if l:
546 546 m = max(map(len, l))
547 547 pieces.append(["%*s" % (m, x) for x in l])
548 548
549 549 if pieces:
550 550 for p, l in zip(zip(*pieces), lines):
551 551 ui.write("%s: %s" % (" ".join(p), l[1]))
552 552
553 553 def bundle(ui, repo, fname, dest="default-push", **opts):
554 554 """create a changegroup file"""
555 555 f = open(fname, "wb")
556 556 dest = ui.expandpath(dest)
557 557 other = hg.repository(ui, dest)
558 558 o = repo.findoutgoing(other)
559 559 cg = repo.changegroup(o)
560 560
561 561 try:
562 562 f.write("HG10")
563 563 z = bz2.BZ2Compressor(9)
564 564 while 1:
565 565 chunk = cg.read(4096)
566 566 if not chunk:
567 567 break
568 568 f.write(z.compress(chunk))
569 569 f.write(z.flush())
570 570 except:
571 571 os.unlink(fname)
572 572 raise
573 573
574 574 def cat(ui, repo, file1, *pats, **opts):
575 575 """output the latest or given revisions of files"""
576 576 mf = {}
577 577 if opts['rev']:
578 578 change = repo.changelog.read(repo.lookup(opts['rev']))
579 579 mf = repo.manifest.read(change[0])
580 580 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
581 581 r = repo.file(abs)
582 582 if opts['rev']:
583 583 try:
584 584 n = mf[abs]
585 585 except (hg.RepoError, KeyError):
586 586 try:
587 587 n = r.lookup(opts['rev'])
588 588 except KeyError, inst:
589 589 raise util.Abort(_('cannot find file %s in rev %s'), rel, opts['rev'])
590 590 else:
591 591 n = r.tip()
592 592 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
593 593 fp.write(r.read(n))
594 594
595 595 def clone(ui, source, dest=None, **opts):
596 596 """make a copy of an existing repository"""
597 597 if dest is None:
598 598 dest = os.path.basename(os.path.normpath(source))
599 599
600 600 if os.path.exists(dest):
601 601 raise util.Abort(_("destination '%s' already exists"), dest)
602 602
603 603 dest = os.path.realpath(dest)
604 604
605 605 class Dircleanup:
606 606 def __init__(self, dir_):
607 607 self.rmtree = shutil.rmtree
608 608 self.dir_ = dir_
609 609 os.mkdir(dir_)
610 610 def close(self):
611 611 self.dir_ = None
612 612 def __del__(self):
613 613 if self.dir_:
614 614 self.rmtree(self.dir_, True)
615 615
616 616 if opts['ssh']:
617 617 ui.setconfig("ui", "ssh", opts['ssh'])
618 618 if opts['remotecmd']:
619 619 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
620 620
621 621 if not os.path.exists(source):
622 622 source = ui.expandpath(source)
623 623
624 624 d = Dircleanup(dest)
625 625 abspath = source
626 626 other = hg.repository(ui, source)
627 627
628 628 copy = False
629 629 if other.dev() != -1:
630 630 abspath = os.path.abspath(source)
631 631 if not opts['pull']:
632 632 copy = True
633 633
634 634 if copy:
635 635 try:
636 636 # we use a lock here because if we race with commit, we
637 637 # can end up with extra data in the cloned revlogs that's
638 638 # not pointed to by changesets, thus causing verify to
639 639 # fail
640 640 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
641 641 except OSError:
642 642 copy = False
643 643
644 644 if copy:
645 645 # we lock here to avoid premature writing to the target
646 646 os.mkdir(os.path.join(dest, ".hg"))
647 647 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
648 648
649 649 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
650 650 for f in files.split():
651 651 src = os.path.join(source, ".hg", f)
652 652 dst = os.path.join(dest, ".hg", f)
653 653 util.copyfiles(src, dst)
654 654
655 655 repo = hg.repository(ui, dest)
656 656
657 657 else:
658 658 repo = hg.repository(ui, dest, create=1)
659 659 repo.pull(other)
660 660
661 661 f = repo.opener("hgrc", "w", text=True)
662 662 f.write("[paths]\n")
663 663 f.write("default = %s\n" % abspath)
664 664
665 665 if not opts['noupdate']:
666 666 update(ui, repo)
667 667
668 668 d.close()
669 669
670 670 def commit(ui, repo, *pats, **opts):
671 671 """commit the specified files or all outstanding changes"""
672 672 if opts['text']:
673 673 ui.warn(_("Warning: -t and --text are deprecated,"
674 674 " please use -m or --message instead.\n"))
675 675 message = opts['message'] or opts['text']
676 676 logfile = opts['logfile']
677 677
678 678 if message and logfile:
679 679 raise util.Abort(_('options --message and --logfile are mutually '
680 680 'exclusive'))
681 681 if not message and logfile:
682 682 try:
683 683 if logfile == '-':
684 684 message = sys.stdin.read()
685 685 else:
686 686 message = open(logfile).read()
687 687 except IOError, inst:
688 688 raise util.Abort(_("can't read commit message '%s': %s") %
689 689 (logfile, inst.strerror))
690 690
691 691 if opts['addremove']:
692 692 addremove(ui, repo, *pats, **opts)
693 693 cwd = repo.getcwd()
694 694 if not pats and cwd:
695 695 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
696 696 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
697 697 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
698 698 pats, opts)
699 699 if pats:
700 700 c, a, d, u = repo.changes(files=fns, match=match)
701 701 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
702 702 else:
703 703 files = []
704 704 try:
705 705 repo.commit(files, message, opts['user'], opts['date'], match)
706 706 except ValueError, inst:
707 707 raise util.Abort(str(inst))
708 708
709 709 def docopy(ui, repo, pats, opts):
710 710 if not pats:
711 711 raise util.Abort(_('no source or destination specified'))
712 712 elif len(pats) == 1:
713 713 raise util.Abort(_('no destination specified'))
714 714 pats = list(pats)
715 715 dest = pats.pop()
716 716 sources = []
717 717
718 718 def okaytocopy(abs, rel, exact):
719 719 reasons = {'?': _('is not managed'),
720 720 'a': _('has been marked for add')}
721 721 reason = reasons.get(repo.dirstate.state(abs))
722 722 if reason:
723 723 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
724 724 else:
725 725 return True
726 726
727 727 for src, abs, rel, exact in walk(repo, pats, opts):
728 728 if okaytocopy(abs, rel, exact):
729 729 sources.append((abs, rel, exact))
730 730 if not sources:
731 731 raise util.Abort(_('no files to copy'))
732 732
733 733 cwd = repo.getcwd()
734 734 absdest = util.canonpath(repo.root, cwd, dest)
735 735 reldest = util.pathto(cwd, absdest)
736 736 if os.path.exists(reldest):
737 737 destisfile = not os.path.isdir(reldest)
738 738 else:
739 739 destisfile = len(sources) == 1 or repo.dirstate.state(absdest) != '?'
740 740
741 741 if destisfile:
742 742 if opts['parents']:
743 743 raise util.Abort(_('with --parents, destination must be a directory'))
744 744 elif len(sources) > 1:
745 745 raise util.Abort(_('with multiple sources, destination must be a '
746 746 'directory'))
747 747 errs, copied = 0, []
748 748 for abs, rel, exact in sources:
749 749 if opts['parents']:
750 750 mydest = os.path.join(dest, rel)
751 751 elif destisfile:
752 752 mydest = reldest
753 753 else:
754 754 mydest = os.path.join(dest, os.path.basename(rel))
755 755 myabsdest = util.canonpath(repo.root, cwd, mydest)
756 756 myreldest = util.pathto(cwd, myabsdest)
757 757 if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
758 758 ui.warn(_('%s: not overwriting - file already managed\n') % myreldest)
759 759 continue
760 760 mydestdir = os.path.dirname(myreldest) or '.'
761 761 if not opts['after']:
762 762 try:
763 763 if opts['parents']: os.makedirs(mydestdir)
764 764 elif not destisfile: os.mkdir(mydestdir)
765 765 except OSError, inst:
766 766 if inst.errno != errno.EEXIST: raise
767 767 if ui.verbose or not exact:
768 768 ui.status(_('copying %s to %s\n') % (rel, myreldest))
769 769 if not opts['after']:
770 770 try:
771 771 shutil.copyfile(rel, myreldest)
772 772 shutil.copymode(rel, myreldest)
773 773 except shutil.Error, inst:
774 774 raise util.Abort(str(inst))
775 775 except IOError, inst:
776 776 if inst.errno == errno.ENOENT:
777 777 ui.warn(_('%s: deleted in working copy\n') % rel)
778 778 else:
779 779 ui.warn(_('%s: cannot copy - %s\n') % (rel, inst.strerror))
780 780 errs += 1
781 781 continue
782 782 repo.copy(abs, myabsdest)
783 783 copied.append((abs, rel, exact))
784 784 if errs:
785 785 ui.warn(_('(consider using --after)\n'))
786 786 return errs, copied
787 787
788 788 def copy(ui, repo, *pats, **opts):
789 789 """mark files as copied for the next commit"""
790 790 errs, copied = docopy(ui, repo, pats, opts)
791 791 return errs
792 792
793 793 def debugancestor(ui, index, rev1, rev2):
794 794 """find the ancestor revision of two revisions in a given index"""
795 795 r = revlog.revlog(file, index, "")
796 796 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
797 797 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
798 798
799 799 def debugcheckstate(ui, repo):
800 800 """validate the correctness of the current dirstate"""
801 801 parent1, parent2 = repo.dirstate.parents()
802 802 repo.dirstate.read()
803 803 dc = repo.dirstate.map
804 804 keys = dc.keys()
805 805 keys.sort()
806 806 m1n = repo.changelog.read(parent1)[0]
807 807 m2n = repo.changelog.read(parent2)[0]
808 808 m1 = repo.manifest.read(m1n)
809 809 m2 = repo.manifest.read(m2n)
810 810 errors = 0
811 811 for f in dc:
812 812 state = repo.dirstate.state(f)
813 813 if state in "nr" and f not in m1:
814 814 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
815 815 errors += 1
816 816 if state in "a" and f in m1:
817 817 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
818 818 errors += 1
819 819 if state in "m" and f not in m1 and f not in m2:
820 820 ui.warn(_("%s in state %s, but not in either manifest\n") %
821 821 (f, state))
822 822 errors += 1
823 823 for f in m1:
824 824 state = repo.dirstate.state(f)
825 825 if state not in "nrm":
826 826 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
827 827 errors += 1
828 828 if errors:
829 829 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
830 830
831 831 def debugconfig(ui):
832 832 """show combined config settings from all hgrc files"""
833 833 try:
834 834 repo = hg.repository(ui)
835 835 except hg.RepoError:
836 836 pass
837 837 for section, name, value in ui.walkconfig():
838 838 ui.write('%s.%s=%s\n' % (section, name, value))
839 839
840 840 def debugsetparents(ui, repo, rev1, rev2=None):
841 841 """
842 842 manually set the parents of the current working directory
843 843
844 844 This is useful for writing repository conversion tools, but should
845 845 be used with care.
846 846 """
847 847
848 848 if not rev2:
849 849 rev2 = hex(nullid)
850 850
851 851 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
852 852
853 853 def debugstate(ui, repo):
854 854 """show the contents of the current dirstate"""
855 855 repo.dirstate.read()
856 856 dc = repo.dirstate.map
857 857 keys = dc.keys()
858 858 keys.sort()
859 859 for file_ in keys:
860 860 ui.write("%c %3o %10d %s %s\n"
861 861 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
862 862 time.strftime("%x %X",
863 863 time.localtime(dc[file_][3])), file_))
864 864 for f in repo.dirstate.copies:
865 865 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
866 866
867 867 def debugdata(ui, file_, rev):
868 868 """dump the contents of a data file revision"""
869 869 r = revlog.revlog(file, file_[:-2] + ".i", file_)
870 870 try:
871 871 ui.write(r.revision(r.lookup(rev)))
872 872 except KeyError:
873 873 raise util.Abort(_('invalid revision identifier %s'), rev)
874 874
875 875 def debugindex(ui, file_):
876 876 """dump the contents of an index file"""
877 877 r = revlog.revlog(file, file_, "")
878 878 ui.write(" rev offset length base linkrev" +
879 879 " nodeid p1 p2\n")
880 880 for i in range(r.count()):
881 881 e = r.index[i]
882 882 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
883 883 i, e[0], e[1], e[2], e[3],
884 884 short(e[6]), short(e[4]), short(e[5])))
885 885
886 886 def debugindexdot(ui, file_):
887 887 """dump an index DAG as a .dot file"""
888 888 r = revlog.revlog(file, file_, "")
889 889 ui.write("digraph G {\n")
890 890 for i in range(r.count()):
891 891 e = r.index[i]
892 892 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
893 893 if e[5] != nullid:
894 894 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
895 895 ui.write("}\n")
896 896
897 897 def debugrename(ui, repo, file, rev=None):
898 898 """dump rename information"""
899 899 r = repo.file(relpath(repo, [file])[0])
900 900 if rev:
901 901 try:
902 902 # assume all revision numbers are for changesets
903 903 n = repo.lookup(rev)
904 904 change = repo.changelog.read(n)
905 905 m = repo.manifest.read(change[0])
906 906 n = m[relpath(repo, [file])[0]]
907 907 except (hg.RepoError, KeyError):
908 908 n = r.lookup(rev)
909 909 else:
910 910 n = r.tip()
911 911 m = r.renamed(n)
912 912 if m:
913 913 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
914 914 else:
915 915 ui.write(_("not renamed\n"))
916 916
917 917 def debugwalk(ui, repo, *pats, **opts):
918 918 """show how files match on given patterns"""
919 919 items = list(walk(repo, pats, opts))
920 920 if not items:
921 921 return
922 922 fmt = '%%s %%-%ds %%-%ds %%s' % (
923 923 max([len(abs) for (src, abs, rel, exact) in items]),
924 924 max([len(rel) for (src, abs, rel, exact) in items]))
925 925 for src, abs, rel, exact in items:
926 926 line = fmt % (src, abs, rel, exact and 'exact' or '')
927 927 ui.write("%s\n" % line.rstrip())
928 928
929 929 def diff(ui, repo, *pats, **opts):
930 930 """diff working directory (or selected files)"""
931 931 node1, node2 = None, None
932 932 revs = [repo.lookup(x) for x in opts['rev']]
933 933
934 934 if len(revs) > 0:
935 935 node1 = revs[0]
936 936 if len(revs) > 1:
937 937 node2 = revs[1]
938 938 if len(revs) > 2:
939 939 raise util.Abort(_("too many revisions to diff"))
940 940
941 941 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
942 942
943 943 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
944 944 text=opts['text'])
945 945
946 946 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
947 947 node = repo.lookup(changeset)
948 948 prev, other = repo.changelog.parents(node)
949 949 change = repo.changelog.read(node)
950 950
951 951 fp = make_file(repo, repo.changelog, opts['output'],
952 952 node=node, total=total, seqno=seqno,
953 953 revwidth=revwidth)
954 954 if fp != sys.stdout:
955 955 ui.note("%s\n" % fp.name)
956 956
957 957 fp.write("# HG changeset patch\n")
958 958 fp.write("# User %s\n" % change[1])
959 959 fp.write("# Node ID %s\n" % hex(node))
960 960 fp.write("# Parent %s\n" % hex(prev))
961 961 if other != nullid:
962 962 fp.write("# Parent %s\n" % hex(other))
963 963 fp.write(change[4].rstrip())
964 964 fp.write("\n\n")
965 965
966 966 dodiff(fp, ui, repo, prev, node, text=opts['text'])
967 967 if fp != sys.stdout:
968 968 fp.close()
969 969
970 970 def export(ui, repo, *changesets, **opts):
971 971 """dump the header and diffs for one or more changesets"""
972 972 if not changesets:
973 973 raise util.Abort(_("export requires at least one changeset"))
974 974 seqno = 0
975 975 revs = list(revrange(ui, repo, changesets))
976 976 total = len(revs)
977 977 revwidth = max(map(len, revs))
978 978 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
979 979 for cset in revs:
980 980 seqno += 1
981 981 doexport(ui, repo, cset, seqno, total, revwidth, opts)
982 982
983 983 def forget(ui, repo, *pats, **opts):
984 984 """don't add the specified files on the next commit"""
985 985 forget = []
986 986 for src, abs, rel, exact in walk(repo, pats, opts):
987 987 if repo.dirstate.state(abs) == 'a':
988 988 forget.append(abs)
989 989 if ui.verbose or not exact:
990 990 ui.status(_('forgetting %s\n') % rel)
991 991 repo.forget(forget)
992 992
993 993 def grep(ui, repo, pattern, *pats, **opts):
994 994 """search for a pattern in specified files and revisions"""
995 995 reflags = 0
996 996 if opts['ignore_case']:
997 997 reflags |= re.I
998 998 regexp = re.compile(pattern, reflags)
999 999 sep, eol = ':', '\n'
1000 1000 if opts['print0']:
1001 1001 sep = eol = '\0'
1002 1002
1003 1003 fcache = {}
1004 1004 def getfile(fn):
1005 1005 if fn not in fcache:
1006 1006 fcache[fn] = repo.file(fn)
1007 1007 return fcache[fn]
1008 1008
1009 1009 def matchlines(body):
1010 1010 begin = 0
1011 1011 linenum = 0
1012 1012 while True:
1013 1013 match = regexp.search(body, begin)
1014 1014 if not match:
1015 1015 break
1016 1016 mstart, mend = match.span()
1017 1017 linenum += body.count('\n', begin, mstart) + 1
1018 1018 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1019 1019 lend = body.find('\n', mend)
1020 1020 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1021 1021 begin = lend + 1
1022 1022
1023 1023 class linestate:
1024 1024 def __init__(self, line, linenum, colstart, colend):
1025 1025 self.line = line
1026 1026 self.linenum = linenum
1027 1027 self.colstart = colstart
1028 1028 self.colend = colend
1029 1029 def __eq__(self, other):
1030 1030 return self.line == other.line
1031 1031 def __hash__(self):
1032 1032 return hash(self.line)
1033 1033
1034 1034 matches = {}
1035 1035 def grepbody(fn, rev, body):
1036 1036 matches[rev].setdefault(fn, {})
1037 1037 m = matches[rev][fn]
1038 1038 for lnum, cstart, cend, line in matchlines(body):
1039 1039 s = linestate(line, lnum, cstart, cend)
1040 1040 m[s] = s
1041 1041
1042 1042 prev = {}
1043 1043 ucache = {}
1044 1044 def display(fn, rev, states, prevstates):
1045 1045 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1046 1046 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1047 1047 counts = {'-': 0, '+': 0}
1048 1048 filerevmatches = {}
1049 1049 for l in diff:
1050 1050 if incrementing or not opts['all']:
1051 1051 change = ((l in prevstates) and '-') or '+'
1052 1052 r = rev
1053 1053 else:
1054 1054 change = ((l in states) and '-') or '+'
1055 1055 r = prev[fn]
1056 1056 cols = [fn, str(rev)]
1057 1057 if opts['line_number']: cols.append(str(l.linenum))
1058 1058 if opts['all']: cols.append(change)
1059 1059 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1060 1060 ucache))
1061 1061 if opts['files_with_matches']:
1062 1062 c = (fn, rev)
1063 1063 if c in filerevmatches: continue
1064 1064 filerevmatches[c] = 1
1065 1065 else:
1066 1066 cols.append(l.line)
1067 1067 ui.write(sep.join(cols), eol)
1068 1068 counts[change] += 1
1069 1069 return counts['+'], counts['-']
1070 1070
1071 1071 fstate = {}
1072 1072 skip = {}
1073 1073 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1074 1074 count = 0
1075 1075 incrementing = False
1076 1076 for st, rev, fns in changeiter:
1077 1077 if st == 'window':
1078 1078 incrementing = rev
1079 1079 matches.clear()
1080 1080 elif st == 'add':
1081 1081 change = repo.changelog.read(repo.lookup(str(rev)))
1082 1082 mf = repo.manifest.read(change[0])
1083 1083 matches[rev] = {}
1084 1084 for fn in fns:
1085 1085 if fn in skip: continue
1086 1086 fstate.setdefault(fn, {})
1087 1087 try:
1088 1088 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1089 1089 except KeyError:
1090 1090 pass
1091 1091 elif st == 'iter':
1092 1092 states = matches[rev].items()
1093 1093 states.sort()
1094 1094 for fn, m in states:
1095 1095 if fn in skip: continue
1096 1096 if incrementing or not opts['all'] or fstate[fn]:
1097 1097 pos, neg = display(fn, rev, m, fstate[fn])
1098 1098 count += pos + neg
1099 1099 if pos and not opts['all']:
1100 1100 skip[fn] = True
1101 1101 fstate[fn] = m
1102 1102 prev[fn] = rev
1103 1103
1104 1104 if not incrementing:
1105 1105 fstate = fstate.items()
1106 1106 fstate.sort()
1107 1107 for fn, state in fstate:
1108 1108 if fn in skip: continue
1109 1109 display(fn, rev, {}, state)
1110 1110 return (count == 0 and 1) or 0
1111 1111
1112 1112 def heads(ui, repo, **opts):
1113 1113 """show current repository heads"""
1114 1114 heads = repo.changelog.heads()
1115 1115 br = None
1116 1116 if opts['branches']:
1117 1117 br = repo.branchlookup(heads)
1118 1118 for n in repo.changelog.heads():
1119 1119 show_changeset(ui, repo, changenode=n, brinfo=br)
1120 1120
1121 1121 def identify(ui, repo):
1122 1122 """print information about the working copy"""
1123 1123 parents = [p for p in repo.dirstate.parents() if p != nullid]
1124 1124 if not parents:
1125 1125 ui.write(_("unknown\n"))
1126 1126 return
1127 1127
1128 1128 hexfunc = ui.verbose and hex or short
1129 1129 (c, a, d, u) = repo.changes()
1130 1130 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1131 1131 (c or a or d) and "+" or "")]
1132 1132
1133 1133 if not ui.quiet:
1134 1134 # multiple tags for a single parent separated by '/'
1135 1135 parenttags = ['/'.join(tags)
1136 1136 for tags in map(repo.nodetags, parents) if tags]
1137 1137 # tags for multiple parents separated by ' + '
1138 1138 if parenttags:
1139 1139 output.append(' + '.join(parenttags))
1140 1140
1141 1141 ui.write("%s\n" % ' '.join(output))
1142 1142
1143 1143 def import_(ui, repo, patch1, *patches, **opts):
1144 1144 """import an ordered set of patches"""
1145 1145 patches = (patch1,) + patches
1146 1146
1147 1147 if not opts['force']:
1148 1148 (c, a, d, u) = repo.changes()
1149 1149 if c or a or d:
1150 1150 raise util.Abort(_("outstanding uncommitted changes"))
1151 1151
1152 1152 d = opts["base"]
1153 1153 strip = opts["strip"]
1154 1154
1155 1155 mailre = re.compile(r'(?:From |[\w-]+:)')
1156 1156
1157 1157 # attempt to detect the start of a patch
1158 1158 # (this heuristic is borrowed from quilt)
1159 1159 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1160 1160 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1161 1161 '(---|\*\*\*)[ \t])')
1162 1162
1163 1163 for patch in patches:
1164 1164 ui.status(_("applying %s\n") % patch)
1165 1165 pf = os.path.join(d, patch)
1166 1166
1167 1167 message = []
1168 1168 user = None
1169 1169 hgpatch = False
1170 1170 for line in file(pf):
1171 1171 line = line.rstrip()
1172 1172 if (not message and not hgpatch and
1173 1173 mailre.match(line) and not opts['force']):
1174 1174 if len(line) > 35: line = line[:32] + '...'
1175 1175 raise util.Abort(_('first line looks like a '
1176 1176 'mail header: ') + line)
1177 1177 if diffre.match(line):
1178 1178 break
1179 1179 elif hgpatch:
1180 1180 # parse values when importing the result of an hg export
1181 1181 if line.startswith("# User "):
1182 1182 user = line[7:]
1183 1183 ui.debug(_('User: %s\n') % user)
1184 1184 elif not line.startswith("# ") and line:
1185 1185 message.append(line)
1186 1186 hgpatch = False
1187 1187 elif line == '# HG changeset patch':
1188 1188 hgpatch = True
1189 1189 message = [] # We may have collected garbage
1190 1190 else:
1191 1191 message.append(line)
1192 1192
1193 1193 # make sure message isn't empty
1194 1194 if not message:
1195 1195 message = _("imported patch %s\n") % patch
1196 1196 else:
1197 1197 message = "%s\n" % '\n'.join(message)
1198 1198 ui.debug(_('message:\n%s\n') % message)
1199 1199
1200 1200 files = util.patch(strip, pf, ui)
1201 1201
1202 1202 if len(files) > 0:
1203 1203 addremove(ui, repo, *files)
1204 1204 repo.commit(files, message, user)
1205 1205
1206 1206 def incoming(ui, repo, source="default", **opts):
1207 1207 """show new changesets found in source"""
1208 1208 source = ui.expandpath(source)
1209 1209 other = hg.repository(ui, source)
1210 1210 if not other.local():
1211 1211 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1212 1212 o = repo.findincoming(other)
1213 1213 if not o:
1214 1214 return
1215 1215 o = other.newer(o)
1216 1216 for n in o:
1217 1217 show_changeset(ui, other, changenode=n)
1218 1218 if opts['patch']:
1219 1219 prev = other.changelog.parents(n)[0]
1220 1220 dodiff(ui, ui, other, prev, n)
1221 1221 ui.write("\n")
1222 1222
1223 1223 def init(ui, dest="."):
1224 1224 """create a new repository in the given directory"""
1225 1225 if not os.path.exists(dest):
1226 1226 os.mkdir(dest)
1227 1227 hg.repository(ui, dest, create=1)
1228 1228
1229 1229 def locate(ui, repo, *pats, **opts):
1230 1230 """locate files matching specific patterns"""
1231 1231 end = opts['print0'] and '\0' or '\n'
1232 1232
1233 1233 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1234 1234 if repo.dirstate.state(abs) == '?':
1235 1235 continue
1236 1236 if opts['fullpath']:
1237 1237 ui.write(os.path.join(repo.root, abs), end)
1238 1238 else:
1239 1239 ui.write(rel, end)
1240 1240
1241 1241 def log(ui, repo, *pats, **opts):
1242 1242 """show revision history of entire repository or files"""
1243 1243 class dui:
1244 1244 # Implement and delegate some ui protocol. Save hunks of
1245 1245 # output for later display in the desired order.
1246 1246 def __init__(self, ui):
1247 1247 self.ui = ui
1248 1248 self.hunk = {}
1249 1249 def bump(self, rev):
1250 1250 self.rev = rev
1251 1251 self.hunk[rev] = []
1252 1252 def note(self, *args):
1253 1253 if self.verbose:
1254 1254 self.write(*args)
1255 1255 def status(self, *args):
1256 1256 if not self.quiet:
1257 1257 self.write(*args)
1258 1258 def write(self, *args):
1259 1259 self.hunk[self.rev].append(args)
1260 1260 def debug(self, *args):
1261 1261 if self.debugflag:
1262 1262 self.write(*args)
1263 1263 def __getattr__(self, key):
1264 1264 return getattr(self.ui, key)
1265 1265 cwd = repo.getcwd()
1266 1266 if not pats and cwd:
1267 1267 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1268 1268 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1269 1269 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1270 1270 pats, opts)
1271 1271 for st, rev, fns in changeiter:
1272 1272 if st == 'window':
1273 1273 du = dui(ui)
1274 1274 elif st == 'add':
1275 1275 du.bump(rev)
1276 1276 br = None
1277 1277 if opts['keyword']:
1278 1278 changes = repo.changelog.read(repo.changelog.node(rev))
1279 1279 miss = 0
1280 1280 for k in [kw.lower() for kw in opts['keyword']]:
1281 1281 if not (k in changes[1].lower() or
1282 1282 k in changes[4].lower() or
1283 1283 k in " ".join(changes[3][:20]).lower()):
1284 1284 miss = 1
1285 1285 break
1286 1286 if miss:
1287 1287 continue
1288 1288
1289 1289 if opts['branch']:
1290 1290 br = repo.branchlookup([repo.changelog.node(rev)])
1291 1291
1292 1292 show_changeset(du, repo, rev, brinfo=br)
1293 1293 if opts['patch']:
1294 1294 changenode = repo.changelog.node(rev)
1295 1295 prev, other = repo.changelog.parents(changenode)
1296 1296 dodiff(du, du, repo, prev, changenode, fns)
1297 1297 du.write("\n\n")
1298 1298 elif st == 'iter':
1299 1299 for args in du.hunk[rev]:
1300 1300 ui.write(*args)
1301 1301
1302 1302 def manifest(ui, repo, rev=None):
1303 1303 """output the latest or given revision of the project manifest"""
1304 1304 if rev:
1305 1305 try:
1306 1306 # assume all revision numbers are for changesets
1307 1307 n = repo.lookup(rev)
1308 1308 change = repo.changelog.read(n)
1309 1309 n = change[0]
1310 1310 except hg.RepoError:
1311 1311 n = repo.manifest.lookup(rev)
1312 1312 else:
1313 1313 n = repo.manifest.tip()
1314 1314 m = repo.manifest.read(n)
1315 1315 mf = repo.manifest.readflags(n)
1316 1316 files = m.keys()
1317 1317 files.sort()
1318 1318
1319 1319 for f in files:
1320 1320 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1321 1321
1322 1322 def outgoing(ui, repo, dest="default-push", **opts):
1323 1323 """show changesets not found in destination"""
1324 1324 dest = ui.expandpath(dest)
1325 1325 other = hg.repository(ui, dest)
1326 1326 o = repo.findoutgoing(other)
1327 1327 o = repo.newer(o)
1328 1328 for n in o:
1329 1329 show_changeset(ui, repo, changenode=n)
1330 1330 if opts['patch']:
1331 1331 prev = repo.changelog.parents(n)[0]
1332 1332 dodiff(ui, ui, repo, prev, n)
1333 1333 ui.write("\n")
1334 1334
1335 1335 def parents(ui, repo, rev=None):
1336 1336 """show the parents of the working dir or revision"""
1337 1337 if rev:
1338 1338 p = repo.changelog.parents(repo.lookup(rev))
1339 1339 else:
1340 1340 p = repo.dirstate.parents()
1341 1341
1342 1342 for n in p:
1343 1343 if n != nullid:
1344 1344 show_changeset(ui, repo, changenode=n)
1345 1345
1346 1346 def paths(ui, search=None):
1347 1347 """show definition of symbolic path names"""
1348 1348 try:
1349 1349 repo = hg.repository(ui=ui)
1350 1350 except hg.RepoError:
1351 1351 pass
1352 1352
1353 1353 if search:
1354 1354 for name, path in ui.configitems("paths"):
1355 1355 if name == search:
1356 1356 ui.write("%s\n" % path)
1357 1357 return
1358 1358 ui.warn(_("not found!\n"))
1359 1359 return 1
1360 1360 else:
1361 1361 for name, path in ui.configitems("paths"):
1362 1362 ui.write("%s = %s\n" % (name, path))
1363 1363
1364 1364 def pull(ui, repo, source="default", **opts):
1365 1365 """pull changes from the specified source"""
1366 1366 source = ui.expandpath(source)
1367 1367 ui.status(_('pulling from %s\n') % (source))
1368 1368
1369 1369 if opts['ssh']:
1370 1370 ui.setconfig("ui", "ssh", opts['ssh'])
1371 1371 if opts['remotecmd']:
1372 1372 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1373 1373
1374 1374 other = hg.repository(ui, source)
1375 1375 r = repo.pull(other)
1376 1376 if not r:
1377 1377 if opts['update']:
1378 1378 return update(ui, repo)
1379 1379 else:
1380 1380 ui.status(_("(run 'hg update' to get a working copy)\n"))
1381 1381
1382 1382 return r
1383 1383
1384 1384 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1385 1385 """push changes to the specified destination"""
1386 1386 dest = ui.expandpath(dest)
1387 1387 ui.status('pushing to %s\n' % (dest))
1388 1388
1389 1389 if ssh:
1390 1390 ui.setconfig("ui", "ssh", ssh)
1391 1391 if remotecmd:
1392 1392 ui.setconfig("ui", "remotecmd", remotecmd)
1393 1393
1394 1394 other = hg.repository(ui, dest)
1395 1395 r = repo.push(other, force)
1396 1396 return r
1397 1397
1398 1398 def rawcommit(ui, repo, *flist, **rc):
1399 1399 "raw commit interface"
1400 1400 if rc['text']:
1401 1401 ui.warn(_("Warning: -t and --text are deprecated,"
1402 1402 " please use -m or --message instead.\n"))
1403 1403 message = rc['message'] or rc['text']
1404 1404 if not message and rc['logfile']:
1405 1405 try:
1406 1406 message = open(rc['logfile']).read()
1407 1407 except IOError:
1408 1408 pass
1409 1409 if not message and not rc['logfile']:
1410 1410 raise util.Abort(_("missing commit message"))
1411 1411
1412 1412 files = relpath(repo, list(flist))
1413 1413 if rc['files']:
1414 1414 files += open(rc['files']).read().splitlines()
1415 1415
1416 1416 rc['parent'] = map(repo.lookup, rc['parent'])
1417 1417
1418 1418 try:
1419 1419 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1420 1420 except ValueError, inst:
1421 1421 raise util.Abort(str(inst))
1422 1422
1423 1423 def recover(ui, repo):
1424 1424 """roll back an interrupted transaction"""
1425 1425 repo.recover()
1426 1426
1427 1427 def remove(ui, repo, pat, *pats, **opts):
1428 1428 """remove the specified files on the next commit"""
1429 1429 names = []
1430 1430 def okaytoremove(abs, rel, exact):
1431 1431 c, a, d, u = repo.changes(files = [abs])
1432 1432 reason = None
1433 1433 if c: reason = _('is modified')
1434 1434 elif a: reason = _('has been marked for add')
1435 1435 elif u: reason = _('is not managed')
1436 1436 if reason:
1437 1437 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1438 1438 else:
1439 1439 return True
1440 1440 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1441 1441 if okaytoremove(abs, rel, exact):
1442 1442 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1443 1443 names.append(abs)
1444 for name in names:
1445 try:
1446 os.unlink(name)
1447 except OSError, inst:
1448 if inst.errno != errno.ENOENT: raise
1449 repo.remove(names)
1444 repo.remove(names, unlink=True)
1450 1445
1451 1446 def rename(ui, repo, *pats, **opts):
1452 1447 """rename files; equivalent of copy + remove"""
1453 1448 errs, copied = docopy(ui, repo, pats, opts)
1454 1449 names = []
1455 1450 for abs, rel, exact in copied:
1456 1451 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1457 try:
1458 os.unlink(rel)
1459 except OSError, inst:
1460 if inst.errno != errno.ENOENT: raise
1461 1452 names.append(abs)
1462 repo.remove(names)
1453 repo.remove(names, unlink=True)
1463 1454 return errs
1464 1455
1465 1456 def revert(ui, repo, *names, **opts):
1466 1457 """revert modified files or dirs back to their unmodified states"""
1467 1458 node = opts['rev'] and repo.lookup(opts['rev']) or \
1468 1459 repo.dirstate.parents()[0]
1469 1460 root = os.path.realpath(repo.root)
1470 1461
1471 1462 def trimpath(p):
1472 1463 p = os.path.realpath(p)
1473 1464 if p.startswith(root):
1474 1465 rest = p[len(root):]
1475 1466 if not rest:
1476 1467 return rest
1477 1468 if p.startswith(os.sep):
1478 1469 return rest[1:]
1479 1470 return p
1480 1471
1481 1472 relnames = map(trimpath, names or [os.getcwd()])
1482 1473 chosen = {}
1483 1474
1484 1475 def choose(name):
1485 1476 def body(name):
1486 1477 for r in relnames:
1487 1478 if not name.startswith(r):
1488 1479 continue
1489 1480 rest = name[len(r):]
1490 1481 if not rest:
1491 1482 return r, True
1492 1483 depth = rest.count(os.sep)
1493 1484 if not r:
1494 1485 if depth == 0 or not opts['nonrecursive']:
1495 1486 return r, True
1496 1487 elif rest[0] == os.sep:
1497 1488 if depth == 1 or not opts['nonrecursive']:
1498 1489 return r, True
1499 1490 return None, False
1500 1491 relname, ret = body(name)
1501 1492 if ret:
1502 1493 chosen[relname] = 1
1503 1494 return ret
1504 1495
1505 1496 r = repo.update(node, False, True, choose, False)
1506 1497 for n in relnames:
1507 1498 if n not in chosen:
1508 1499 ui.warn(_('error: no matches for %s\n') % n)
1509 1500 r = 1
1510 1501 sys.stdout.flush()
1511 1502 return r
1512 1503
1513 1504 def root(ui, repo):
1514 1505 """print the root (top) of the current working dir"""
1515 1506 ui.write(repo.root + "\n")
1516 1507
1517 1508 def serve(ui, repo, **opts):
1518 1509 """export the repository via HTTP"""
1519 1510
1520 1511 if opts["stdio"]:
1521 1512 fin, fout = sys.stdin, sys.stdout
1522 1513 sys.stdout = sys.stderr
1523 1514
1524 1515 def getarg():
1525 1516 argline = fin.readline()[:-1]
1526 1517 arg, l = argline.split()
1527 1518 val = fin.read(int(l))
1528 1519 return arg, val
1529 1520 def respond(v):
1530 1521 fout.write("%d\n" % len(v))
1531 1522 fout.write(v)
1532 1523 fout.flush()
1533 1524
1534 1525 lock = None
1535 1526
1536 1527 while 1:
1537 1528 cmd = fin.readline()[:-1]
1538 1529 if cmd == '':
1539 1530 return
1540 1531 if cmd == "heads":
1541 1532 h = repo.heads()
1542 1533 respond(" ".join(map(hex, h)) + "\n")
1543 1534 if cmd == "lock":
1544 1535 lock = repo.lock()
1545 1536 respond("")
1546 1537 if cmd == "unlock":
1547 1538 if lock:
1548 1539 lock.release()
1549 1540 lock = None
1550 1541 respond("")
1551 1542 elif cmd == "branches":
1552 1543 arg, nodes = getarg()
1553 1544 nodes = map(bin, nodes.split(" "))
1554 1545 r = []
1555 1546 for b in repo.branches(nodes):
1556 1547 r.append(" ".join(map(hex, b)) + "\n")
1557 1548 respond("".join(r))
1558 1549 elif cmd == "between":
1559 1550 arg, pairs = getarg()
1560 1551 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1561 1552 r = []
1562 1553 for b in repo.between(pairs):
1563 1554 r.append(" ".join(map(hex, b)) + "\n")
1564 1555 respond("".join(r))
1565 1556 elif cmd == "changegroup":
1566 1557 nodes = []
1567 1558 arg, roots = getarg()
1568 1559 nodes = map(bin, roots.split(" "))
1569 1560
1570 1561 cg = repo.changegroup(nodes)
1571 1562 while 1:
1572 1563 d = cg.read(4096)
1573 1564 if not d:
1574 1565 break
1575 1566 fout.write(d)
1576 1567
1577 1568 fout.flush()
1578 1569
1579 1570 elif cmd == "addchangegroup":
1580 1571 if not lock:
1581 1572 respond("not locked")
1582 1573 continue
1583 1574 respond("")
1584 1575
1585 1576 r = repo.addchangegroup(fin)
1586 1577 respond("")
1587 1578
1588 1579 optlist = "name templates style address port ipv6 accesslog errorlog"
1589 1580 for o in optlist.split():
1590 1581 if opts[o]:
1591 1582 ui.setconfig("web", o, opts[o])
1592 1583
1593 1584 try:
1594 1585 httpd = hgweb.create_server(repo)
1595 1586 except socket.error, inst:
1596 1587 raise util.Abort('cannot start server: ' + inst.args[1])
1597 1588
1598 1589 if ui.verbose:
1599 1590 addr, port = httpd.socket.getsockname()
1600 1591 if addr == '0.0.0.0':
1601 1592 addr = socket.gethostname()
1602 1593 else:
1603 1594 try:
1604 1595 addr = socket.gethostbyaddr(addr)[0]
1605 1596 except socket.error:
1606 1597 pass
1607 1598 if port != 80:
1608 1599 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1609 1600 else:
1610 1601 ui.status(_('listening at http://%s/\n') % addr)
1611 1602 httpd.serve_forever()
1612 1603
1613 1604 def status(ui, repo, *pats, **opts):
1614 1605 '''show changed files in the working directory
1615 1606
1616 1607 M = modified
1617 1608 A = added
1618 1609 R = removed
1619 1610 ? = not tracked
1620 1611 '''
1621 1612
1622 1613 cwd = repo.getcwd()
1623 1614 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1624 1615 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1625 1616 for n in repo.changes(files=files, match=matchfn)]
1626 1617
1627 1618 changetypes = [(_('modified'), 'M', c),
1628 1619 (_('added'), 'A', a),
1629 1620 (_('removed'), 'R', d),
1630 1621 (_('unknown'), '?', u)]
1631 1622
1632 1623 end = opts['print0'] and '\0' or '\n'
1633 1624
1634 1625 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1635 1626 or changetypes):
1636 1627 if opts['no_status']:
1637 1628 format = "%%s%s" % end
1638 1629 else:
1639 1630 format = "%s %%s%s" % (char, end);
1640 1631
1641 1632 for f in changes:
1642 1633 ui.write(format % f)
1643 1634
1644 1635 def tag(ui, repo, name, rev=None, **opts):
1645 1636 """add a tag for the current tip or a given revision"""
1646 1637 if opts['text']:
1647 1638 ui.warn(_("Warning: -t and --text are deprecated,"
1648 1639 " please use -m or --message instead.\n"))
1649 1640 if name == "tip":
1650 1641 raise util.Abort(_("the name 'tip' is reserved"))
1651 1642 if rev:
1652 1643 r = hex(repo.lookup(rev))
1653 1644 else:
1654 1645 r = hex(repo.changelog.tip())
1655 1646
1656 1647 if name.find(revrangesep) >= 0:
1657 1648 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
1658 1649
1659 1650 if opts['local']:
1660 1651 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1661 1652 return
1662 1653
1663 1654 (c, a, d, u) = repo.changes()
1664 1655 for x in (c, a, d, u):
1665 1656 if ".hgtags" in x:
1666 1657 raise util.Abort(_("working copy of .hgtags is changed "
1667 1658 "(please commit .hgtags manually)"))
1668 1659
1669 1660 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1670 1661 if repo.dirstate.state(".hgtags") == '?':
1671 1662 repo.add([".hgtags"])
1672 1663
1673 1664 message = (opts['message'] or opts['text'] or
1674 1665 _("Added tag %s for changeset %s") % (name, r))
1675 1666 try:
1676 1667 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1677 1668 except ValueError, inst:
1678 1669 raise util.Abort(str(inst))
1679 1670
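# A minimal sketch (illustrative node and tag name): both .hgtags and
# .hg/localtags store one appended line per tag -- the full 40-character
# hex changeset id, a space, and the tag name.  When the files are read
# back, later lines override earlier ones for the same name.
node = "0123456789abcdef0123456789abcdef01234567"
line = "%s %s\n" % (node, "v0.7")      # what the write() calls above append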
1680 1671 def tags(ui, repo):
1681 1672 """list repository tags"""
1682 1673
1683 1674 l = repo.tagslist()
1684 1675 l.reverse()
1685 1676 for t, n in l:
1686 1677 try:
1687 1678 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
1688 1679 except KeyError:
1689 1680 r = " ?:?"
1690 1681 ui.write("%-30s %s\n" % (t, r))
1691 1682
1692 1683 def tip(ui, repo):
1693 1684 """show the tip revision"""
1694 1685 n = repo.changelog.tip()
1695 1686 show_changeset(ui, repo, changenode=n)
1696 1687
1697 1688 def unbundle(ui, repo, fname):
1698 1689 """apply a changegroup file"""
1699 1690 f = urllib.urlopen(fname)
1700 1691
1701 1692 if f.read(4) != "HG10":
1702 1693 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
1703 1694
1704 1695 def bzgenerator(f):
1705 1696 zd = bz2.BZ2Decompressor()
1706 1697 for chunk in f:
1707 1698 yield zd.decompress(chunk)
1708 1699 yield zd.flush()
1709 1700
1710 1701 bzgen = bzgenerator(util.filechunkiter(f, 4096))
1711 1702 repo.addchangegroup(util.chunkbuffer(bzgen))
1712 1703
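# A minimal sketch (helper names are illustrative): a bundle is the 4-byte
# magic "HG10" followed by a bz2 stream containing the raw changegroup.
# Decompressing it chunk by chunk, as bzgenerator() above does, keeps
# memory use bounded for large bundles.
import bz2

def checkbundle(f):
    # f is any file-like object opened in binary mode
    if f.read(4) != "HG10":
        raise ValueError("not a Mercurial bundle")
    return decompress(f)

def decompress(f):
    zd = bz2.BZ2Decompressor()
    while 1:
        chunk = f.read(4096)
        if not chunk:
            break
        data = zd.decompress(chunk)
        if data:
            yield data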
1713 1704 def undo(ui, repo):
1714 1705 """undo the last commit or pull
1715 1706
1716 1707 Roll back the last pull or commit transaction on the
1717 1708 repository, restoring the project to its earlier state.
1718 1709
1719 1710 This command should be used with care. There is only one level of
1720 1711 undo and there is no redo.
1721 1712
1722 1713 This command is not intended for use on public repositories. Once
1723 1714 a change is visible for pull by other users, undoing it locally is
1724 1715 ineffective.
1725 1716 """
1726 1717 repo.undo()
1727 1718
1728 1719 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1729 1720 '''update or merge working directory
1730 1721
1731 1722 If there are no outstanding changes in the working directory and
1732 1723 there is a linear relationship between the current version and the
1733 1724 requested version, the result is the requested version.
1734 1725
1735 1726 Otherwise the result is a merge between the contents of the
1736 1727 current working directory and the requested version. Files that
1737 1728 changed relative to either parent are marked as changed for the next
1738 1729 commit, and a commit must be performed before any further updates
1739 1730 are allowed.
1740 1731 '''
1741 1732 if branch:
1742 1733 br = repo.branchlookup(branch=branch)
1743 1734 found = []
1744 1735 for x in br:
1745 1736 if branch in br[x]:
1746 1737 found.append(x)
1747 1738 if len(found) > 1:
1748 1739 ui.warn(_("Found multiple heads for %s\n") % branch)
1749 1740 for x in found:
1750 1741 show_changeset(ui, repo, changenode=x, brinfo=br)
1751 1742 return 1
1752 1743 if len(found) == 1:
1753 1744 node = found[0]
1754 1745 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
1755 1746 else:
1756 1747 ui.warn(_("branch %s not found\n") % (branch))
1757 1748 return 1
1758 1749 else:
1759 1750 node = node and repo.lookup(node) or repo.changelog.tip()
1760 1751 return repo.update(node, allow=merge, force=clean)
1761 1752
1762 1753 def verify(ui, repo):
1763 1754 """verify the integrity of the repository"""
1764 1755 return repo.verify()
1765 1756
1766 1757 # Command options and aliases are listed here, alphabetically
1767 1758
1768 1759 table = {
1769 1760 "^add":
1770 1761 (add,
1771 1762 [('I', 'include', [], _('include path in search')),
1772 1763 ('X', 'exclude', [], _('exclude path from search'))],
1773 1764 "hg add [OPTION]... [FILE]..."),
1774 1765 "addremove":
1775 1766 (addremove,
1776 1767 [('I', 'include', [], _('include path in search')),
1777 1768 ('X', 'exclude', [], _('exclude path from search'))],
1778 1769 _("hg addremove [OPTION]... [FILE]...")),
1779 1770 "^annotate":
1780 1771 (annotate,
1781 1772 [('r', 'rev', '', _('revision')),
1782 1773 ('a', 'text', None, _('treat all files as text')),
1783 1774 ('u', 'user', None, _('show user')),
1784 1775 ('n', 'number', None, _('show revision number')),
1785 1776 ('c', 'changeset', None, _('show changeset')),
1786 1777 ('I', 'include', [], _('include path in search')),
1787 1778 ('X', 'exclude', [], _('exclude path from search'))],
1788 1779 _('hg annotate [OPTION]... FILE...')),
1789 1780 "bundle":
1790 1781 (bundle,
1791 1782 [],
1792 1783 _('hg bundle FILE DEST')),
1793 1784 "cat":
1794 1785 (cat,
1795 1786 [('I', 'include', [], _('include path in search')),
1796 1787 ('X', 'exclude', [], _('exclude path from search')),
1797 1788 ('o', 'output', "", _('output to file')),
1798 1789 ('r', 'rev', '', _('revision'))],
1799 1790 _('hg cat [OPTION]... FILE...')),
1800 1791 "^clone":
1801 1792 (clone,
1802 1793 [('U', 'noupdate', None, _('skip update after cloning')),
1803 1794 ('e', 'ssh', "", _('ssh command')),
1804 1795 ('', 'pull', None, _('use pull protocol to copy metadata')),
1805 1796 ('', 'remotecmd', "", _('remote hg command'))],
1806 1797 _('hg clone [OPTION]... SOURCE [DEST]')),
1807 1798 "^commit|ci":
1808 1799 (commit,
1809 1800 [('A', 'addremove', None, _('run add/remove during commit')),
1810 1801 ('I', 'include', [], _('include path in search')),
1811 1802 ('X', 'exclude', [], _('exclude path from search')),
1812 1803 ('m', 'message', "", _('commit message')),
1813 1804 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1814 1805 ('l', 'logfile', "", _('commit message file')),
1815 1806 ('d', 'date', "", _('date code')),
1816 1807 ('u', 'user', "", _('user'))],
1817 1808 _('hg commit [OPTION]... [FILE]...')),
1818 1809 "copy|cp": (copy,
1819 1810 [('I', 'include', [], _('include path in search')),
1820 1811 ('X', 'exclude', [], _('exclude path from search')),
1821 1812 ('A', 'after', None, _('record a copy after it has happened')),
1822 1813 ('f', 'force', None, _('replace destination if it exists')),
1823 1814 ('p', 'parents', None, _('append source path to dest'))],
1824 1815 _('hg copy [OPTION]... [SOURCE]... DEST')),
1825 1816 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
1826 1817 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
1827 1818 "debugconfig": (debugconfig, [], _('debugconfig')),
1828 1819 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
1829 1820 "debugstate": (debugstate, [], _('debugstate')),
1830 1821 "debugdata": (debugdata, [], _('debugdata FILE REV')),
1831 1822 "debugindex": (debugindex, [], _('debugindex FILE')),
1832 1823 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
1833 1824 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
1834 1825 "debugwalk":
1835 1826 (debugwalk,
1836 1827 [('I', 'include', [], _('include path in search')),
1837 1828 ('X', 'exclude', [], _('exclude path from search'))],
1838 1829 _('debugwalk [OPTION]... [FILE]...')),
1839 1830 "^diff":
1840 1831 (diff,
1841 1832 [('r', 'rev', [], _('revision')),
1842 1833 ('a', 'text', None, _('treat all files as text')),
1843 1834 ('I', 'include', [], _('include path in search')),
1844 1835 ('X', 'exclude', [], _('exclude path from search'))],
1845 1836 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
1846 1837 "^export":
1847 1838 (export,
1848 1839 [('o', 'output', "", _('output to file')),
1849 1840 ('a', 'text', None, _('treat all files as text'))],
1850 1841 _("hg export [-a] [-o OUTFILE] REV...")),
1851 1842 "forget":
1852 1843 (forget,
1853 1844 [('I', 'include', [], _('include path in search')),
1854 1845 ('X', 'exclude', [], _('exclude path from search'))],
1855 1846 _("hg forget [OPTION]... FILE...")),
1856 1847 "grep":
1857 1848 (grep,
1858 1849 [('0', 'print0', None, _('end fields with NUL')),
1859 1850 ('I', 'include', [], _('include path in search')),
1860 1851 ('X', 'exclude', [], _('exclude path from search')),
1861 1852 ('', 'all', None, _('print all revisions with matches')),
1862 1853 ('i', 'ignore-case', None, _('ignore case when matching')),
1863 1854 ('l', 'files-with-matches', None, _('print names of files and revs with matches')),
1864 1855 ('n', 'line-number', None, _('print line numbers')),
1865 1856 ('r', 'rev', [], _('search in revision rev')),
1866 1857 ('u', 'user', None, _('print user who made change'))],
1867 1858 _("hg grep [OPTION]... PATTERN [FILE]...")),
1868 1859 "heads":
1869 1860 (heads,
1870 1861 [('b', 'branches', None, _('find branch info'))],
1871 1862 _('hg heads [-b]')),
1872 1863 "help": (help_, [], _('hg help [COMMAND]')),
1873 1864 "identify|id": (identify, [], _('hg identify')),
1874 1865 "import|patch":
1875 1866 (import_,
1876 1867 [('p', 'strip', 1, _('path strip')),
1877 1868 ('f', 'force', None, _('skip check for outstanding changes')),
1878 1869 ('b', 'base', "", _('base path'))],
1879 1870 _("hg import [-f] [-p NUM] [-b BASE] PATCH...")),
1880 1871 "incoming|in": (incoming,
1881 1872 [('p', 'patch', None, _('show patch'))],
1882 1873 _('hg incoming [-p] [SOURCE]')),
1883 1874 "^init": (init, [], _('hg init [DEST]')),
1884 1875 "locate":
1885 1876 (locate,
1886 1877 [('r', 'rev', '', _('revision')),
1887 1878 ('0', 'print0', None, _('end filenames with NUL')),
1888 1879 ('f', 'fullpath', None, _('print complete paths')),
1889 1880 ('I', 'include', [], _('include path in search')),
1890 1881 ('X', 'exclude', [], _('exclude path from search'))],
1891 1882 _('hg locate [OPTION]... [PATTERN]...')),
1892 1883 "^log|history":
1893 1884 (log,
1894 1885 [('I', 'include', [], _('include path in search')),
1895 1886 ('X', 'exclude', [], _('exclude path from search')),
1896 1887 ('b', 'branch', None, _('show branches')),
1897 1888 ('k', 'keyword', [], _('search for a keyword')),
1898 1889 ('r', 'rev', [], _('revision')),
1899 1890 ('p', 'patch', None, _('show patch'))],
1900 1891 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
1901 1892 "manifest": (manifest, [], _('hg manifest [REV]')),
1902 1893 "outgoing|out": (outgoing,
1903 1894 [('p', 'patch', None, _('show patch'))],
1904 1895 _('hg outgoing [-p] [DEST]')),
1905 1896 "parents": (parents, [], _('hg parents [REV]')),
1906 1897 "paths": (paths, [], _('hg paths [NAME]')),
1907 1898 "^pull":
1908 1899 (pull,
1909 1900 [('u', 'update', None, _('update working directory')),
1910 1901 ('e', 'ssh', "", _('ssh command')),
1911 1902 ('', 'remotecmd', "", _('remote hg command'))],
1912 1903 _('hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]')),
1913 1904 "^push":
1914 1905 (push,
1915 1906 [('f', 'force', None, _('force push')),
1916 1907 ('e', 'ssh', "", _('ssh command')),
1917 1908 ('', 'remotecmd', "", _('remote hg command'))],
1918 1909 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
1919 1910 "rawcommit":
1920 1911 (rawcommit,
1921 1912 [('p', 'parent', [], _('parent')),
1922 1913 ('d', 'date', "", _('date code')),
1923 1914 ('u', 'user', "", _('user')),
1924 1915 ('F', 'files', "", _('file list')),
1925 1916 ('m', 'message', "", _('commit message')),
1926 1917 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1927 1918 ('l', 'logfile', "", _('commit message file'))],
1928 1919 _('hg rawcommit [OPTION]... [FILE]...')),
1929 1920 "recover": (recover, [], _("hg recover")),
1930 1921 "^remove|rm": (remove,
1931 1922 [('I', 'include', [], _('include path in search')),
1932 1923 ('X', 'exclude', [], _('exclude path from search'))],
1933 1924 _("hg remove [OPTION]... FILE...")),
1934 1925 "rename|mv": (rename,
1935 1926 [('I', 'include', [], _('include path in search')),
1936 1927 ('X', 'exclude', [], _('exclude path from search')),
1937 1928 ('A', 'after', None, _('record a rename after it has happened')),
1938 1929 ('f', 'force', None, _('replace destination if it exists')),
1939 1930 ('p', 'parents', None, _('append source path to dest'))],
1940 1931 _('hg rename [OPTION]... [SOURCE]... DEST')),
1941 1932 "^revert":
1942 1933 (revert,
1943 1934 [("n", "nonrecursive", None, _("don't recurse into subdirs")),
1944 1935 ("r", "rev", "", _("revision"))],
1945 1936 _("hg revert [-n] [-r REV] [NAME]...")),
1946 1937 "root": (root, [], _("hg root")),
1947 1938 "^serve":
1948 1939 (serve,
1949 1940 [('A', 'accesslog', '', _('access log file')),
1950 1941 ('E', 'errorlog', '', _('error log file')),
1951 1942 ('p', 'port', 0, _('listen port')),
1952 1943 ('a', 'address', '', _('interface address')),
1953 1944 ('n', 'name', "", _('repository name')),
1954 1945 ('', 'stdio', None, _('for remote clients')),
1955 1946 ('t', 'templates', "", _('template directory')),
1956 1947 ('', 'style', "", _('template style')),
1957 1948 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
1958 1949 _("hg serve [OPTION]...")),
1959 1950 "^status":
1960 1951 (status,
1961 1952 [('m', 'modified', None, _('show only modified files')),
1962 1953 ('a', 'added', None, _('show only added files')),
1963 1954 ('r', 'removed', None, _('show only removed files')),
1964 1955 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
1965 1956 ('n', 'no-status', None, _('hide status prefix')),
1966 1957 ('0', 'print0', None, _('end filenames with NUL')),
1967 1958 ('I', 'include', [], _('include path in search')),
1968 1959 ('X', 'exclude', [], _('exclude path from search'))],
1969 1960 _("hg status [OPTION]... [FILE]...")),
1970 1961 "tag":
1971 1962 (tag,
1972 1963 [('l', 'local', None, _('make the tag local')),
1973 1964 ('m', 'message', "", _('commit message')),
1974 1965 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1975 1966 ('d', 'date', "", _('date code')),
1976 1967 ('u', 'user', "", _('user'))],
1977 1968 _('hg tag [OPTION]... NAME [REV]')),
1978 1969 "tags": (tags, [], _('hg tags')),
1979 1970 "tip": (tip, [], _('hg tip')),
1980 1971 "unbundle":
1981 1972 (unbundle,
1982 1973 [],
1983 1974 _('hg unbundle FILE')),
1984 1975 "undo": (undo, [], _('hg undo')),
1985 1976 "^update|up|checkout|co":
1986 1977 (update,
1987 1978 [('b', 'branch', "", _('checkout the head of a specific branch')),
1988 1979 ('m', 'merge', None, _('allow merging of conflicts')),
1989 1980 ('C', 'clean', None, _('overwrite locally modified files'))],
1990 1981 _('hg update [-b TAG] [-m] [-C] [REV]')),
1991 1982 "verify": (verify, [], _('hg verify')),
1992 1983 "version": (show_version, [], _('hg version')),
1993 1984 }
1994 1985
1995 1986 globalopts = [
1996 1987 ('R', 'repository', "", _('repository root directory')),
1997 1988 ('', 'cwd', '', _('change working directory')),
1998 1989 ('y', 'noninteractive', None, _('run non-interactively')),
1999 1990 ('q', 'quiet', None, _('quiet mode')),
2000 1991 ('v', 'verbose', None, _('verbose mode')),
2001 1992 ('', 'debug', None, _('debug mode')),
2002 1993 ('', 'debugger', None, _('start debugger')),
2003 1994 ('', 'traceback', None, _('print traceback on exception')),
2004 1995 ('', 'time', None, _('time how long the command takes')),
2005 1996 ('', 'profile', None, _('profile')),
2006 1997 ('', 'version', None, _('output version information and exit')),
2007 1998 ('h', 'help', None, _('display help and exit')),
2008 1999 ]
2009 2000
2010 2001 norepo = ("clone init version help debugancestor debugconfig debugdata"
2011 2002 " debugindex debugindexdot paths")
2012 2003
2013 2004 def find(cmd):
2014 2005 for e in table.keys():
2015 2006 if re.match("(%s)$" % e, cmd):
2016 2007 return e, table[e]
2017 2008
2018 2009 raise UnknownCommand(cmd)
2019 2010
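# A minimal sketch (illustrative only): a table key such as
# "^update|up|checkout|co" lists every alias of a command.  The leading
# "^" only marks the command for the short help list; inside find()'s
# regex it is a harmless start-of-string anchor, so "(key)$" matches
# exactly one of the listed names and nothing else.
import re

key = "^update|up|checkout|co"
matches = dict([(c, bool(re.match("(%s)$" % key, c)))
                for c in ("update", "up", "co", "upd")])
# -> {'update': True, 'up': True, 'co': True, 'upd': False}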
2020 2011 class SignalInterrupt(Exception):
2021 2012 """Exception raised on SIGTERM and SIGHUP."""
2022 2013
2023 2014 def catchterm(*args):
2024 2015 raise SignalInterrupt
2025 2016
2026 2017 def run():
2027 2018 sys.exit(dispatch(sys.argv[1:]))
2028 2019
2029 2020 class ParseError(Exception):
2030 2021 """Exception raised on errors in parsing the command line."""
2031 2022
2032 2023 def parse(args):
2033 2024 options = {}
2034 2025 cmdoptions = {}
2035 2026
2036 2027 try:
2037 2028 args = fancyopts.fancyopts(args, globalopts, options)
2038 2029 except fancyopts.getopt.GetoptError, inst:
2039 2030 raise ParseError(None, inst)
2040 2031
2041 2032 if args:
2042 2033 cmd, args = args[0], args[1:]
2043 2034 i = find(cmd)[1]
2044 2035 c = list(i[1])
2045 2036 else:
2046 2037 cmd = None
2047 2038 c = []
2048 2039
2049 2040 # combine global options into local
2050 2041 for o in globalopts:
2051 2042 c.append((o[0], o[1], options[o[1]], o[3]))
2052 2043
2053 2044 try:
2054 2045 args = fancyopts.fancyopts(args, c, cmdoptions)
2055 2046 except fancyopts.getopt.GetoptError, inst:
2056 2047 raise ParseError(cmd, inst)
2057 2048
2058 2049 # separate global options back out
2059 2050 for o in globalopts:
2060 2051 n = o[1]
2061 2052 options[n] = cmdoptions[n]
2062 2053 del cmdoptions[n]
2063 2054
2064 2055 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2065 2056
2066 2057 def dispatch(args):
2067 2058 signal.signal(signal.SIGTERM, catchterm)
2068 2059 try:
2069 2060 signal.signal(signal.SIGHUP, catchterm)
2070 2061 except AttributeError:
2071 2062 pass
2072 2063
2073 2064 u = ui.ui()
2074 2065 external = []
2075 2066 for x in u.extensions():
2076 2067 if x[1]:
2077 2068 try:
2078 2069 mod = imp.load_source(x[0], x[1])
2079 2070 except:
2080 2071 u.warn(_("*** failed to import extension %s\n") % x[1])
2081 2072 continue
2082 2073 else:
2083 2074 def importh(name):
2084 2075 mod = __import__(name)
2085 2076 components = name.split('.')
2086 2077 for comp in components[1:]:
2087 2078 mod = getattr(mod, comp)
2088 2079 return mod
2089 2080 try:
2090 2081 mod = importh(x[0])
2091 2082 except:
2092 2083 u.warn(_("failed to import extension %s\n") % x[0])
2093 2084 continue
2094 2085
2095 2086 external.append(mod)
2096 2087 for x in external:
2097 2088 cmdtable = getattr(x, 'cmdtable', {})
2098 2089 for t in cmdtable:
2099 2090 if t in table:
2100 2091 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2101 2092 table.update(cmdtable)
2102 2093
2103 2094 try:
2104 2095 cmd, func, args, options, cmdoptions = parse(args)
2105 2096 except ParseError, inst:
2106 2097 if inst.args[0]:
2107 2098 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2108 2099 help_(u, inst.args[0])
2109 2100 else:
2110 2101 u.warn(_("hg: %s\n") % inst.args[1])
2111 2102 help_(u, 'shortlist')
2112 2103 sys.exit(-1)
2113 2104 except UnknownCommand, inst:
2114 2105 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2115 2106 help_(u, 'shortlist')
2116 2107 sys.exit(1)
2117 2108
2118 2109 if options["time"]:
2119 2110 def get_times():
2120 2111 t = os.times()
2121 2112 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2122 2113 t = (t[0], t[1], t[2], t[3], time.clock())
2123 2114 return t
2124 2115 s = get_times()
2125 2116 def print_time():
2126 2117 t = get_times()
2127 2118 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2128 2119 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2129 2120 atexit.register(print_time)
2130 2121
2131 2122 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2132 2123 not options["noninteractive"])
2133 2124
2134 2125 # enter the debugger before command execution
2135 2126 if options['debugger']:
2136 2127 pdb.set_trace()
2137 2128
2138 2129 try:
2139 2130 try:
2140 2131 if options['help']:
2141 2132 help_(u, cmd, options['version'])
2142 2133 sys.exit(0)
2143 2134 elif options['version']:
2144 2135 show_version(u)
2145 2136 sys.exit(0)
2146 2137 elif not cmd:
2147 2138 help_(u, 'shortlist')
2148 2139 sys.exit(0)
2149 2140
2150 2141 if options['cwd']:
2151 2142 try:
2152 2143 os.chdir(options['cwd'])
2153 2144 except OSError, inst:
2154 2145 raise util.Abort('%s: %s' %
2155 2146 (options['cwd'], inst.strerror))
2156 2147
2157 2148 if cmd not in norepo.split():
2158 2149 path = options["repository"] or ""
2159 2150 repo = hg.repository(ui=u, path=path)
2160 2151 for x in external:
2161 2152 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2162 2153 d = lambda: func(u, repo, *args, **cmdoptions)
2163 2154 else:
2164 2155 d = lambda: func(u, *args, **cmdoptions)
2165 2156
2166 2157 if options['profile']:
2167 2158 import hotshot, hotshot.stats
2168 2159 prof = hotshot.Profile("hg.prof")
2169 2160 r = prof.runcall(d)
2170 2161 prof.close()
2171 2162 stats = hotshot.stats.load("hg.prof")
2172 2163 stats.strip_dirs()
2173 2164 stats.sort_stats('time', 'calls')
2174 2165 stats.print_stats(40)
2175 2166 return r
2176 2167 else:
2177 2168 return d()
2178 2169 except:
2179 2170 # enter the debugger when we hit an exception
2180 2171 if options['debugger']:
2181 2172 pdb.post_mortem(sys.exc_info()[2])
2182 2173 if options['traceback']:
2183 2174 traceback.print_exc()
2184 2175 raise
2185 2176 except hg.RepoError, inst:
2186 2177 u.warn(_("abort: "), inst, "!\n")
2187 2178 except revlog.RevlogError, inst:
2188 2179 u.warn(_("abort: "), inst, "!\n")
2189 2180 except SignalInterrupt:
2190 2181 u.warn(_("killed!\n"))
2191 2182 except KeyboardInterrupt:
2192 2183 try:
2193 2184 u.warn(_("interrupted!\n"))
2194 2185 except IOError, inst:
2195 2186 if inst.errno == errno.EPIPE:
2196 2187 if u.debugflag:
2197 2188 u.warn(_("\nbroken pipe\n"))
2198 2189 else:
2199 2190 raise
2200 2191 except IOError, inst:
2201 2192 if hasattr(inst, "code"):
2202 2193 u.warn(_("abort: %s\n") % inst)
2203 2194 elif hasattr(inst, "reason"):
2204 2195 u.warn(_("abort: error: %s\n") % inst.reason[1])
2205 2196 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2206 2197 if u.debugflag:
2207 2198 u.warn(_("broken pipe\n"))
2208 2199 elif getattr(inst, "strerror", None):
2209 2200 if getattr(inst, "filename", None):
2210 2201 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2211 2202 else:
2212 2203 u.warn(_("abort: %s\n") % inst.strerror)
2213 2204 else:
2214 2205 raise
2215 2206 except OSError, inst:
2216 2207 if hasattr(inst, "filename"):
2217 2208 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2218 2209 else:
2219 2210 u.warn(_("abort: %s\n") % inst.strerror)
2220 2211 except util.Abort, inst:
2221 2212 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2222 2213 sys.exit(1)
2223 2214 except TypeError, inst:
2224 2215 # was this an argument error?
2225 2216 tb = traceback.extract_tb(sys.exc_info()[2])
2226 2217 if len(tb) > 2: # no, it was raised further down the call stack
2227 2218 raise
2228 2219 u.debug(inst, "\n")
2229 2220 u.warn(_("%s: invalid arguments\n") % cmd)
2230 2221 help_(u, cmd)
2231 2222 except UnknownCommand, inst:
2232 2223 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2233 2224 help_(u, 'shortlist')
2234 2225 except SystemExit:
2235 2226 # don't catch this in the catch-all below
2236 2227 raise
2237 2228 except:
2238 2229 u.warn(_("** unknown exception encountered, details follow\n"))
2239 2230 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2240 2231 raise
2241 2232
2242 2233 sys.exit(-1)
@@ -1,1449 +1,1452 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14 14
15 15 class localrepository:
16 16 def __init__(self, ui, path=None, create=0):
17 17 if not path:
18 18 p = os.getcwd()
19 19 while not os.path.isdir(os.path.join(p, ".hg")):
20 20 oldp = p
21 21 p = os.path.dirname(p)
22 22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 23 path = p
24 24 self.path = os.path.join(path, ".hg")
25 25
26 26 if not create and not os.path.isdir(self.path):
27 27 raise repo.RepoError(_("repository %s not found") % self.path)
28 28
29 29 self.root = os.path.abspath(path)
30 30 self.ui = ui
31 31 self.opener = util.opener(self.path)
32 32 self.wopener = util.opener(self.root)
33 33 self.manifest = manifest.manifest(self.opener)
34 34 self.changelog = changelog.changelog(self.opener)
35 35 self.tagscache = None
36 36 self.nodetagscache = None
37 37 self.encodepats = None
38 38 self.decodepats = None
39 39
40 40 if create:
41 41 os.mkdir(self.path)
42 42 os.mkdir(self.join("data"))
43 43
44 44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 45 try:
46 46 self.ui.readconfig(self.opener("hgrc"))
47 47 except IOError: pass
48 48
49 49 def hook(self, name, **args):
50 50 s = self.ui.config("hooks", name)
51 51 if s:
52 52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 53 old = {}
54 54 for k, v in args.items():
55 55 k = k.upper()
56 56 old[k] = os.environ.get(k, None)
57 57 os.environ[k] = v
58 58
59 59 # Hooks run in the repository root
60 60 olddir = os.getcwd()
61 61 os.chdir(self.root)
62 62 r = os.system(s)
63 63 os.chdir(olddir)
64 64
65 65 for k, v in old.items():
66 66 if v != None:
67 67 os.environ[k] = v
68 68 else:
69 69 del os.environ[k]
70 70
71 71 if r:
72 72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 73 (name, r))
74 74 return False
75 75 return True
76 76
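# A minimal sketch (illustrative hook command): hooks come from the [hooks]
# section of hgrc and run through os.system() from the repository root;
# every keyword argument passed to hook() is first exported as an uppercase
# environment variable, so with
#
#   [hooks]
#   commit = echo "committed $NODE" >> commits.log
#
# the hook invoked by hook("commit", node=hex(n)) sees the new changeset
# id in $NODE.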
77 77 def tags(self):
78 78 '''return a mapping of tag to node'''
79 79 if not self.tagscache:
80 80 self.tagscache = {}
81 81 def addtag(self, k, n):
82 82 try:
83 83 bin_n = bin(n)
84 84 except TypeError:
85 85 bin_n = ''
86 86 self.tagscache[k.strip()] = bin_n
87 87
88 88 try:
89 89 # read each head of the tags file, ending with the tip
90 90 # and add each tag found to the map, with "newer" ones
91 91 # taking precedence
92 92 fl = self.file(".hgtags")
93 93 h = fl.heads()
94 94 h.reverse()
95 95 for r in h:
96 96 for l in fl.read(r).splitlines():
97 97 if l:
98 98 n, k = l.split(" ", 1)
99 99 addtag(self, k, n)
100 100 except KeyError:
101 101 pass
102 102
103 103 try:
104 104 f = self.opener("localtags")
105 105 for l in f:
106 106 n, k = l.split(" ", 1)
107 107 addtag(self, k, n)
108 108 except IOError:
109 109 pass
110 110
111 111 self.tagscache['tip'] = self.changelog.tip()
112 112
113 113 return self.tagscache
114 114
115 115 def tagslist(self):
116 116 '''return a list of tags ordered by revision'''
117 117 l = []
118 118 for t, n in self.tags().items():
119 119 try:
120 120 r = self.changelog.rev(n)
121 121 except:
122 122 r = -2 # sort to the beginning of the list if unknown
123 123 l.append((r,t,n))
124 124 l.sort()
125 125 return [(t,n) for r,t,n in l]
126 126
127 127 def nodetags(self, node):
128 128 '''return the tags associated with a node'''
129 129 if not self.nodetagscache:
130 130 self.nodetagscache = {}
131 131 for t,n in self.tags().items():
132 132 self.nodetagscache.setdefault(n,[]).append(t)
133 133 return self.nodetagscache.get(node, [])
134 134
135 135 def lookup(self, key):
136 136 try:
137 137 return self.tags()[key]
138 138 except KeyError:
139 139 try:
140 140 return self.changelog.lookup(key)
141 141 except:
142 142 raise repo.RepoError(_("unknown revision '%s'") % key)
143 143
144 144 def dev(self):
145 145 return os.stat(self.path).st_dev
146 146
147 147 def local(self):
148 148 return True
149 149
150 150 def join(self, f):
151 151 return os.path.join(self.path, f)
152 152
153 153 def wjoin(self, f):
154 154 return os.path.join(self.root, f)
155 155
156 156 def file(self, f):
157 157 if f[0] == '/': f = f[1:]
158 158 return filelog.filelog(self.opener, f)
159 159
160 160 def getcwd(self):
161 161 return self.dirstate.getcwd()
162 162
163 163 def wfile(self, f, mode='r'):
164 164 return self.wopener(f, mode)
165 165
166 166 def wread(self, filename):
167 167 if self.encodepats == None:
168 168 l = []
169 169 for pat, cmd in self.ui.configitems("encode"):
170 170 mf = util.matcher("", "/", [pat], [], [])[1]
171 171 l.append((mf, cmd))
172 172 self.encodepats = l
173 173
174 174 data = self.wopener(filename, 'r').read()
175 175
176 176 for mf, cmd in self.encodepats:
177 177 if mf(filename):
178 178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 179 data = util.filter(data, cmd)
180 180 break
181 181
182 182 return data
183 183
184 184 def wwrite(self, filename, data, fd=None):
185 185 if self.decodepats == None:
186 186 l = []
187 187 for pat, cmd in self.ui.configitems("decode"):
188 188 mf = util.matcher("", "/", [pat], [], [])[1]
189 189 l.append((mf, cmd))
190 190 self.decodepats = l
191 191
192 192 for mf, cmd in self.decodepats:
193 193 if mf(filename):
194 194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 195 data = util.filter(data, cmd)
196 196 break
197 197
198 198 if fd:
199 199 return fd.write(data)
200 200 return self.wopener(filename, 'w').write(data)
201 201
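# A minimal sketch (shellfilter is an illustrative stand-in for
# util.filter): each matching entry in the [encode] section of hgrc names a
# shell command that reads file data on stdin and writes the filtered
# result to stdout; wread() applies it when data leaves the working
# directory, and the matching [decode] command is applied by wwrite() on
# the way back (e.g. to strip and restore carriage returns).
import subprocess

def shellfilter(data, cmd):
    p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    return p.communicate(data)[0]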
202 202 def transaction(self):
203 203 # save dirstate for undo
204 204 try:
205 205 ds = self.opener("dirstate").read()
206 206 except IOError:
207 207 ds = ""
208 208 self.opener("journal.dirstate", "w").write(ds)
209 209
210 210 def after():
211 211 util.rename(self.join("journal"), self.join("undo"))
212 212 util.rename(self.join("journal.dirstate"),
213 213 self.join("undo.dirstate"))
214 214
215 215 return transaction.transaction(self.ui.warn, self.opener,
216 216 self.join("journal"), after)
217 217
218 218 def recover(self):
219 219 lock = self.lock()
220 220 if os.path.exists(self.join("journal")):
221 221 self.ui.status(_("rolling back interrupted transaction\n"))
222 222 return transaction.rollback(self.opener, self.join("journal"))
223 223 else:
224 224 self.ui.warn(_("no interrupted transaction available\n"))
225 225
226 226 def undo(self):
227 227 lock = self.lock()
228 228 if os.path.exists(self.join("undo")):
229 229 self.ui.status(_("rolling back last transaction\n"))
230 230 transaction.rollback(self.opener, self.join("undo"))
231 231 self.dirstate = None
232 232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
233 233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
234 234 else:
235 235 self.ui.warn(_("no undo information available\n"))
236 236
237 237 def lock(self, wait=1):
238 238 try:
239 239 return lock.lock(self.join("lock"), 0)
240 240 except lock.LockHeld, inst:
241 241 if wait:
242 242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
243 243 return lock.lock(self.join("lock"), wait)
244 244 raise inst
245 245
246 246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
247 247 orig_parent = self.dirstate.parents()[0] or nullid
248 248 p1 = p1 or self.dirstate.parents()[0] or nullid
249 249 p2 = p2 or self.dirstate.parents()[1] or nullid
250 250 c1 = self.changelog.read(p1)
251 251 c2 = self.changelog.read(p2)
252 252 m1 = self.manifest.read(c1[0])
253 253 mf1 = self.manifest.readflags(c1[0])
254 254 m2 = self.manifest.read(c2[0])
255 255 changed = []
256 256
257 257 if orig_parent == p1:
258 258 update_dirstate = 1
259 259 else:
260 260 update_dirstate = 0
261 261
262 262 tr = self.transaction()
263 263 mm = m1.copy()
264 264 mfm = mf1.copy()
265 265 linkrev = self.changelog.count()
266 266 for f in files:
267 267 try:
268 268 t = self.wread(f)
269 269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
270 270 r = self.file(f)
271 271 mfm[f] = tm
272 272
273 273 fp1 = m1.get(f, nullid)
274 274 fp2 = m2.get(f, nullid)
275 275
276 276 # is the same revision on two branches of a merge?
277 277 if fp2 == fp1:
278 278 fp2 = nullid
279 279
280 280 if fp2 != nullid:
281 281 # is one parent an ancestor of the other?
282 282 fpa = r.ancestor(fp1, fp2)
283 283 if fpa == fp1:
284 284 fp1, fp2 = fp2, nullid
285 285 elif fpa == fp2:
286 286 fp2 = nullid
287 287
288 288 # is the file unmodified from the parent?
289 289 if t == r.read(fp1):
290 290 # record the proper existing parent in manifest
291 291 # no need to add a revision
292 292 mm[f] = fp1
293 293 continue
294 294
295 295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
296 296 changed.append(f)
297 297 if update_dirstate:
298 298 self.dirstate.update([f], "n")
299 299 except IOError:
300 300 try:
301 301 del mm[f]
302 302 del mfm[f]
303 303 if update_dirstate:
304 304 self.dirstate.forget([f])
305 305 except:
306 306 # deleted from p2?
307 307 pass
308 308
309 309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
310 310 user = user or self.ui.username()
311 311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
312 312 tr.close()
313 313 if update_dirstate:
314 314 self.dirstate.setparents(n, nullid)
315 315
316 316 def commit(self, files = None, text = "", user = None, date = None,
317 317 match = util.always, force=False):
318 318 commit = []
319 319 remove = []
320 320 changed = []
321 321
322 322 if files:
323 323 for f in files:
324 324 s = self.dirstate.state(f)
325 325 if s in 'nmai':
326 326 commit.append(f)
327 327 elif s == 'r':
328 328 remove.append(f)
329 329 else:
330 330 self.ui.warn(_("%s not tracked!\n") % f)
331 331 else:
332 332 (c, a, d, u) = self.changes(match=match)
333 333 commit = c + a
334 334 remove = d
335 335
336 336 p1, p2 = self.dirstate.parents()
337 337 c1 = self.changelog.read(p1)
338 338 c2 = self.changelog.read(p2)
339 339 m1 = self.manifest.read(c1[0])
340 340 mf1 = self.manifest.readflags(c1[0])
341 341 m2 = self.manifest.read(c2[0])
342 342
343 343 if not commit and not remove and not force and p2 == nullid:
344 344 self.ui.status(_("nothing changed\n"))
345 345 return None
346 346
347 347 if not self.hook("precommit"):
348 348 return None
349 349
350 350 lock = self.lock()
351 351 tr = self.transaction()
352 352
353 353 # check in files
354 354 new = {}
355 355 linkrev = self.changelog.count()
356 356 commit.sort()
357 357 for f in commit:
358 358 self.ui.note(f + "\n")
359 359 try:
360 360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
361 361 t = self.wread(f)
362 362 except IOError:
363 363 self.ui.warn(_("trouble committing %s!\n") % f)
364 364 raise
365 365
366 366 r = self.file(f)
367 367
368 368 meta = {}
369 369 cp = self.dirstate.copied(f)
370 370 if cp:
371 371 meta["copy"] = cp
372 372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
373 373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
374 374 fp1, fp2 = nullid, nullid
375 375 else:
376 376 fp1 = m1.get(f, nullid)
377 377 fp2 = m2.get(f, nullid)
378 378
379 379 # is the same revision on two branches of a merge?
380 380 if fp2 == fp1:
381 381 fp2 = nullid
382 382
383 383 if fp2 != nullid:
384 384 # is one parent an ancestor of the other?
385 385 fpa = r.ancestor(fp1, fp2)
386 386 if fpa == fp1:
387 387 fp1, fp2 = fp2, nullid
388 388 elif fpa == fp2:
389 389 fp2 = nullid
390 390
391 391 # is the file unmodified from the parent?
392 392 if not meta and t == r.read(fp1):
393 393 # record the proper existing parent in manifest
394 394 # no need to add a revision
395 395 new[f] = fp1
396 396 continue
397 397
398 398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
399 399 # remember what we've added so that we can later calculate
400 400 # the files to pull from a set of changesets
401 401 changed.append(f)
402 402
403 403 # update manifest
404 404 m1.update(new)
405 405 for f in remove:
406 406 if f in m1:
407 407 del m1[f]
408 408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
409 409 (new, remove))
410 410
411 411 # add changeset
412 412 new = new.keys()
413 413 new.sort()
414 414
415 415 if not text:
416 416 edittext = ""
417 417 if p2 != nullid:
418 418 edittext += "HG: branch merge\n"
419 419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
420 420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
421 421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
422 422 if not changed and not remove:
423 423 edittext += "HG: no files changed\n"
424 424 edittext = self.ui.edit(edittext)
425 425 if not edittext.rstrip():
426 426 return None
427 427 text = edittext
428 428
429 429 user = user or self.ui.username()
430 430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
431 431 tr.close()
432 432
433 433 self.dirstate.setparents(n)
434 434 self.dirstate.update(new, "n")
435 435 self.dirstate.forget(remove)
436 436
437 437 if not self.hook("commit", node=hex(n)):
438 438 return None
439 439 return n
440 440
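# A minimal sketch (illustrative helper, mirroring the logic above): when a
# file has two candidate filelog parents, a parent that is an ancestor of
# the other adds no history and is dropped, so only genuinely two-headed
# file histories are recorded as merges.
def reduce_parents(fp1, fp2, ancestor, nullid):
    if fp2 == fp1:
        return fp1, nullid
    if fp2 != nullid:
        fpa = ancestor(fp1, fp2)
        if fpa == fp1:
            return fp2, nullid
        if fpa == fp2:
            return fp1, nullid
    return fp1, fp2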
441 441 def walk(self, node=None, files=[], match=util.always):
442 442 if node:
443 443 for fn in self.manifest.read(self.changelog.read(node)[0]):
444 444 if match(fn): yield 'm', fn
445 445 else:
446 446 for src, fn in self.dirstate.walk(files, match):
447 447 yield src, fn
448 448
449 449 def changes(self, node1 = None, node2 = None, files = [],
450 450 match = util.always):
451 451 mf2, u = None, []
452 452
453 453 def fcmp(fn, mf):
454 454 t1 = self.wread(fn)
455 455 t2 = self.file(fn).read(mf.get(fn, nullid))
456 456 return cmp(t1, t2)
457 457
458 458 def mfmatches(node):
459 459 mf = dict(self.manifest.read(node))
460 460 for fn in mf.keys():
461 461 if not match(fn):
462 462 del mf[fn]
463 463 return mf
464 464
465 465 # are we comparing the working directory?
466 466 if not node2:
467 467 l, c, a, d, u = self.dirstate.changes(files, match)
468 468
469 469 # are we comparing working dir against its parent?
470 470 if not node1:
471 471 if l:
472 472 # do a full compare of any files that might have changed
473 473 change = self.changelog.read(self.dirstate.parents()[0])
474 474 mf2 = mfmatches(change[0])
475 475 for f in l:
476 476 if fcmp(f, mf2):
477 477 c.append(f)
478 478
479 479 for l in c, a, d, u:
480 480 l.sort()
481 481
482 482 return (c, a, d, u)
483 483
484 484 # are we comparing working dir against non-tip?
485 485 # generate a pseudo-manifest for the working dir
486 486 if not node2:
487 487 if not mf2:
488 488 change = self.changelog.read(self.dirstate.parents()[0])
489 489 mf2 = mfmatches(change[0])
490 490 for f in a + c + l:
491 491 mf2[f] = ""
492 492 for f in d:
493 493 if f in mf2: del mf2[f]
494 494 else:
495 495 change = self.changelog.read(node2)
496 496 mf2 = mfmatches(change[0])
497 497
498 498 # flush lists from dirstate before comparing manifests
499 499 c, a = [], []
500 500
501 501 change = self.changelog.read(node1)
502 502 mf1 = mfmatches(change[0])
503 503
504 504 for fn in mf2:
505 505 if mf1.has_key(fn):
506 506 if mf1[fn] != mf2[fn]:
507 507 if mf2[fn] != "" or fcmp(fn, mf1):
508 508 c.append(fn)
509 509 del mf1[fn]
510 510 else:
511 511 a.append(fn)
512 512
513 513 d = mf1.keys()
514 514
515 515 for l in c, a, d, u:
516 516 l.sort()
517 517
518 518 return (c, a, d, u)
519 519
520 520 def add(self, list):
521 521 for f in list:
522 522 p = self.wjoin(f)
523 523 if not os.path.exists(p):
524 524 self.ui.warn(_("%s does not exist!\n") % f)
525 525 elif not os.path.isfile(p):
526 526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
527 527 elif self.dirstate.state(f) in 'an':
528 528 self.ui.warn(_("%s already tracked!\n") % f)
529 529 else:
530 530 self.dirstate.update([f], "a")
531 531
532 532 def forget(self, list):
533 533 for f in list:
534 534 if self.dirstate.state(f) not in 'ai':
535 535 self.ui.warn(_("%s not added!\n") % f)
536 536 else:
537 537 self.dirstate.forget([f])
538 538
539 def remove(self, list):
539 def remove(self, list, unlink=False):
540 if unlink:
541 for f in list:
542 try:
543 util.unlink(self.wjoin(f))
544 except OSError, inst:
545 if inst.errno != errno.ENOENT: raise
540 546 for f in list:
541 547 p = self.wjoin(f)
542 548 if os.path.exists(p):
543 549 self.ui.warn(_("%s still exists!\n") % f)
544 550 elif self.dirstate.state(f) == 'a':
545 551 self.ui.warn(_("%s never committed!\n") % f)
546 552 self.dirstate.forget([f])
547 553 elif f not in self.dirstate:
548 554 self.ui.warn(_("%s not tracked!\n") % f)
549 555 else:
550 556 self.dirstate.update([f], "r")
551 557
552 558 def copy(self, source, dest):
553 559 p = self.wjoin(dest)
554 560 if not os.path.exists(p):
555 561 self.ui.warn(_("%s does not exist!\n") % dest)
556 562 elif not os.path.isfile(p):
557 563 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
558 564 else:
559 565 if self.dirstate.state(dest) == '?':
560 566 self.dirstate.update([dest], "a")
561 567 self.dirstate.copy(source, dest)
562 568
563 569 def heads(self):
564 570 return self.changelog.heads()
565 571
566 572 # branchlookup returns a dict giving a list of branches for
567 573 # each head. A branch is defined as the tag of a node or
568 574 # the branch of the node's parents. If a node has multiple
569 575 # branch tags, tags are eliminated if they are visible from other
570 576 # branch tags.
571 577 #
572 578 # So, for this graph: a->b->c->d->e
573 579 # \ /
574 580 # aa -----/
575 581 # a has tag 2.6.12
576 582 # d has tag 2.6.13
577 583 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
578 584 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
579 585 # from the list.
580 586 #
581 587 # It is possible that more than one head will have the same branch tag.
582 588 # callers need to check the result for multiple heads under the same
583 589 # branch tag if that is a problem for them (i.e. checkout of a specific
584 590 # branch).
585 591 #
586 592 # passing in a specific branch will limit the depth of the search
587 593 # through the parents. It won't limit the branches returned in the
588 594 # result though.
589 595 def branchlookup(self, heads=None, branch=None):
590 596 if not heads:
591 597 heads = self.heads()
592 598 headt = [ h for h in heads ]
593 599 chlog = self.changelog
594 600 branches = {}
595 601 merges = []
596 602 seenmerge = {}
597 603
598 604 # traverse the tree once for each head, recording in the branches
599 605 # dict which tags are visible from this head. The branches
600 606 # dict also records which tags are visible from each tag
601 607 # while we traverse.
602 608 while headt or merges:
603 609 if merges:
604 610 n, found = merges.pop()
605 611 visit = [n]
606 612 else:
607 613 h = headt.pop()
608 614 visit = [h]
609 615 found = [h]
610 616 seen = {}
611 617 while visit:
612 618 n = visit.pop()
613 619 if n in seen:
614 620 continue
615 621 pp = chlog.parents(n)
616 622 tags = self.nodetags(n)
617 623 if tags:
618 624 for x in tags:
619 625 if x == 'tip':
620 626 continue
621 627 for f in found:
622 628 branches.setdefault(f, {})[n] = 1
623 629 branches.setdefault(n, {})[n] = 1
624 630 break
625 631 if n not in found:
626 632 found.append(n)
627 633 if branch in tags:
628 634 continue
629 635 seen[n] = 1
630 636 if pp[1] != nullid and n not in seenmerge:
631 637 merges.append((pp[1], [x for x in found]))
632 638 seenmerge[n] = 1
633 639 if pp[0] != nullid:
634 640 visit.append(pp[0])
635 641 # traverse the branches dict, eliminating branch tags from each
636 642 # head that are visible from another branch tag for that head.
637 643 out = {}
638 644 viscache = {}
639 645 for h in heads:
640 646 def visible(node):
641 647 if node in viscache:
642 648 return viscache[node]
643 649 ret = {}
644 650 visit = [node]
645 651 while visit:
646 652 x = visit.pop()
647 653 if x in viscache:
648 654 ret.update(viscache[x])
649 655 elif x not in ret:
650 656 ret[x] = 1
651 657 if x in branches:
652 658 visit[len(visit):] = branches[x].keys()
653 659 viscache[node] = ret
654 660 return ret
655 661 if h not in branches:
656 662 continue
657 663 # O(n^2), but somewhat limited. This only searches the
658 664 # tags visible from a specific head, not all the tags in the
659 665 # whole repo.
660 666 for b in branches[h]:
661 667 vis = False
662 668 for bb in branches[h].keys():
663 669 if b != bb:
664 670 if b in visible(bb):
665 671 vis = True
666 672 break
667 673 if not vis:
668 674 l = out.setdefault(h, [])
669 675 l[len(l):] = self.nodetags(b)
670 676 return out
671 677
672 678 def branches(self, nodes):
673 679 if not nodes: nodes = [self.changelog.tip()]
674 680 b = []
675 681 for n in nodes:
676 682 t = n
677 683 while n:
678 684 p = self.changelog.parents(n)
679 685 if p[1] != nullid or p[0] == nullid:
680 686 b.append((t, n, p[0], p[1]))
681 687 break
682 688 n = p[0]
683 689 return b
684 690
685 691 def between(self, pairs):
686 692 r = []
687 693
688 694 for top, bottom in pairs:
689 695 n, l, i = top, [], 0
690 696 f = 1
691 697
692 698 while n != bottom:
693 699 p = self.changelog.parents(n)[0]
694 700 if i == f:
695 701 l.append(n)
696 702 f = f * 2
697 703 n = p
698 704 i += 1
699 705
700 706 r.append(l)
701 707
702 708 return r
703 709
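# A minimal sketch (illustrative helper): between() samples a linear run of
# history at exponentially growing distances from the top node (1, 2, 4,
# 8, ...), which is what lets findincoming() below narrow an unknown branch
# with a logarithmic number of "between" round trips.
def sample_distances(length):
    d, f, out = 1, 1, []
    while d < length:
        if d == f:              # mirrors the i == f test above
            out.append(d)
            f *= 2
        d += 1
    return out
# sample_distances(20) -> [1, 2, 4, 8, 16]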
704 710 def newer(self, nodes):
705 711 m = {}
706 712 nl = []
707 713 pm = {}
708 714 cl = self.changelog
709 715 t = l = cl.count()
710 716
711 717 # find the lowest numbered node
712 718 for n in nodes:
713 719 l = min(l, cl.rev(n))
714 720 m[n] = 1
715 721
716 722 for i in xrange(l, t):
717 723 n = cl.node(i)
718 724 if n in m: # explicitly listed
719 725 pm[n] = 1
720 726 nl.append(n)
721 727 continue
722 728 for p in cl.parents(n):
723 729 if p in pm: # parent listed
724 730 pm[n] = 1
725 731 nl.append(n)
726 732 break
727 733
728 734 return nl
729 735
730 736 def findincoming(self, remote, base=None, heads=None):
731 737 m = self.changelog.nodemap
732 738 search = []
733 739 fetch = {}
734 740 seen = {}
735 741 seenbranch = {}
736 742 if base == None:
737 743 base = {}
738 744
739 745 # assume we're closer to the tip than the root
740 746 # and start by examining the heads
741 747 self.ui.status(_("searching for changes\n"))
742 748
743 749 if not heads:
744 750 heads = remote.heads()
745 751
746 752 unknown = []
747 753 for h in heads:
748 754 if h not in m:
749 755 unknown.append(h)
750 756 else:
751 757 base[h] = 1
752 758
753 759 if not unknown:
754 760 return None
755 761
756 762 rep = {}
757 763 reqcnt = 0
758 764
759 765 # search through remote branches
760 766 # a 'branch' here is a linear segment of history, with four parts:
761 767 # head, root, first parent, second parent
762 768 # (a branch always has two parents (or none) by definition)
763 769 unknown = remote.branches(unknown)
764 770 while unknown:
765 771 r = []
766 772 while unknown:
767 773 n = unknown.pop(0)
768 774 if n[0] in seen:
769 775 continue
770 776
771 777 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
772 778 if n[0] == nullid:
773 779 break
774 780 if n in seenbranch:
775 781 self.ui.debug(_("branch already found\n"))
776 782 continue
777 783 if n[1] and n[1] in m: # do we know the base?
778 784 self.ui.debug(_("found incomplete branch %s:%s\n")
779 785 % (short(n[0]), short(n[1])))
780 786 search.append(n) # schedule branch range for scanning
781 787 seenbranch[n] = 1
782 788 else:
783 789 if n[1] not in seen and n[1] not in fetch:
784 790 if n[2] in m and n[3] in m:
785 791 self.ui.debug(_("found new changeset %s\n") %
786 792 short(n[1]))
787 793 fetch[n[1]] = 1 # earliest unknown
788 794 base[n[2]] = 1 # latest known
789 795 continue
790 796
791 797 for a in n[2:4]:
792 798 if a not in rep:
793 799 r.append(a)
794 800 rep[a] = 1
795 801
796 802 seen[n[0]] = 1
797 803
798 804 if r:
799 805 reqcnt += 1
800 806 self.ui.debug(_("request %d: %s\n") %
801 807 (reqcnt, " ".join(map(short, r))))
802 808 for p in range(0, len(r), 10):
803 809 for b in remote.branches(r[p:p+10]):
804 810 self.ui.debug(_("received %s:%s\n") %
805 811 (short(b[0]), short(b[1])))
806 812 if b[0] in m:
807 813 self.ui.debug(_("found base node %s\n") % short(b[0]))
808 814 base[b[0]] = 1
809 815 elif b[0] not in seen:
810 816 unknown.append(b)
811 817
812 818 # do binary search on the branches we found
813 819 while search:
814 820 n = search.pop(0)
815 821 reqcnt += 1
816 822 l = remote.between([(n[0], n[1])])[0]
817 823 l.append(n[1])
818 824 p = n[0]
819 825 f = 1
820 826 for i in l:
821 827 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
822 828 if i in m:
823 829 if f <= 2:
824 830 self.ui.debug(_("found new branch changeset %s\n") %
825 831 short(p))
826 832 fetch[p] = 1
827 833 base[i] = 1
828 834 else:
829 835 self.ui.debug(_("narrowed branch search to %s:%s\n")
830 836 % (short(p), short(i)))
831 837 search.append((p, i))
832 838 break
833 839 p, f = i, f * 2
834 840
835 841 # sanity check our fetch list
836 842 for f in fetch.keys():
837 843 if f in m:
838 844 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
839 845
840 846 if base.keys() == [nullid]:
841 847 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
842 848
843 849 self.ui.note(_("found new changesets starting at ") +
844 850 " ".join([short(f) for f in fetch]) + "\n")
845 851
846 852 self.ui.debug(_("%d total queries\n") % reqcnt)
847 853
848 854 return fetch.keys()
849 855
850 856 def findoutgoing(self, remote, base=None, heads=None):
851 857 if base == None:
852 858 base = {}
853 859 self.findincoming(remote, base, heads)
854 860
855 861 self.ui.debug(_("common changesets up to ")
856 862 + " ".join(map(short, base.keys())) + "\n")
857 863
858 864 remain = dict.fromkeys(self.changelog.nodemap)
859 865
860 866 # prune everything remote has from the tree
861 867 del remain[nullid]
862 868 remove = base.keys()
863 869 while remove:
864 870 n = remove.pop(0)
865 871 if n in remain:
866 872 del remain[n]
867 873 for p in self.changelog.parents(n):
868 874 remove.append(p)
869 875
870 876 # find every node whose parents have been pruned
871 877 subset = []
872 878 for n in remain:
873 879 p1, p2 = self.changelog.parents(n)
874 880 if p1 not in remain and p2 not in remain:
875 881 subset.append(n)
876 882
877 883 # this is the set of all roots we have to push
878 884 return subset
879 885
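# A minimal sketch (illustrative helper): findoutgoing() starts from every
# local node, erases everything reachable from the common bases by walking
# parents, and then reports the surviving nodes whose parents did not
# survive -- the roots of what the remote side is missing.
def outgoing_roots(all_nodes, parents, bases):
    # all_nodes: every local changeset node, without the null id
    remain = dict.fromkeys(all_nodes)
    work = list(bases)
    while work:
        n = work.pop()
        if n in remain:
            del remain[n]
            for p in parents(n):
                work.append(p)
    roots = []
    for n in remain:
        p1, p2 = parents(n)
        if p1 not in remain and p2 not in remain:
            roots.append(n)
    return roots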
880 886 def pull(self, remote):
881 887 lock = self.lock()
882 888
883 889 # if we have an empty repo, fetch everything
884 890 if self.changelog.tip() == nullid:
885 891 self.ui.status(_("requesting all changes\n"))
886 892 fetch = [nullid]
887 893 else:
888 894 fetch = self.findincoming(remote)
889 895
890 896 if not fetch:
891 897 self.ui.status(_("no changes found\n"))
892 898 return 1
893 899
894 900 cg = remote.changegroup(fetch)
895 901 return self.addchangegroup(cg)
896 902
897 903 def push(self, remote, force=False):
898 904 lock = remote.lock()
899 905
900 906 base = {}
901 907 heads = remote.heads()
902 908 inc = self.findincoming(remote, base, heads)
903 909 if not force and inc:
904 910 self.ui.warn(_("abort: unsynced remote changes!\n"))
905 911 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
906 912 return 1
907 913
908 914 update = self.findoutgoing(remote, base)
909 915 if not update:
910 916 self.ui.status(_("no changes found\n"))
911 917 return 1
912 918 elif not force:
913 919 if len(heads) < len(self.changelog.heads()):
914 920 self.ui.warn(_("abort: push creates new remote branches!\n"))
915 921 self.ui.status(_("(did you forget to merge?"
916 922 " use push -f to force)\n"))
917 923 return 1
918 924
919 925 cg = self.changegroup(update)
920 926 return remote.addchangegroup(cg)
921 927
922 928 def changegroup(self, basenodes):
923 929 genread = util.chunkbuffer
924 930
925 931 def gengroup():
926 932 nodes = self.newer(basenodes)
927 933
928 934 # construct the link map
929 935 linkmap = {}
930 936 for n in nodes:
931 937 linkmap[self.changelog.rev(n)] = n
932 938
933 939 # construct a list of all changed files
934 940 changed = {}
935 941 for n in nodes:
936 942 c = self.changelog.read(n)
937 943 for f in c[3]:
938 944 changed[f] = 1
939 945 changed = changed.keys()
940 946 changed.sort()
941 947
942 948 # the changegroup is changesets + manifests + all file revs
943 949 revs = [ self.changelog.rev(n) for n in nodes ]
944 950
945 951 for y in self.changelog.group(linkmap): yield y
946 952 for y in self.manifest.group(linkmap): yield y
947 953 for f in changed:
948 954 yield struct.pack(">l", len(f) + 4) + f
949 955 g = self.file(f).group(linkmap)
950 956 for y in g:
951 957 yield y
952 958
953 959 yield struct.pack(">l", 0)
954 960
955 961 return genread(gengroup())
956 962
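# A minimal sketch (illustrative helpers): the stream built above is a
# sequence of chunks, each prefixed by a 4-byte big-endian length that
# counts the prefix itself; a length of 0 (or anything <= 4) terminates the
# current group, which is how getchunk() below spots the end marker.
import struct

def writechunk(data):
    return struct.pack(">l", len(data) + 4) + data

def readchunk(read):
    header = read(4)
    if not header:
        return ""
    length = struct.unpack(">l", header)[0]
    if length <= 4:
        return ""
    return read(length - 4)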
957 963 def addchangegroup(self, source):
958 964
959 965 def getchunk():
960 966 d = source.read(4)
961 967 if not d: return ""
962 968 l = struct.unpack(">l", d)[0]
963 969 if l <= 4: return ""
964 970 d = source.read(l - 4)
965 971 if len(d) < l - 4:
966 972 raise repo.RepoError(_("premature EOF reading chunk"
967 973 " (got %d bytes, expected %d)")
968 974 % (len(d), l - 4))
969 975 return d
970 976
971 977 def getgroup():
972 978 while 1:
973 979 c = getchunk()
974 980 if not c: break
975 981 yield c
976 982
977 983 def csmap(x):
978 984 self.ui.debug(_("add changeset %s\n") % short(x))
979 985 return self.changelog.count()
980 986
981 987 def revmap(x):
982 988 return self.changelog.rev(x)
983 989
984 990 if not source: return
985 991 changesets = files = revisions = 0
986 992
987 993 tr = self.transaction()
988 994
989 995 oldheads = len(self.changelog.heads())
990 996
991 997 # pull off the changeset group
992 998 self.ui.status(_("adding changesets\n"))
993 999 co = self.changelog.tip()
994 1000 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
995 1001 cnr, cor = map(self.changelog.rev, (cn, co))
996 1002 if cn == nullid:
997 1003 cnr = cor
998 1004 changesets = cnr - cor
999 1005
1000 1006 # pull off the manifest group
1001 1007 self.ui.status(_("adding manifests\n"))
1002 1008 mm = self.manifest.tip()
1003 1009 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1004 1010
1005 1011 # process the files
1006 1012 self.ui.status(_("adding file changes\n"))
1007 1013 while 1:
1008 1014 f = getchunk()
1009 1015 if not f: break
1010 1016 self.ui.debug(_("adding %s revisions\n") % f)
1011 1017 fl = self.file(f)
1012 1018 o = fl.count()
1013 1019 n = fl.addgroup(getgroup(), revmap, tr)
1014 1020 revisions += fl.count() - o
1015 1021 files += 1
1016 1022
1017 1023 newheads = len(self.changelog.heads())
1018 1024 heads = ""
1019 1025 if oldheads and newheads > oldheads:
1020 1026 heads = _(" (+%d heads)") % (newheads - oldheads)
1021 1027
1022 1028 self.ui.status(_("added %d changesets"
1023 1029 " with %d changes to %d files%s\n")
1024 1030 % (changesets, revisions, files, heads))
1025 1031
1026 1032 tr.close()
1027 1033
1028 1034 if changesets > 0:
1029 1035 if not self.hook("changegroup",
1030 1036 node=hex(self.changelog.node(cor+1))):
1031 1037 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1032 1038 return 1
1033 1039
1034 1040 for i in range(cor + 1, cnr + 1):
1035 1041 self.hook("commit", node=hex(self.changelog.node(i)))
1036 1042
1037 1043 return
1038 1044
1039 1045 def update(self, node, allow=False, force=False, choose=None,
1040 1046 moddirstate=True):
1041 1047 pl = self.dirstate.parents()
1042 1048 if not force and pl[1] != nullid:
1043 1049 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1044 1050 return 1
1045 1051
1046 1052 p1, p2 = pl[0], node
1047 1053 pa = self.changelog.ancestor(p1, p2)
1048 1054 m1n = self.changelog.read(p1)[0]
1049 1055 m2n = self.changelog.read(p2)[0]
1050 1056 man = self.manifest.ancestor(m1n, m2n)
1051 1057 m1 = self.manifest.read(m1n)
1052 1058 mf1 = self.manifest.readflags(m1n)
1053 1059 m2 = self.manifest.read(m2n)
1054 1060 mf2 = self.manifest.readflags(m2n)
1055 1061 ma = self.manifest.read(man)
1056 1062 mfa = self.manifest.readflags(man)
1057 1063
1058 1064 (c, a, d, u) = self.changes()
1059 1065
1060 1066 # is this a jump, or a merge? i.e. is there a linear path
1061 1067 # from p1 to p2?
1062 1068 linear_path = (pa == p1 or pa == p2)
1063 1069
1064 1070 # resolve the manifest to determine which files
1065 1071 # we care about merging
1066 1072 self.ui.note(_("resolving manifests\n"))
1067 1073 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1068 1074 (force, allow, moddirstate, linear_path))
1069 1075 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1070 1076 (short(man), short(m1n), short(m2n)))
1071 1077
1072 1078 merge = {}
1073 1079 get = {}
1074 1080 remove = []
1075 1081
1076 1082 # construct a working dir manifest
1077 1083 mw = m1.copy()
1078 1084 mfw = mf1.copy()
1079 1085 umap = dict.fromkeys(u)
1080 1086
1081 1087 for f in a + c + u:
1082 1088 mw[f] = ""
1083 1089 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1084 1090
1085 1091 for f in d:
1086 1092 if f in mw: del mw[f]
1087 1093
1088 1094 # If we're jumping between revisions (as opposed to merging),
1089 1095 # and if neither the working directory nor the target rev has
1090 1096 # the file, then we need to remove it from the dirstate, to
1091 1097 # prevent the dirstate from listing the file when it is no
1092 1098 # longer in the manifest.
1093 1099 if moddirstate and linear_path and f not in m2:
1094 1100 self.dirstate.forget((f,))
1095 1101
1096 1102 # Compare manifests
1097 1103 for f, n in mw.iteritems():
1098 1104 if choose and not choose(f): continue
1099 1105 if f in m2:
1100 1106 s = 0
1101 1107
1102 1108 # is the wfile new since m1, and match m2?
1103 1109 if f not in m1:
1104 1110 t1 = self.wread(f)
1105 1111 t2 = self.file(f).read(m2[f])
1106 1112 if cmp(t1, t2) == 0:
1107 1113 n = m2[f]
1108 1114 del t1, t2
1109 1115
1110 1116 # are files different?
1111 1117 if n != m2[f]:
1112 1118 a = ma.get(f, nullid)
1113 1119 # are both different from the ancestor?
1114 1120 if n != a and m2[f] != a:
1115 1121 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1116 1122 # merge executable bits
1117 1123 # "if we changed or they changed, change in merge"
1118 1124 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1119 1125 mode = ((a^b) | (a^c)) ^ a
1120 1126 merge[f] = (m1.get(f, nullid), m2[f], mode)
1121 1127 s = 1
1122 1128 # are we clobbering?
1123 1129 # is remote's version newer?
1124 1130 # or are we going back in time?
1125 1131 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1126 1132 self.ui.debug(_(" remote %s is newer, get\n") % f)
1127 1133 get[f] = m2[f]
1128 1134 s = 1
1129 1135 elif f in umap:
1130 1136 # this unknown file is the same as the checkout
1131 1137 get[f] = m2[f]
1132 1138
1133 1139 if not s and mfw[f] != mf2[f]:
1134 1140 if force:
1135 1141 self.ui.debug(_(" updating permissions for %s\n") % f)
1136 1142 util.set_exec(self.wjoin(f), mf2[f])
1137 1143 else:
1138 1144 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1139 1145 mode = ((a^b) | (a^c)) ^ a
1140 1146 if mode != b:
1141 1147 self.ui.debug(_(" updating permissions for %s\n") % f)
1142 1148 util.set_exec(self.wjoin(f), mode)
1143 1149 del m2[f]
1144 1150 elif f in ma:
1145 1151 if n != ma[f]:
1146 1152 r = _("d")
1147 1153 if not force and (linear_path or allow):
1148 1154 r = self.ui.prompt(
1149 1155 (_(" local changed %s which remote deleted\n") % f) +
1150 1156 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1151 1157 if r == _("d"):
1152 1158 remove.append(f)
1153 1159 else:
1154 1160 self.ui.debug(_("other deleted %s\n") % f)
1155 1161 remove.append(f) # other deleted it
1156 1162 else:
1157 1163 # file is created on branch or in working directory
1158 1164 if force and f not in umap:
1159 1165 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1160 1166 remove.append(f)
1161 1167 elif n == m1.get(f, nullid): # same as parent
1162 1168 if p2 == pa: # going backwards?
1163 1169 self.ui.debug(_("remote deleted %s\n") % f)
1164 1170 remove.append(f)
1165 1171 else:
1166 1172 self.ui.debug(_("local modified %s, keeping\n") % f)
1167 1173 else:
1168 1174 self.ui.debug(_("working dir created %s, keeping\n") % f)
1169 1175
1170 1176 for f, n in m2.iteritems():
1171 1177 if choose and not choose(f): continue
1172 1178 if f[0] == "/": continue
1173 1179 if f in ma and n != ma[f]:
1174 1180 r = _("k")
1175 1181 if not force and (linear_path or allow):
1176 1182 r = self.ui.prompt(
1177 1183 (_("remote changed %s which local deleted\n") % f) +
1178 1184 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1179 1185 if r == _("k"): get[f] = n
1180 1186 elif f not in ma:
1181 1187 self.ui.debug(_("remote created %s\n") % f)
1182 1188 get[f] = n
1183 1189 else:
1184 1190 if force or p2 == pa: # going backwards?
1185 1191 self.ui.debug(_("local deleted %s, recreating\n") % f)
1186 1192 get[f] = n
1187 1193 else:
1188 1194 self.ui.debug(_("local deleted %s\n") % f)
1189 1195
1190 1196 del mw, m1, m2, ma
1191 1197
1192 1198 if force:
1193 1199 for f in merge:
1194 1200 get[f] = merge[f][1]
1195 1201 merge = {}
1196 1202
1197 1203 if linear_path or force:
1198 1204 # we don't need to do any magic, just jump to the new rev
1199 1205 branch_merge = False
1200 1206 p1, p2 = p2, nullid
1201 1207 else:
1202 1208 if not allow:
1203 1209 self.ui.status(_("this update spans a branch"
1204 1210 " affecting the following files:\n"))
1205 1211 fl = merge.keys() + get.keys()
1206 1212 fl.sort()
1207 1213 for f in fl:
1208 1214 cf = ""
1209 1215 if f in merge: cf = _(" (resolve)")
1210 1216 self.ui.status(" %s%s\n" % (f, cf))
1211 1217 self.ui.warn(_("aborting update spanning branches!\n"))
1212 1218 self.ui.status(_("(use update -m to merge across branches"
1213 1219 " or -C to lose changes)\n"))
1214 1220 return 1
1215 1221 branch_merge = True
1216 1222
1217 1223 if moddirstate:
1218 1224 self.dirstate.setparents(p1, p2)
1219 1225
1220 1226 # get the files we don't need to change
1221 1227 files = get.keys()
1222 1228 files.sort()
1223 1229 for f in files:
1224 1230 if f[0] == "/": continue
1225 1231 self.ui.note(_("getting %s\n") % f)
1226 1232 t = self.file(f).read(get[f])
1227 1233 try:
1228 1234 self.wwrite(f, t)
1229 1235 except IOError, e:
1230 1236 if e.errno != errno.ENOENT:
1231 1237 raise
1232 1238 os.makedirs(os.path.dirname(self.wjoin(f)))
1233 1239 self.wwrite(f, t)
1234 1240 util.set_exec(self.wjoin(f), mf2[f])
1235 1241 if moddirstate:
1236 1242 if branch_merge:
1237 1243 self.dirstate.update([f], 'n', st_mtime=-1)
1238 1244 else:
1239 1245 self.dirstate.update([f], 'n')
1240 1246
1241 1247 # merge the tricky bits
1242 1248 files = merge.keys()
1243 1249 files.sort()
1244 1250 for f in files:
1245 1251 self.ui.status(_("merging %s\n") % f)
1246 1252 my, other, flag = merge[f]
1247 1253 self.merge3(f, my, other)
1248 1254 util.set_exec(self.wjoin(f), flag)
1249 1255 if moddirstate:
1250 1256 if branch_merge:
1251 1257 # We've done a branch merge, mark this file as merged
1252 1258 # so that we properly record the merger later
1253 1259 self.dirstate.update([f], 'm')
1254 1260 else:
1255 1261 # We've update-merged a locally modified file, so
1256 1262 # we set the dirstate to emulate a normal checkout
1257 1263 # of that file some time in the past. Thus our
1258 1264 # merge will appear as a normal local file
1259 1265 # modification.
1260 1266 f_len = len(self.file(f).read(other))
1261 1267 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1262 1268
1263 1269 remove.sort()
1264 1270 for f in remove:
1265 1271 self.ui.note(_("removing %s\n") % f)
1266 1272 try:
1267 os.unlink(self.wjoin(f))
1273 util.unlink(self.wjoin(f))
1268 1274 except OSError, inst:
1269 1275 if inst.errno != errno.ENOENT:
1270 1276 self.ui.warn(_("update failed to remove %s: %s!\n") %
1271 1277 (f, inst.strerror))
1272 # try removing directories that might now be empty
1273 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1274 except: pass
1275 1278 if moddirstate:
1276 1279 if branch_merge:
1277 1280 self.dirstate.update(remove, 'r')
1278 1281 else:
1279 1282 self.dirstate.forget(remove)
1280 1283
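The exec-bit merge in update() above ("if we changed or they changed, change in merge") boils down to mode = ((a^b) | (a^c)) ^ a. A quick sketch of what that does for a single permission bit:

    # a = ancestor's bit, b = working copy's bit, c = remote's bit.
    def merge_bit(a, b, c):
        return ((a ^ b) | (a ^ c)) ^ a

    merge_bit(0, 0, 0)   # 0: nobody changed it, keep the ancestor's value
    merge_bit(0, 1, 0)   # 1: only we changed it, keep ours
    merge_bit(0, 0, 1)   # 1: only they changed it, take theirs
    merge_bit(1, 0, 0)   # 0: both flipped it the same way, take the new value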
1281 1284 def merge3(self, fn, my, other):
1282 1285 """perform a 3-way merge in the working directory"""
1283 1286
1284 1287 def temp(prefix, node):
1285 1288 pre = "%s~%s." % (os.path.basename(fn), prefix)
1286 1289 (fd, name) = tempfile.mkstemp("", pre)
1287 1290 f = os.fdopen(fd, "wb")
1288 1291 self.wwrite(fn, fl.read(node), f)
1289 1292 f.close()
1290 1293 return name
1291 1294
1292 1295 fl = self.file(fn)
1293 1296 base = fl.ancestor(my, other)
1294 1297 a = self.wjoin(fn)
1295 1298 b = temp("base", base)
1296 1299 c = temp("other", other)
1297 1300
1298 1301 self.ui.note(_("resolving %s\n") % fn)
1299 1302 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1300 1303 (fn, short(my), short(other), short(base)))
1301 1304
1302 1305 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1303 1306 or "hgmerge")
1304 1307 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1305 1308 if r:
1306 1309 self.ui.warn(_("merging %s failed!\n") % fn)
1307 1310
1308 1311 os.unlink(b)
1309 1312 os.unlink(c)
1310 1313
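merge3() above delegates conflict resolution to an external program: the command comes from the HGMERGE environment variable, then the ui.merge config entry, then the literal "hgmerge", and it is run as "<cmd> <local> <base> <other>", with a non-zero exit reported as a failed merge. A standalone sketch of that lookup (not part of the original module):

    import os

    def pick_merge_cmd(config_value=None):
        # same precedence as merge3(): environment, then config, then default
        return os.environ.get("HGMERGE") or config_value or "hgmerge"

    # e.g. pick_merge_cmd("kdiff3") -> "kdiff3" unless $HGMERGE overrides it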
1311 1314 def verify(self):
1312 1315 filelinkrevs = {}
1313 1316 filenodes = {}
1314 1317 changesets = revisions = files = 0
1315 1318 errors = [0]
1316 1319 neededmanifests = {}
1317 1320
1318 1321 def err(msg):
1319 1322 self.ui.warn(msg + "\n")
1320 1323 errors[0] += 1
1321 1324
1322 1325 seen = {}
1323 1326 self.ui.status(_("checking changesets\n"))
1324 1327 for i in range(self.changelog.count()):
1325 1328 changesets += 1
1326 1329 n = self.changelog.node(i)
1327 1330 l = self.changelog.linkrev(n)
1328 1331 if l != i:
1329 1332 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1330 1333 if n in seen:
1331 1334 err(_("duplicate changeset at revision %d") % i)
1332 1335 seen[n] = 1
1333 1336
1334 1337 for p in self.changelog.parents(n):
1335 1338 if p not in self.changelog.nodemap:
1336 1339 err(_("changeset %s has unknown parent %s") %
1337 1340 (short(n), short(p)))
1338 1341 try:
1339 1342 changes = self.changelog.read(n)
1340 1343 except Exception, inst:
1341 1344 err(_("unpacking changeset %s: %s") % (short(n), inst))
1342 1345
1343 1346 neededmanifests[changes[0]] = n
1344 1347
1345 1348 for f in changes[3]:
1346 1349 filelinkrevs.setdefault(f, []).append(i)
1347 1350
1348 1351 seen = {}
1349 1352 self.ui.status(_("checking manifests\n"))
1350 1353 for i in range(self.manifest.count()):
1351 1354 n = self.manifest.node(i)
1352 1355 l = self.manifest.linkrev(n)
1353 1356
1354 1357 if l < 0 or l >= self.changelog.count():
1355 1358 err(_("bad manifest link (%d) at revision %d") % (l, i))
1356 1359
1357 1360 if n in neededmanifests:
1358 1361 del neededmanifests[n]
1359 1362
1360 1363 if n in seen:
1361 1364 err(_("duplicate manifest at revision %d") % i)
1362 1365
1363 1366 seen[n] = 1
1364 1367
1365 1368 for p in self.manifest.parents(n):
1366 1369 if p not in self.manifest.nodemap:
1367 1370 err(_("manifest %s has unknown parent %s") %
1368 1371 (short(n), short(p)))
1369 1372
1370 1373 try:
1371 1374 delta = mdiff.patchtext(self.manifest.delta(n))
1372 1375 except KeyboardInterrupt:
1373 1376 self.ui.warn(_("interrupted"))
1374 1377 raise
1375 1378 except Exception, inst:
1376 1379 err(_("unpacking manifest %s: %s") % (short(n), inst))
1377 1380
1378 1381 ff = [ l.split('\0') for l in delta.splitlines() ]
1379 1382 for f, fn in ff:
1380 1383 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1381 1384
1382 1385 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1383 1386
1384 1387 for m,c in neededmanifests.items():
1385 1388 err(_("Changeset %s refers to unknown manifest %s") %
1386 1389 (short(m), short(c)))
1387 1390 del neededmanifests
1388 1391
1389 1392 for f in filenodes:
1390 1393 if f not in filelinkrevs:
1391 1394 err(_("file %s in manifest but not in changesets") % f)
1392 1395
1393 1396 for f in filelinkrevs:
1394 1397 if f not in filenodes:
1395 1398 err(_("file %s in changeset but not in manifest") % f)
1396 1399
1397 1400 self.ui.status(_("checking files\n"))
1398 1401 ff = filenodes.keys()
1399 1402 ff.sort()
1400 1403 for f in ff:
1401 1404 if f == "/dev/null": continue
1402 1405 files += 1
1403 1406 fl = self.file(f)
1404 1407 nodes = { nullid: 1 }
1405 1408 seen = {}
1406 1409 for i in range(fl.count()):
1407 1410 revisions += 1
1408 1411 n = fl.node(i)
1409 1412
1410 1413 if n in seen:
1411 1414 err(_("%s: duplicate revision %d") % (f, i))
1412 1415 if n not in filenodes[f]:
1413 1416 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1414 1417 else:
1415 1418 del filenodes[f][n]
1416 1419
1417 1420 flr = fl.linkrev(n)
1418 1421 if flr not in filelinkrevs[f]:
1419 1422 err(_("%s:%s points to unexpected changeset %d")
1420 1423 % (f, short(n), flr))
1421 1424 else:
1422 1425 filelinkrevs[f].remove(flr)
1423 1426
1424 1427 # verify contents
1425 1428 try:
1426 1429 t = fl.read(n)
1427 1430 except Exception, inst:
1428 1431 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1429 1432
1430 1433 # verify parents
1431 1434 (p1, p2) = fl.parents(n)
1432 1435 if p1 not in nodes:
1433 1436 err(_("file %s:%s unknown parent 1 %s") %
1434 1437 (f, short(n), short(p1)))
1435 1438 if p2 not in nodes:
1436 1439 err(_("file %s:%s unknown parent 2 %s") %
1437 1440 (f, short(n), short(p2)))
1438 1441 nodes[n] = 1
1439 1442
1440 1443 # cross-check
1441 1444 for node in filenodes[f]:
1442 1445 err(_("node %s in manifests not in %s") % (hex(node), f))
1443 1446
1444 1447 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1445 1448 (files, changesets, revisions))
1446 1449
1447 1450 if errors[0]:
1448 1451 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1449 1452 return 1
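A small implementation note on verify() above: the error count lives in a one-element list so the nested err() helper can mutate it, since a Python 2 closure cannot rebind an outer local. The pattern in isolation:

    def check_things(items):
        errors = [0]                 # mutable cell shared with the closure
        def err(msg):
            # a plain "errors += 1" here would raise UnboundLocalError
            errors[0] += 1
        for item in items:
            if item is None:
                err("bad item")
        return errors[0]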
@@ -1,589 +1,596 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform-specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8
9 9 This contains helper routines that are independent of the SCM core and hide
10 10 platform-specific details from the core.
11 11 """
12 12
13 13 import os, errno
14 14 from i18n import gettext as _
15 15 from demandload import *
16 16 demandload(globals(), "re cStringIO shutil popen2 tempfile threading time")
17 17
18 18 def pipefilter(s, cmd):
19 19 '''filter string S through command CMD, returning its output'''
20 20 (pout, pin) = popen2.popen2(cmd, -1, 'b')
21 21 def writer():
22 22 pin.write(s)
23 23 pin.close()
24 24
25 25 # we should use select instead on UNIX, but this will work on most
26 26 # systems, including Windows
27 27 w = threading.Thread(target=writer)
28 28 w.start()
29 29 f = pout.read()
30 30 pout.close()
31 31 w.join()
32 32 return f
33 33
34 34 def tempfilter(s, cmd):
35 35 '''filter string S through a pair of temporary files with CMD.
36 36 CMD is used as a template to create the real command to be run,
37 37 with the strings INFILE and OUTFILE replaced by the real names of
38 38 the temporary files generated.'''
39 39 inname, outname = None, None
40 40 try:
41 41 infd, inname = tempfile.mkstemp(prefix='hgfin')
42 42 fp = os.fdopen(infd, 'wb')
43 43 fp.write(s)
44 44 fp.close()
45 45 outfd, outname = tempfile.mkstemp(prefix='hgfout')
46 46 os.close(outfd)
47 47 cmd = cmd.replace('INFILE', inname)
48 48 cmd = cmd.replace('OUTFILE', outname)
49 49 code = os.system(cmd)
50 50 if code: raise Abort(_("command '%s' failed: %s") %
51 51 (cmd, explain_exit(code)))
52 52 return open(outname, 'rb').read()
53 53 finally:
54 54 try:
55 55 if inname: os.unlink(inname)
56 56 except: pass
57 57 try:
58 58 if outname: os.unlink(outname)
59 59 except: pass
60 60
61 61 filtertable = {
62 62 'tempfile:': tempfilter,
63 63 'pipe:': pipefilter,
64 64 }
65 65
66 66 def filter(s, cmd):
67 67 "filter a string through a command that transforms its input to its output"
68 68 for name, fn in filtertable.iteritems():
69 69 if cmd.startswith(name):
70 70 return fn(s, cmd[len(name):].lstrip())
71 71 return pipefilter(s, cmd)
72 72
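Usage sketch for the filter helpers above, assuming a POSIX system with tr and cp on PATH and this module importable as util:

    import util

    util.filter("mercurial\n", "tr a-z A-Z")             # "MERCURIAL\n" via pipefilter
    util.filter("data\n", "tempfile:cp INFILE OUTFILE")  # "data\n" round-tripped via tempfilter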
73 73 def patch(strip, patchname, ui):
74 74 """apply the patch <patchname> to the working directory.
75 75 a list of patched files is returned"""
76 76 fp = os.popen('patch -p%d < "%s"' % (strip, patchname))
77 77 files = {}
78 78 for line in fp:
79 79 line = line.rstrip()
80 80 ui.status("%s\n" % line)
81 81 if line.startswith('patching file '):
82 82 pf = parse_patch_output(line)
83 83 files.setdefault(pf, 1)
84 84 code = fp.close()
85 85 if code:
86 86 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
87 87 return files.keys()
88 88
89 89 def binary(s):
90 90 """return true if a string is binary data using diff's heuristic"""
91 91 if s and '\0' in s[:4096]:
92 92 return True
93 93 return False
94 94
95 95 def unique(g):
96 96 """return the uniq elements of iterable g"""
97 97 seen = {}
98 98 for f in g:
99 99 if f not in seen:
100 100 seen[f] = 1
101 101 yield f
102 102
103 103 class Abort(Exception):
104 104 """Raised if a command needs to print an error and exit."""
105 105
106 106 def always(fn): return True
107 107 def never(fn): return False
108 108
109 109 def globre(pat, head='^', tail='$'):
110 110 "convert a glob pattern into a regexp"
111 111 i, n = 0, len(pat)
112 112 res = ''
113 113 group = False
114 114 def peek(): return i < n and pat[i]
115 115 while i < n:
116 116 c = pat[i]
117 117 i = i+1
118 118 if c == '*':
119 119 if peek() == '*':
120 120 i += 1
121 121 res += '.*'
122 122 else:
123 123 res += '[^/]*'
124 124 elif c == '?':
125 125 res += '.'
126 126 elif c == '[':
127 127 j = i
128 128 if j < n and pat[j] in '!]':
129 129 j += 1
130 130 while j < n and pat[j] != ']':
131 131 j += 1
132 132 if j >= n:
133 133 res += '\\['
134 134 else:
135 135 stuff = pat[i:j].replace('\\','\\\\')
136 136 i = j + 1
137 137 if stuff[0] == '!':
138 138 stuff = '^' + stuff[1:]
139 139 elif stuff[0] == '^':
140 140 stuff = '\\' + stuff
141 141 res = '%s[%s]' % (res, stuff)
142 142 elif c == '{':
143 143 group = True
144 144 res += '(?:'
145 145 elif c == '}' and group:
146 146 res += ')'
147 147 group = False
148 148 elif c == ',' and group:
149 149 res += '|'
150 150 else:
151 151 res += re.escape(c)
152 152 return head + res + tail
153 153
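A few examples of the translation globre() performs, checked by matching rather than by exact regex text:

    import re
    import util

    bool(re.match(util.globre('*.py'), 'foo.py'))           # True:  '*' stays inside one path component
    bool(re.match(util.globre('*.py'), 'sub/foo.py'))       # False
    bool(re.match(util.globre('docs/**'), 'docs/a/b.txt'))  # True:  '**' crosses '/'
    bool(re.match(util.globre('{a,b}.txt'), 'b.txt'))       # True:  brace alternation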
154 154 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
155 155
156 156 def pathto(n1, n2):
157 157 '''return the relative path from one place to another.
158 158 this returns a path in the form used by the local filesystem, not hg.'''
159 159 if not n1: return localpath(n2)
160 160 a, b = n1.split('/'), n2.split('/')
161 161 a.reverse(), b.reverse()
162 162 while a and b and a[-1] == b[-1]:
163 163 a.pop(), b.pop()
164 164 b.reverse()
165 165 return os.sep.join((['..'] * len(a)) + b)
166 166
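Quick illustration of pathto() on a POSIX system (os.sep == '/'):

    import util

    util.pathto('foo/bar', 'foo/baz/x')   # '../baz/x'
    util.pathto('', 'foo/baz/x')          # 'foo/baz/x' (empty n1 returns the local form of n2)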
167 167 def canonpath(root, cwd, myname):
168 168 """return the canonical path of myname, given cwd and root"""
169 169 rootsep = root + os.sep
170 170 name = myname
171 171 if not name.startswith(os.sep):
172 172 name = os.path.join(root, cwd, name)
173 173 name = os.path.normpath(name)
174 174 if name.startswith(rootsep):
175 175 return pconvert(name[len(rootsep):])
176 176 elif name == root:
177 177 return ''
178 178 else:
179 179 raise Abort('%s not under root' % myname)
180 180
181 181 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head=''):
182 182 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob')
183 183
184 184 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head=''):
185 185 if os.name == 'nt':
186 186 dflt_pat = 'glob'
187 187 else:
188 188 dflt_pat = 'relpath'
189 189 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat)
190 190
191 191 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat):
192 192 """build a function to match a set of file patterns
193 193
194 194 arguments:
195 195 canonroot - the canonical root of the tree you're matching against
196 196 cwd - the current working directory, if relevant
197 197 names - patterns to find
198 198 inc - patterns to include
199 199 exc - patterns to exclude
200 200 head - a regex to prepend to patterns to control whether a match is rooted
201 201
202 202 a pattern is one of:
203 203 'glob:<rooted glob>'
204 204 're:<rooted regexp>'
205 205 'path:<rooted path>'
206 206 'relglob:<relative glob>'
207 207 'relpath:<relative path>'
208 208 'relre:<relative regexp>'
209 209 '<rooted path or regexp>'
210 210
211 211 returns:
212 212 a 3-tuple containing
213 213 - list of explicit non-pattern names passed in
214 214 - a bool match(filename) function
215 215 - a bool indicating if any patterns were passed in
216 216
217 217 todo:
218 218 make head regex a rooted bool
219 219 """
220 220
221 221 def patkind(name, dflt_pat='glob'):
222 222 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
223 223 if name.startswith(prefix + ':'): return name.split(':', 1)
224 224 return dflt_pat, name
225 225
226 226 def contains_glob(name):
227 227 for c in name:
228 228 if c in _globchars: return True
229 229 return False
230 230
231 231 def regex(kind, name, tail):
232 232 '''convert a pattern into a regular expression'''
233 233 if kind == 're':
234 234 return name
235 235 elif kind == 'path':
236 236 return '^' + re.escape(name) + '(?:/|$)'
237 237 elif kind == 'relglob':
238 238 return head + globre(name, '(?:|.*/)', tail)
239 239 elif kind == 'relpath':
240 240 return head + re.escape(name) + tail
241 241 elif kind == 'relre':
242 242 if name.startswith('^'):
243 243 return name
244 244 return '.*' + name
245 245 return head + globre(name, '', tail)
246 246
247 247 def matchfn(pats, tail):
248 248 """build a matching function from a set of patterns"""
249 249 if pats:
250 250 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
251 251 return re.compile(pat).match
252 252
253 253 def globprefix(pat):
254 254 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
255 255 root = []
256 256 for p in pat.split(os.sep):
257 257 if contains_glob(p): break
258 258 root.append(p)
259 259 return '/'.join(root)
260 260
261 261 pats = []
262 262 files = []
263 263 roots = []
264 264 for kind, name in [patkind(p, dflt_pat) for p in names]:
265 265 if kind in ('glob', 'relpath'):
266 266 name = canonpath(canonroot, cwd, name)
267 267 if name == '':
268 268 kind, name = 'glob', '**'
269 269 if kind in ('glob', 'path', 're'):
270 270 pats.append((kind, name))
271 271 if kind == 'glob':
272 272 root = globprefix(name)
273 273 if root: roots.append(root)
274 274 elif kind == 'relpath':
275 275 files.append((kind, name))
276 276 roots.append(name)
277 277
278 278 patmatch = matchfn(pats, '$') or always
279 279 filematch = matchfn(files, '(?:/|$)') or always
280 280 incmatch = always
281 281 if inc:
282 282 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
283 283 excmatch = lambda fn: False
284 284 if exc:
285 285 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
286 286
287 287 return (roots,
288 288 lambda fn: (incmatch(fn) and not excmatch(fn) and
289 289 (fn.endswith('/') or
290 290 (not pats and not files) or
291 291 (pats and patmatch(fn)) or
292 292 (files and filematch(fn)))),
293 293 (inc or exc or (pats and pats != [('glob', '**')])) and True)
294 294
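Usage sketch for the matcher machinery above, on a POSIX system with a hypothetical repository rooted at /repo:

    import util

    roots, match, anypats = util.matcher('/repo', '',
                                         ['glob:src/*.py'],
                                         exc=['relglob:*.orig'])
    roots                    # ['src'] - non-glob prefixes, handy for narrowing a walk
    match('src/a.py')        # True
    match('src/sub/a.py')    # False: '*' does not cross '/'
    match('src/a.py.orig')   # False: excluded
    anypats                  # True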
295 295 def system(cmd, errprefix=None):
296 296 """execute a shell command that must succeed"""
297 297 rc = os.system(cmd)
298 298 if rc:
299 299 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
300 300 explain_exit(rc)[0])
301 301 if errprefix:
302 302 errmsg = "%s: %s" % (errprefix, errmsg)
303 303 raise Abort(errmsg)
304 304
305 305 def rename(src, dst):
306 306 """forcibly rename a file"""
307 307 try:
308 308 os.rename(src, dst)
309 309 except:
310 310 os.unlink(dst)
311 311 os.rename(src, dst)
312 312
313 def unlink(f):
314 """unlink and remove the directory if it is empty"""
315 os.unlink(f)
316 # try removing directories that might now be empty
317 try: os.removedirs(os.path.dirname(f))
318 except: pass
319
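The new unlink() helper is what the update code in the previous file now calls; a throwaway sketch of the pruning it adds, assuming this module is importable as util:

    import os
    import util

    os.makedirs('scratch/a/b')
    open('scratch/a/b/only-file', 'w').close()
    util.unlink('scratch/a/b/only-file')
    os.path.exists('scratch')   # False: the file and every now-empty parent were pruned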
313 320 def copyfiles(src, dst, hardlink=None):
314 321 """Copy a directory tree using hardlinks if possible"""
315 322
316 323 if hardlink is None:
317 324 hardlink = (os.stat(src).st_dev ==
318 325 os.stat(os.path.dirname(dst)).st_dev)
319 326
320 327 if os.path.isdir(src):
321 328 os.mkdir(dst)
322 329 for name in os.listdir(src):
323 330 srcname = os.path.join(src, name)
324 331 dstname = os.path.join(dst, name)
325 332 copyfiles(srcname, dstname, hardlink)
326 333 else:
327 334 if hardlink:
328 335 try:
329 336 os_link(src, dst)
330 337 except:
331 338 hardlink = False
332 339 shutil.copy2(src, dst)
333 340 else:
334 341 shutil.copy2(src, dst)
335 342
336 343 def opener(base):
337 344 """
338 345 return a function that opens files relative to base
339 346
340 347 this function is used to hide the details of COW semantics and
341 348 remote file access from higher level code.
342 349 """
343 350 p = base
344 351 def o(path, mode="r", text=False):
345 352 f = os.path.join(p, path)
346 353
347 354 if not text:
348 355 mode += "b" # for that other OS
349 356
350 357 if mode[0] != "r":
351 358 try:
352 359 nlink = nlinks(f)
353 360 except OSError:
354 361 d = os.path.dirname(f)
355 362 if not os.path.isdir(d):
356 363 os.makedirs(d)
357 364 else:
358 365 if nlink > 1:
359 366 file(f + ".tmp", "wb").write(file(f, "rb").read())
360 367 rename(f+".tmp", f)
361 368
362 369 return file(f, mode)
363 370
364 371 return o
365 372
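Usage sketch for opener() above: any non-read open goes through the hardlink check, so missing parent directories are created and a file with more than one link is copied before being rewritten, keeping hardlinked clones independent.

    import tempfile
    import util

    base = tempfile.mkdtemp()
    o = util.opener(base)
    f = o('data/somefile.i', 'w')   # parent dirs are created; a file with
    f.write('payload')              # nlink > 1 would be copied first
    f.close()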
366 373 def _makelock_file(info, pathname):
367 374 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
368 375 os.write(ld, info)
369 376 os.close(ld)
370 377
371 378 def _readlock_file(pathname):
372 379 return file(pathname).read()
373 380
374 381 def nlinks(pathname):
375 382 """Return number of hardlinks for the given file."""
376 383 return os.stat(pathname).st_nlink
377 384
378 385 if hasattr(os, 'link'):
379 386 os_link = os.link
380 387 else:
381 388 def os_link(src, dst):
382 389 raise OSError(0, _("Hardlinks not supported"))
383 390
384 391 # Platform specific variants
385 392 if os.name == 'nt':
386 393 nulldev = 'NUL:'
387 394
388 395 try:
389 396 import win32api, win32process
390 397 filename = win32process.GetModuleFileNameEx(win32api.GetCurrentProcess(), 0)
391 398 systemrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
392 399
393 400 except ImportError:
394 401 systemrc = r'c:\mercurial\mercurial.ini'
395 402 pass
396 403
397 404 rcpath = (systemrc,
398 405 os.path.join(os.path.expanduser('~'), 'mercurial.ini'))
399 406
400 407 def parse_patch_output(output_line):
401 408 """parses the output produced by patch and returns the file name"""
402 409 pf = output_line[14:]
403 410 if pf[0] == '`':
404 411 pf = pf[1:-1] # Remove the quotes
405 412 return pf
406 413
407 414 try: # ActivePython can create hard links using win32file module
408 415 import win32file
409 416
410 417 def os_link(src, dst): # NB will only succeed on NTFS
411 418 win32file.CreateHardLink(dst, src)
412 419
413 420 def nlinks(pathname):
414 421 """Return number of hardlinks for the given file."""
415 422 try:
416 423 fh = win32file.CreateFile(pathname,
417 424 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
418 425 None, win32file.OPEN_EXISTING, 0, None)
419 426 res = win32file.GetFileInformationByHandle(fh)
420 427 fh.Close()
421 428 return res[7]
422 429 except:
423 430 return os.stat(pathname).st_nlink
424 431
425 432 except ImportError:
426 433 pass
427 434
428 435 def is_exec(f, last):
429 436 return last
430 437
431 438 def set_exec(f, mode):
432 439 pass
433 440
434 441 def pconvert(path):
435 442 return path.replace("\\", "/")
436 443
437 444 def localpath(path):
438 445 return path.replace('/', '\\')
439 446
440 447 def normpath(path):
441 448 return pconvert(os.path.normpath(path))
442 449
443 450 makelock = _makelock_file
444 451 readlock = _readlock_file
445 452
446 453 def explain_exit(code):
447 454 return _("exited with status %d") % code, code
448 455
449 456 else:
450 457 nulldev = '/dev/null'
451 458
452 459 hgrcd = '/etc/mercurial/hgrc.d'
453 460 hgrcs = []
454 461 if os.path.isdir(hgrcd):
455 462 hgrcs = [f for f in os.listdir(hgrcd) if f.endswith(".rc")]
456 463 rcpath = map(os.path.normpath, hgrcs +
457 464 ['/etc/mercurial/hgrc', os.path.expanduser('~/.hgrc')])
458 465
459 466 def parse_patch_output(output_line):
460 467 """parses the output produced by patch and returns the file name"""
461 468 return output_line[14:]
462 469
463 470 def is_exec(f, last):
464 471 """check whether a file is executable"""
465 472 return (os.stat(f).st_mode & 0100 != 0)
466 473
467 474 def set_exec(f, mode):
468 475 s = os.stat(f).st_mode
469 476 if (s & 0100 != 0) == mode:
470 477 return
471 478 if mode:
472 479 # Turn on +x for every +r bit when making a file executable
473 480 # and obey umask.
474 481 umask = os.umask(0)
475 482 os.umask(umask)
476 483 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
477 484 else:
478 485 os.chmod(f, s & 0666)
479 486
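Worked example of the +x computation in set_exec() above, assuming a umask of 022 and a file currently at mode 0644:

    s, umask = 0644, 022
    newmode = s | (s & 0444) >> 2 & ~umask   # 0755: every +r bit gains +x, minus the umask bits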
480 487 def pconvert(path):
481 488 return path
482 489
483 490 def localpath(path):
484 491 return path
485 492
486 493 normpath = os.path.normpath
487 494
488 495 def makelock(info, pathname):
489 496 try:
490 497 os.symlink(info, pathname)
491 498 except OSError, why:
492 499 if why.errno == errno.EEXIST:
493 500 raise
494 501 else:
495 502 _makelock_file(info, pathname)
496 503
497 504 def readlock(pathname):
498 505 try:
499 506 return os.readlink(pathname)
500 507 except OSError, why:
501 508 if why.errno == errno.EINVAL:
502 509 return _readlock_file(pathname)
503 510 else:
504 511 raise
505 512
506 513 def explain_exit(code):
507 514 """return a 2-tuple (desc, code) describing a process's status"""
508 515 if os.WIFEXITED(code):
509 516 val = os.WEXITSTATUS(code)
510 517 return _("exited with status %d") % val, val
511 518 elif os.WIFSIGNALED(code):
512 519 val = os.WTERMSIG(code)
513 520 return _("killed by signal %d") % val, val
514 521 elif os.WIFSTOPPED(code):
515 522 val = os.WSTOPSIG(code)
516 523 return _("stopped by signal %d") % val, val
517 524 raise ValueError(_("invalid exit code"))
518 525
519 526 class chunkbuffer(object):
520 527 """Allow arbitrary sized chunks of data to be efficiently read from an
521 528 iterator over chunks of arbitrary size."""
522 529
523 530 def __init__(self, in_iter, targetsize = 2**16):
524 531 """in_iter is the iterator that's iterating over the input chunks.
525 532 targetsize is how big a buffer to try to maintain."""
526 533 self.in_iter = iter(in_iter)
527 534 self.buf = ''
528 535 self.targetsize = int(targetsize)
529 536 if self.targetsize <= 0:
530 537 raise ValueError(_("targetsize must be greater than 0, was %d") %
531 538 targetsize)
532 539 self.iterempty = False
533 540
534 541 def fillbuf(self):
535 542 """Ignore target size; read every chunk from iterator until empty."""
536 543 if not self.iterempty:
537 544 collector = cStringIO.StringIO()
538 545 collector.write(self.buf)
539 546 for ch in self.in_iter:
540 547 collector.write(ch)
541 548 self.buf = collector.getvalue()
542 549 self.iterempty = True
543 550
544 551 def read(self, l):
545 552 """Read L bytes of data from the iterator of chunks of data.
546 553 Returns less than L bytes if the iterator runs dry."""
547 554 if l > len(self.buf) and not self.iterempty:
548 555 # Clamp to a multiple of self.targetsize
549 556 targetsize = self.targetsize * ((l // self.targetsize) + 1)
550 557 collector = cStringIO.StringIO()
551 558 collector.write(self.buf)
552 559 collected = len(self.buf)
553 560 for chunk in self.in_iter:
554 561 collector.write(chunk)
555 562 collected += len(chunk)
556 563 if collected >= targetsize:
557 564 break
558 565 if collected < targetsize:
559 566 self.iterempty = True
560 567 self.buf = collector.getvalue()
561 568 s, self.buf = self.buf[:l], buffer(self.buf, l)
562 569 return s
563 570
564 571 def filechunkiter(f, size = 65536):
565 572 """Create a generator that produces all the data in the file size
566 573 (default 65536) bytes at a time. Chunks may be less than size
567 574 bytes if the chunk is the last chunk in the file, or the file is a
568 575 socket or some other type of file that sometimes reads less data
569 576 than is requested."""
570 577 s = f.read(size)
571 578 while len(s) > 0:
572 579 yield s
573 580 s = f.read(size)
574 581
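Usage sketch for chunkbuffer and filechunkiter above: feed arbitrarily sized chunks in, read fixed-size records out.

    import cStringIO
    import util

    f = cStringIO.StringIO('x' * 100)
    buf = util.chunkbuffer(util.filechunkiter(f, size=7))
    rec = buf.read(16)
    while rec:
        # each rec is 16 bytes, except possibly the last one
        rec = buf.read(16)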
575 582 def makedate():
576 583 t = time.time()
577 584 if time.daylight: tz = time.altzone
578 585 else: tz = time.timezone
579 586 return t, tz
580 587
581 588 def datestr(date=None, format='%c'):
582 589 """represent a (unixtime, offset) tuple as a localized time.
583 590 unixtime is seconds since the epoch, and offset is the time zone's
584 591 number of seconds away from UTC."""
585 592 t, tz = date or makedate()
586 593 return ("%s %+03d%02d" %
587 594 (time.strftime(format, time.gmtime(float(t) - tz)),
588 595 -tz / 3600,
589 596 ((-tz % 3600) / 60)))
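Usage sketch for makedate()/datestr() above: the offset is the zone's distance from UTC in seconds, so an offset of -7200 means two hours ahead of UTC. The '%c' part depends on the locale; in the C locale, roughly:

    import util

    util.datestr((0, -7200))        # 'Thu Jan  1 02:00:00 1970 +0200'
    util.datestr(util.makedate())   # the current time in the local zone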