##// END OF EJS Templates
This implements the nodesbetween method, and it removes the newer method...
Eric Hopper -
r1457:518da3c3 default
parent child Browse files
Show More
@@ -1,2222 +1,2222 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 11 demandload(globals(), "fancyopts ui hg util lock revlog")
12 12 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 13 demandload(globals(), "errno socket version struct atexit sets bz2")
14 14
class UnknownCommand(Exception):
    """Raised when a command name is not found in the command table."""
17 17
def filterfiles(filters, files):
    """Return the files selected by filters: exact names first, then
    everything living under a filter taken as a directory prefix."""
    matched = [f for f in files if f in filters]

    for prefix in filters:
        # treat each filter as a directory name as well
        if prefix and not prefix.endswith("/"):
            prefix += "/"
        matched.extend([f for f in files if f.startswith(prefix)])
    return matched
26 26
def relpath(repo, args):
    """Rebase each path in args onto the repo's current working directory."""
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, arg)) for arg in args]
32 32
def matchpats(repo, cwd, pats=[], opts={}, head=''):
    """Build a matcher for pats; callers unpack the result as
    (files, matchfn, anypats).  Falls back to matching '.' when no
    patterns are given."""
    # NOTE: the mutable defaults are only ever read here, never mutated
    return util.matcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                        opts.get('exclude'), head)
36 36
def makewalk(repo, pats, opts, head=''):
    """Return (files, matchfn, generator); the generator yields
    (src, abs-path, cwd-relative-path, named-exactly) tuples."""
    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
    # files that were listed explicitly, as opposed to pattern matches
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(files=files, match=matchfn):
            yield src, fn, util.pathto(cwd, fn), fn in exact
    return files, matchfn, walk()
45 45
def walk(repo, pats, opts, head=''):
    """Convenience wrapper around makewalk that yields only the walk
    tuples, discarding the file list and match function."""
    dummy, dummy2, results = makewalk(repo, pats, opts, head)
    for item in results:
        yield item
50 50
def walkchangerevs(ui, repo, cwd, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange) pair. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    # empty repository: nothing to walk
    if repo.changelog.count() == 0:
        return [], False

    cwd = repo.getcwd()
    if not pats and cwd:
        # anchor bare --include/--exclude patterns at the current directory
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
                                        pats, opts)
    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}          # revision numbers to yield, used as a set
    slowpath = anypats   # pattern matching forces scanning every changeset
    window = 300
    fncache = {}         # rev -> file names that changed in that rev

    chcache = {}         # rev -> cached changelog entry
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            # walk the filelog in windows, yielding linkrevs newest-first
            for i in xrange(filelog.count() - 1, -1, -window):
                revs = []
                for j in xrange(max(0, i - window), i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file in files:
            filelog = repo.file(file)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        # walked past the requested range for this file
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i in xrange(repo.changelog.count() - 1, -1, -window):
                for j in xrange(max(0, i - window), i + 1):
                    # element 3 of a changelog entry is its file list
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        for i in xrange(0, len(revs), window):
            # announce the window direction and its final revision
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
                     if rev in wanted]
            # gather in ascending order, then replay in requested order
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange
155 155
156 156 revrangesep = ':'
157 157
def revrange(ui, repo, revs, revlog=None):
    """Yield revision as strings from a list of revision specifications."""
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        # resolve one spec to a revision number; non-integers fall back
        # to symbolic lookup in the changelog, then in revlog itself
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                # not a canonical integer spelling (e.g. "007"): treat
                # it as a symbolic name instead
                raise ValueError
            if num < 0: num += revcount     # negative revs count from tip
            if num < 0: num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort('invalid revision identifier %s', val)
        return num
    seen = {}   # suppress duplicate revisions across specs
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            # "start:end" range, inclusive at both ends, may run backwards
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen: continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen: continue
            seen[rev] = 1
            yield str(rev)
199 199
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in an output file name pattern.

    Always available: %% (literal %) and %b (repo basename).  Other
    escapes are enabled only when the corresponding argument is given:
    %H/%h/%R for the node, %r (zero-padded rev), %N (total), %n
    (zero-padded sequence number), %s/%d/%p for the path name.
    Raises util.Abort on an unknown escape.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the character following '%' as the escape key
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        # expander lookup failed: unknown escape character
        raise util.Abort("invalid format spec '%%%s' in output file name",
                         inst.args[0])
243 243
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open the output/input target described by pat.

    An empty pat or '-' maps to stdout/stdin depending on mode; an
    already-open file-like object is passed through; anything else is
    treated as a make_filename pattern and opened.
    """
    writing = 'w' in mode
    if not pat or pat == '-':
        if writing:
            return sys.stdout
        return sys.stdin
    # pat may already be an open file-like object
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    name = make_filename(repo, r, pat, node, total, seqno, revwidth, pathname)
    return open(name, mode)
255 255
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False):
    """Write a unified diff between node1 and node2 to fp.

    A node2 of None diffs against the working directory; a node1 of
    None defaults to the first dirstate parent.  changes may supply a
    precomputed (changed, added, deleted, unknown) tuple.
    """
    if not changes:
        (c, a, d, u) = repo.changes(node1, node2, files, match=match)
    else:
        (c, a, d, u) = changes
    if files:
        # restrict the reported changes to the requested files
        c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))

    if not c and not a and not d:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # diffing against the working directory
        date2 = util.datestr()
        if not node1:
            node1 = repo.dirstate.parents()[0]
        def read(f):
            return repo.wfile(f).read()

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    # changed files have both sides, added files no old side, deleted
    # files no new side
    for f in c:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    for f in a:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    for f in d:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
305 305
def trimuser(ui, name, rev, revcache):
    """Return the shortened committer name for rev, memoized in revcache."""
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    shortened = ui.shortuser(name)
    revcache[rev] = shortened
    return shortened
312 312
def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision"""
    log = repo.changelog
    # fill in whichever of rev/changenode the caller did not supply
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    if ui.quiet:
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    # hide null parents unless debugging
    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    # a single parent that is simply the previous rev is implicit
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write("changeset: %d:%s\n" % (rev, hex(changenode)))
    else:
        ui.write("changeset: %d:%s\n" % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status("tag: %s\n" % tag)
    for parent in parents:
        ui.write("parent: %d:%s\n" % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write("branch: %s\n" % " ".join(br))

    ui.debug("manifest: %d:%s\n" % (repo.manifest.rev(changes[0]),
                                    hex(changes[0])))
    ui.status("user: %s\n" % changes[1])
    ui.status("date: %s\n" % date)

    if ui.debugflag:
        # debug mode breaks the file list into changed/added/removed
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip(["files:", "files+:", "files-:"], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        ui.note("files: %s\n" % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status("description:\n")
            ui.status(description)
            ui.status("\n\n")
        else:
            ui.status("summary: %s\n" % description.splitlines()[0])
    ui.status("\n")
370 370
def show_version(ui):
    """Print the Mercurial version banner and the copyright notice."""
    banner = "Mercurial Distributed SCM (version %s)\n" % version.get_version()
    ui.write(banner)
    notice = (
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
381 381
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    option_lists = []
    if cmd and cmd != 'shortlist':
        # help for one specific command
        if with_version:
            show_version(ui)
            ui.write('\n')
        # find/table/globalopts are module-level names defined elsewhere
        key, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            aliases = ', '.join(key.split('|')[1:])
            if aliases:
                ui.write("\naliases: %s\n" % aliases)

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status("Mercurial Distributed SCM\n")
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status('basic commands (use "hg help" '
                      'for the full list or option "-v" for details):\n\n')
        elif ui.verbose:
            ui.status('list of commands:\n\n')
        else:
            ui.status('list of commands (use "hg help -v" '
                      'to show aliases and global options):\n\n')

        h = {}      # command name -> one-line summary
        cmds = {}   # command name -> full "name|alias" table key
        for c, e in table.items():
            f = c.split("|")[0]
            # "^" marks the basic commands shown in the short list
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            d = ""
            if e[0].__doc__:
                # first docstring line is the command summary
                d = e[0].__doc__.splitlines(0)[0].rstrip()
            h[f] = d
            cmds[f]=c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands,h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default and " (default: %s)" % default
                                        or "")))

    if opt_output:
        # align descriptions on the widest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
474 474
475 475 # Commands start here, listed alphabetically
476 476
def add(ui, repo, *pats, **opts):
    '''add the specified files on the next commit'''
    to_add = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            # explicitly named: always add, mention only in verbose mode
            if ui.verbose:
                ui.status('adding %s\n' % rel)
            to_add.append(abs)
        else:
            # pattern match: only pick up files hg does not know about
            if repo.dirstate.state(abs) == '?':
                ui.status('adding %s\n' % rel)
                to_add.append(abs)
    repo.add(to_add)
488 488
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files"""
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # schedule unknown working-directory files for addition
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status('adding ', rel, '\n')
        # schedule files that vanished from disk for removal
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status('removing ', rel, '\n')
    repo.add(add)
    repo.remove(remove)
503 503
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line"""
    def getnode(rev):
        # short node id of the changeset that introduced the line
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        # shortened committer name, cached per revision
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    if not pats:
        raise util.Abort('at least one file name or pattern required')

    opmap = [['user', getname], ['number', str], ['changeset', getnode]]
    if not opts['user'] and not opts['changeset']:
        # default annotation column is the revision number
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn("warning: %s is not in the repository!\n" % rel)
            continue

        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write("%s: binary file\n" % rel)
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        # build one right-aligned column per requested annotation kind
        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
551 551
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Writes the changesets missing from dest into fname as a bundle:
    the 'HG10' magic followed by the bz2-compressed changegroup.  On
    any error the partial output file is removed.
    """
    f = open(fname, "wb")
    dest = ui.expandpath(dest)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o)

    try:
        try:
            f.write("HG10")
            z = bz2.BZ2Compressor(9)
            while 1:
                chunk = cg.read(4096)
                if not chunk:
                    break
                f.write(z.compress(chunk))
            # flush whatever the compressor still buffers
            f.write(z.flush())
        finally:
            # fix: always close the handle -- it previously leaked on
            # success and was still open during os.unlink on failure,
            # which fails on Windows
            f.close()
    except:
        os.unlink(fname)
        raise
572 572
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files"""
    mf = {}
    # fix: the fallback lookup and the error message referenced an
    # undefined name 'rev' (NameError); bind it from opts once here
    rev = opts['rev']
    if rev:
        change = repo.changelog.read(repo.lookup(rev))
        mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
        r = repo.file(abs)
        if rev:
            try:
                # prefer the manifest of the requested changeset
                n = mf[abs]
            except (hg.RepoError, KeyError):
                try:
                    # fall back to resolving rev in the filelog itself
                    n = r.lookup(rev)
                except KeyError:
                    raise util.Abort('cannot find file %s in rev %s', rel, rev)
        else:
            n = r.tip()
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))
593 593
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository"""
    if dest is None:
        # default destination: basename of the source path
        dest = os.path.basename(os.path.normpath(source))

    if os.path.exists(dest):
        raise util.Abort("destination '%s' already exists", dest)

    dest = os.path.realpath(dest)

    class Dircleanup:
        # removes a partially-created destination if the clone fails;
        # close() disarms the cleanup once the clone succeeds
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
            os.mkdir(dir_)
        def close(self):
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    if not os.path.exists(source):
        source = ui.expandpath(source)

    d = Dircleanup(dest)
    abspath = source
    other = hg.repository(ui, source)

    # copy the store directly when the source is local (dev() != -1),
    # unless the user forced --pull
    copy = False
    if other.dev() != -1:
        abspath = os.path.abspath(source)
        if not opts['pull']:
            copy = True

    if copy:
        try:
            # we use a lock here because if we race with commit, we
            # can end up with extra data in the cloned revlogs that's
            # not pointed to by changesets, thus causing verify to
            # fail
            l1 = lock.lock(os.path.join(source, ".hg", "lock"))
        except OSError:
            # source not lockable: fall back to a pull-based clone
            copy = False

    if copy:
        # we lock here to avoid premature writing to the target
        os.mkdir(os.path.join(dest, ".hg"))
        l2 = lock.lock(os.path.join(dest, ".hg", "lock"))

        files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
        for f in files.split():
            src = os.path.join(source, ".hg", f)
            dst = os.path.join(dest, ".hg", f)
            util.copyfiles(src, dst)

        repo = hg.repository(ui, dest)

    else:
        repo = hg.repository(ui, dest, create=1)
        repo.pull(other)

    # record the source as this clone's default pull/push path
    f = repo.opener("hgrc", "w", text=True)
    f.write("[paths]\n")
    f.write("default = %s\n" % abspath)

    if not opts['noupdate']:
        update(ui, repo)

    d.close()
668 668
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes"""
    if opts['text']:
        ui.warn("Warning: -t and --text is deprecated,"
                " please use -m or --message instead.\n")
    message = opts['message'] or opts['text']
    logfile = opts['logfile']

    # the commit message comes from -m or from a log file, never both
    if message and logfile:
        raise util.Abort('options --message and --logfile are mutually '
                         'exclusive')
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort("can't read commit message '%s': %s" %
                             (logfile, inst.strerror))

    if opts['addremove']:
        addremove(ui, repo, *pats, **opts)
    cwd = repo.getcwd()
    if not pats and cwd:
        # anchor bare --include/--exclude patterns at the current directory
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
                                    pats, opts)
    if pats:
        # restrict the commit to the matched files, keeping scheduled
        # removals among the deleted ones
        c, a, d, u = repo.changes(files=fns, match=match)
        files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match)
    except ValueError, inst:
        raise util.Abort(str(inst))
707 707
def docopy(ui, repo, pats, opts):
    """Copy the source files in pats to the final destination pattern.

    Returns (error count, list of (abs, rel, exact) tuples that were
    actually copied).
    """
    if not pats:
        raise util.Abort('no source or destination specified')
    elif len(pats) == 1:
        raise util.Abort('no destination specified')
    # the last pattern is the destination; the rest are sources
    pats = list(pats)
    dest = pats.pop()
    sources = []

    def okaytocopy(abs, rel, exact):
        reasons = {'?': 'is not managed',
                   'a': 'has been marked for add'}
        reason = reasons.get(repo.dirstate.state(abs))
        if reason:
            # only complain about files the user named explicitly
            if exact: ui.warn('%s: not copying - file %s\n' % (rel, reason))
        else:
            return True

    for src, abs, rel, exact in walk(repo, pats, opts):
        if okaytocopy(abs, rel, exact):
            sources.append((abs, rel, exact))
    if not sources:
        raise util.Abort('no files to copy')

    cwd = repo.getcwd()
    absdest = util.canonpath(repo.root, cwd, dest)
    reldest = util.pathto(cwd, absdest)
    if os.path.exists(reldest):
        destisfile = not os.path.isdir(reldest)
    else:
        # nonexistent destination: a single source or an already-tracked
        # destination path means we treat it as a file
        destisfile = len(sources) == 1 or repo.dirstate.state(absdest) != '?'

    if destisfile:
        if opts['parents']:
            raise util.Abort('with --parents, destination must be a directory')
        elif len(sources) > 1:
            raise util.Abort('with multiple sources, destination must be a '
                             'directory')
    errs, copied = 0, []
    for abs, rel, exact in sources:
        # work out the concrete target path for this source
        if opts['parents']:
            mydest = os.path.join(dest, rel)
        elif destisfile:
            mydest = reldest
        else:
            mydest = os.path.join(dest, os.path.basename(rel))
        myabsdest = util.canonpath(repo.root, cwd, mydest)
        myreldest = util.pathto(cwd, myabsdest)
        if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
            ui.warn('%s: not overwriting - file already managed\n' % myreldest)
            continue
        mydestdir = os.path.dirname(myreldest) or '.'
        if not opts['after']:
            # create intermediate directories as needed
            try:
                if opts['parents']: os.makedirs(mydestdir)
                elif not destisfile: os.mkdir(mydestdir)
            except OSError, inst:
                if inst.errno != errno.EEXIST: raise
        if ui.verbose or not exact:
            ui.status('copying %s to %s\n' % (rel, myreldest))
        if not opts['after']:
            # copy contents and permission bits
            try:
                shutil.copyfile(rel, myreldest)
                shutil.copymode(rel, myreldest)
            except shutil.Error, inst:
                raise util.Abort(str(inst))
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn('%s: deleted in working copy\n' % rel)
                else:
                    ui.warn('%s: cannot copy - %s\n' % (rel, inst.strerror))
                errs += 1
                continue
        repo.copy(abs, myabsdest)
        copied.append((abs, rel, exact))
    if errs:
        ui.warn('(consider using --after)\n')
    return errs, copied
786 786
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit"""
    # docopy does the real work; only the error count matters here
    errs, dummy = docopy(ui, repo, pats, opts)
    return errs
791 791
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # the Python 2 builtin 'file' is passed as the revlog opener
    r = revlog.revlog(file, index, "")
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))
797 797
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn("%s in state %s, but not in manifest1\n" % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn("%s in state %s, but also in manifest1\n" % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn("%s in state %s, but not in either manifest\n" %
                    (f, state))
            errors += 1
    # and every first-manifest entry against the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn("%s in manifest1, but listed as state %s" % (f, state))
            errors += 1
    if errors:
        raise util.Abort(".hg/dirstate inconsistent with current parent's manifest")
829 829
def debugconfig(ui):
    """show combined config settings from all hgrc files"""
    # opening the repository (when inside one) folds its configuration
    # into ui; outside a repository only the global settings are shown
    try:
        hg.repository(ui)
    except hg.RepoError:
        pass
    for section, name, value in ui.walkconfig():
        ui.write('%s.%s=%s\n' % (section, name, value))
838 838
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    # entry fields used below: state char, permission bits, size, mtime
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies:
        ui.write("copy: %s -> %s\n" % (repo.dirstate.copies[f], f))
852 852
def debugdata(ui, file_, rev):
    """dump the contents of an data file revision"""
    # derive the ".i" index name from the ".d" data file name; the
    # Python 2 builtin 'file' serves as the revlog opener
    r = revlog.revlog(file, file_[:-2] + ".i", file_)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort('invalid revision identifier %s', rev)
860 860
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(file, file_, "")
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in range(r.count()):
        e = r.index[i]
        # columns follow the header: offset, length, base, linkrev,
        # then the node id and its two parents
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, e[0], e[1], e[2], e[3],
                short(e[6]), short(e[4]), short(e[5])))
871 871
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(file, file_, "")
    ui.write("digraph G {\n")
    for i in range(r.count()):
        e = r.index[i]
        # edge from the first parent is always drawn; the second only
        # when it is not the null node
        ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
        if e[5] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
    ui.write("}\n")
882 882
def debugrename(ui, repo, file, rev=None):
    """dump rename information

    Resolves the file revision (via the changeset manifest when rev
    names a changeset, falling back to the filelog) and reports where
    it was renamed from, if anywhere.
    """
    r = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            # fix: 'except hg.RepoError, KeyError:' bound the name
            # KeyError to the exception instead of catching it, so a
            # missing manifest entry escaped this fallback
            n = r.lookup(rev)
    else:
        n = r.tip()
    m = r.renamed(n)
    if m:
        ui.write("renamed from %s:%s\n" % (m[0], hex(m[1])))
    else:
        ui.write("not renamed\n")
902 902
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(walk(repo, pats, opts))
    if not items:
        return
    # size the columns to the longest abs and rel paths in the result
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
914 914
def diff(ui, repo, *pats, **opts):
    """diff working directory (or selected files)"""
    revs = [repo.lookup(x) for x in opts['rev']]
    if len(revs) > 2:
        raise util.Abort("too many revisions to diff")

    # zero revs: diff working dir against its parent; one rev: against
    # that rev; two revs: diff the pair
    node1 = node2 = None
    if revs:
        node1 = revs[0]
    if len(revs) > 1:
        node2 = revs[1]

    fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)

    dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
           text=opts['text'])
931 931
def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
    # Write one changeset as an "hg export"-style patch.
    #   changeset      : revision identifier to export
    #   seqno / total  : position within the batch (for -o format chars)
    #   revwidth       : zero-pad width for revision numbers in -o patterns
    node = repo.lookup(changeset)
    prev, other = repo.changelog.parents(node)
    change = repo.changelog.read(node)

    # make_file expands format characters in opts['output']; it falls
    # back to sys.stdout when no output pattern was given
    fp = make_file(repo, repo.changelog, opts['output'],
                   node=node, total=total, seqno=seqno,
                   revwidth=revwidth)
    if fp != sys.stdout:
        ui.note("%s\n" % fp.name)

    # header block recognized by "hg import"
    fp.write("# HG changeset patch\n")
    fp.write("# User %s\n" % change[1])
    fp.write("# Node ID %s\n" % hex(node))
    fp.write("# Parent %s\n" % hex(prev))
    # second parent line only for merge changesets
    if other != nullid:
        fp.write("# Parent %s\n" % hex(other))
    fp.write(change[4].rstrip())
    fp.write("\n\n")

    dodiff(fp, ui, repo, prev, node, text=opts['text'])
    if fp != sys.stdout:
        fp.close()
955 955
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets"""
    if not changesets:
        raise util.Abort("export requires at least one changeset")
    revs = list(revrange(ui, repo, changesets))
    total = len(revs)
    # widest revision string, for zero-padding in -o patterns
    revwidth = max(map(len, revs))
    if total > 1:
        ui.note("Exporting patches:\n")
    else:
        ui.note("Exporting patch:\n")
    seqno = 0
    for cset in revs:
        seqno = seqno + 1
        doexport(ui, repo, cset, seqno, total, revwidth, opts)
968 968
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit"""
    to_forget = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # only files in the 'added' dirstate can be forgotten
        if repo.dirstate.state(abs) != 'a':
            continue
        to_forget.append(abs)
        if ui.verbose or not exact:
            ui.status('forgetting ', rel, '\n')
    repo.forget(to_forget)
978 978
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Walks the selected revision range in windows (see walkchangerevs);
    within each window revisions are visited either forward or backward
    ('incrementing' tells which) and matches are reported as diffs of
    match-state between consecutive revisions of each file.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # filelog objects, cached per file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp hit
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate:
        # a matched line; equality/hash is on line text only, so a line
        # that merely moves between revisions is not reported again
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend
        def __eq__(self, other):
            return self.line == other.line
        def __hash__(self):
            return hash(self.line)

    # matches[rev][fn] -> {linestate: linestate}
    matches = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, {})
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m[s] = s

    prev = {}
    ucache = {}
    def display(fn, rev, states, prevstates):
        # print lines that appeared or disappeared between two revisions
        diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
        diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        for l in diff:
            if incrementing or not opts['all']:
                change = ((l in prevstates) and '-') or '+'
                r = rev
            else:
                change = ((l in states) and '-') or '+'
                r = prev[fn]
            # NOTE(review): cols uses str(rev) even on the branch where
            # r = prev[fn] was just computed — possibly should be str(r);
            # confirm against intended --all output before changing
            cols = [fn, str(rev)]
            if opts['line_number']: cols.append(str(l.linenum))
            if opts['all']: cols.append(change)
            if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
                                                  ucache))
            if opts['files_with_matches']:
                c = (fn, rev)
                if c in filerevmatches: continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}
    skip = {}
    changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
    count = 0
    incrementing = False
    for st, rev, fns in changeiter:
        if st == 'window':
            # new window: remember iteration direction, drop old matches
            incrementing = rev
            matches.clear()
        elif st == 'add':
            # collect matches for every interesting file in this revision
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip: continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                except KeyError:
                    # file not present in this revision's manifest
                    pass
        elif st == 'iter':
            # emit output for this revision in display order
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                if fn in skip: continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        # without --all we stop after the first hit per file
                        skip[fn] = True
                fstate[fn] = m
                prev[fn] = rev

    if not incrementing:
        # backward iteration: flush the remaining per-file state
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip: continue
            display(fn, rev, {}, state)
    # shell-style exit code: 0 if something matched, 1 otherwise
    return (count == 0 and 1) or 0
1097 1097
def heads(ui, repo, **opts):
    """show current repository heads

    With -b/--branches, show branch information for each head.
    """
    heads = repo.changelog.heads()
    br = None
    if opts['branches']:
        br = repo.branchlookup(heads)
    # reuse the head list computed above; the old code needlessly
    # called repo.changelog.heads() a second time in the loop header
    for n in heads:
        show_changeset(ui, repo, changenode=n, brinfo=br)
1106 1106
def identify(ui, repo):
    """print information about the working copy"""
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write("unknown\n")
        return

    hexfunc = ui.verbose and hex or short
    (c, a, d, u) = repo.changes()
    # trailing '+' flags local modifications
    dirty = (c or a or d) and "+" or ""
    output = ['+'.join([hexfunc(p) for p in parents]) + dirty]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = []
        for tags in map(repo.nodetags, parents):
            if tags:
                parenttags.append('/'.join(tags))
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1128 1128
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Each patch file is scanned for an "hg export" header (user name,
    commit message); the remaining text up to the first diff line is
    used as the message.  The patch is applied with util.patch and the
    result committed.
    """
    patches = (patch1,) + patches

    if not opts['force']:
        # refuse to import on top of local modifications
        (c, a, d, u) = repo.changes()
        if c or a or d:
            raise util.Abort("outstanding uncommitted changes")

    d = opts["base"]
    strip = opts["strip"]

    # heuristics: mail-header-looking first lines, and start-of-diff lines
    mailre = re.compile(r'(?:From |[\w-]+:)')
    diffre = re.compile(r'(?:diff -|--- .*\s+\w+ \w+ +\d+ \d+:\d+:\d+ \d+)')

    for patch in patches:
        ui.status("applying %s\n" % patch)
        pf = os.path.join(d, patch)

        message = []
        user = None
        hgpatch = False
        for line in file(pf):
            line = line.rstrip()
            if (not message and not hgpatch and
                   mailre.match(line) and not opts['force']):
                if len(line) > 35: line = line[:32] + '...'
                raise util.Abort('first line looks like a '
                                 'mail header: ' + line)
            if diffre.match(line):
                # message ends where the diff begins
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug('User: %s\n' % user)
                elif not line.startswith("# ") and line:
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = []          # We may have collected garbage
            else:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = "imported patch %s\n" % patch
        else:
            message = "%s\n" % '\n'.join(message)
        ui.debug('message:\n%s\n' % message)

        files = util.patch(strip, pf, ui)

        if len(files) > 0:
            addremove(ui, repo, *files)
        repo.commit(files, message, user)
1186 1186
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Lists the changesets that a pull from the given source would add,
    optionally (-p) with the patch for each.
    """
    source = ui.expandpath(source)
    other = hg.repository(ui, source)
    if not other.local():
        raise util.Abort("incoming doesn't work for remote repositories yet")
    o = repo.findincoming(other)
    if not o:
        return
    # nodesbetween replaced the removed newer() method; the stale
    # "o = other.newer(o)" line left next to it has been dropped
    o = other.changelog.nodesbetween(o)[0]
    for n in o:
        show_changeset(ui, other, changenode=n)
        if opts['patch']:
            prev = other.changelog.parents(n)[0]
            dodiff(ui, ui, other, prev, n)
            ui.write("\n")
1203 1203
def init(ui, dest="."):
    """create a new repository in the given directory"""
    # create the directory itself if needed (single level only)
    if not os.path.exists(dest):
        os.mkdir(dest)
    hg.repository(ui, dest, create=1)
1209 1209
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns"""
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'

    for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
        # skip files unknown to the dirstate
        if repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(rel, end)
1221 1221
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Revisions are visited window by window (walkchangerevs); within a
    window output is buffered per revision by the dui helper so it can
    be flushed in the desired display order at 'iter' time.
    """
    class dui:
        # Implement and delegate some ui protocol. Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
        def bump(self, rev):
            # start buffering output for a new revision
            self.rev = rev
            self.hunk[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # everything else (verbose, quiet, ...) comes from the real ui
            return getattr(self.ui, key)
    cwd = repo.getcwd()
    if not pats and cwd:
        # anchor -I/-X patterns at the current directory
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
                                           pats, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            # fresh buffer for each window
            du = dui(ui)
        elif st == 'add':
            du.bump(rev)
            br = None
            if opts['branch']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            if opts['keyword']:
                # -k: every keyword must occur in user, description,
                # or the first 20 file names
                changes = repo.changelog.read(repo.changelog.node(rev))
                miss = 0
                for k in opts['keyword']:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            show_changeset(du, repo, rev, brinfo=br)
            if opts['patch']:
                changenode = repo.changelog.node(rev)
                prev, other = repo.changelog.parents(changenode)
                dodiff(du, du, repo, prev, changenode, fns)
                du.write("\n\n")
        elif st == 'iter':
            # flush the buffered hunks for this revision
            for args in du.hunk[rev]:
                ui.write(*args)
1282 1282
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest"""
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            n = change[0]
        except hg.RepoError:
            # not a changeset: treat as a manifest revision
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()

    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    files = m.keys()
    files.sort()

    for f in files:
        mode = mf[f] and "755" or "644"
        ui.write("%40s %3s %s\n" % (hex(m[f]), mode, f))
1302 1302
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Lists the changesets a push to the given destination would send,
    optionally (-p) with the patch for each.
    """
    dest = ui.expandpath(dest)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    # nodesbetween replaced the removed newer() method; the stale
    # "o = repo.newer(o)" line left next to it has been dropped
    o = repo.changelog.nodesbetween(o)[0]
    for n in o:
        show_changeset(ui, repo, changenode=n)
        if opts['patch']:
            prev = repo.changelog.parents(n)[0]
            dodiff(ui, ui, repo, prev, n)
            ui.write("\n")
1315 1315
def parents(ui, repo, rev=None):
    """show the parents of the working dir or revision"""
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    for n in nodes:
        # a null parent means "no second parent"; skip it
        if n == nullid:
            continue
        show_changeset(ui, repo, changenode=n)
1326 1326
def paths(ui, search=None):
    """show definition of symbolic path names"""
    try:
        # opening the repository merges its .hg/hgrc into ui's config,
        # which is where the [paths] entries come from
        repo = hg.repository(ui=ui)
    except hg.RepoError:
        pass

    if not search:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return

    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn("not found!\n")
    return 1
1344 1344
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source"""
    source = ui.expandpath(source)
    ui.status('pulling from %s\n' % (source))

    # command-line overrides for remote access configuration
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    r = repo.pull(other)
    if r:
        return r
    # pull succeeded: update if asked, otherwise hint at it
    if opts['update']:
        return update(ui, repo)
    ui.status("(run 'hg update' to get a working copy)\n")
    return r
1364 1364
def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
    """push changes to the specified destination"""
    dest = ui.expandpath(dest)
    ui.status('pushing to %s\n' % (dest))

    # command-line overrides for remote access configuration
    if ssh:
        ui.setconfig("ui", "ssh", ssh)
    if remotecmd:
        ui.setconfig("ui", "remotecmd", remotecmd)

    remote = hg.repository(ui, dest)
    return repo.push(remote, force)
1378 1378
def rawcommit(ui, repo, *flist, **rc):
    "raw commit interface"
    # -t/--text is the deprecated spelling of -m/--message
    if rc['text']:
        ui.warn("Warning: -t and --text is deprecated,"
                " please use -m or --message instead.\n")
    message = rc['message'] or rc['text']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            # fall through to the "missing commit message" check below
            pass
    if not message and not rc['logfile']:
        raise util.Abort("missing commit message")

    files = relpath(repo, list(flist))
    if rc['files']:
        # --files FILE: one committed path per line
        files += open(rc['files']).read().splitlines()

    # explicit parent revisions, resolved to nodes
    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))
1403 1403
def recover(ui, repo):
    """roll back an interrupted transaction"""
    # delegate entirely to the repository's journal-recovery machinery
    repo.recover()
1407 1407
def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit"""
    names = []
    def okaytoremove(abs, rel, exact):
        # refuse files that are modified, added, or untracked
        c, a, d, u = repo.changes(files = [abs])
        reason = None
        if c: reason = 'is modified'
        elif a: reason = 'has been marked for add'
        elif u: reason = 'is not managed'
        if reason:
            # only complain for files the user named explicitly
            if exact: ui.warn('not removing %s: file %s\n' % (rel, reason))
        else:
            return True
    for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
        if okaytoremove(abs, rel, exact):
            if ui.verbose or not exact: ui.status('removing %s\n' % rel)
            names.append(abs)
    # delete from the working directory, tolerating already-missing files
    for name in names:
        try:
            os.unlink(name)
        except OSError, inst:
            if inst.errno != errno.ENOENT: raise
    # then schedule the removal for the next commit
    repo.remove(names)
1431 1431
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove"""
    # docopy records the copies; we then delete the originals
    errs, copied = docopy(ui, repo, pats, opts)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact: ui.status('removing %s\n' % rel)
        try:
            os.unlink(rel)
        except OSError, inst:
            # already gone is fine; anything else is a real error
            if inst.errno != errno.ENOENT: raise
        names.append(abs)
    repo.remove(names)
    return errs
1445 1445
def revert(ui, repo, *names, **opts):
    """revert modified files or dirs back to their unmodified states"""
    # target revision: --rev if given, else the working dir's first parent
    node = opts['rev'] and repo.lookup(opts['rev']) or \
           repo.dirstate.parents()[0]
    root = os.path.realpath(repo.root)

    def trimpath(p):
        # make p relative to the repository root where possible
        p = os.path.realpath(p)
        if p.startswith(root):
            rest = p[len(root):]
            if not rest:
                return rest
            # NOTE(review): this tests p, not rest; since root is absolute,
            # p always begins with os.sep on POSIX — possibly meant
            # rest.startswith(os.sep).  Confirm before changing.
            if p.startswith(os.sep):
                return rest[1:]
            return p

    relnames = map(trimpath, names or [os.getcwd()])
    chosen = {}

    def choose(name):
        # selection callback for repo.update: accept files under any
        # requested name, honoring --nonrecursive depth limits
        def body(name):
            for r in relnames:
                if not name.startswith(r):
                    continue
                rest = name[len(r):]
                if not rest:
                    return r, True
                depth = rest.count(os.sep)
                if not r:
                    if depth == 0 or not opts['nonrecursive']:
                        return r, True
                elif rest[0] == os.sep:
                    if depth == 1 or not opts['nonrecursive']:
                        return r, True
            return None, False
        relname, ret = body(name)
        if ret:
            # remember which request matched, for error reporting below
            chosen[relname] = 1
        return ret

    r = repo.update(node, False, True, choose, False)
    for n in relnames:
        if n not in chosen:
            ui.warn('error: no matches for %s\n' % n)
            r = 1
    sys.stdout.flush()
    return r
1493 1493
def root(ui, repo):
    """print the root (top) of the current working dir"""
    ui.write(repo.root + "\n")
1497 1497
def serve(ui, repo, **opts):
    """export the repository via HTTP

    With --stdio, speak the ssh wire protocol on stdin/stdout instead
    (used by "hg push/pull ssh://..."); otherwise start an HTTP server.
    """

    if opts["stdio"]:
        fin, fout = sys.stdin, sys.stdout
        # anything printed by library code must not corrupt the protocol
        sys.stdout = sys.stderr

        def getarg():
            # protocol argument: "<name> <length>\n" then <length> bytes
            argline = fin.readline()[:-1]
            arg, l = argline.split()
            val = fin.read(int(l))
            return arg, val
        def respond(v):
            # protocol response: "<length>\n" then the payload
            fout.write("%d\n" % len(v))
            fout.write(v)
            fout.flush()

        lock = None

        # command loop: one command name per line, until EOF (empty read)
        while 1:
            cmd = fin.readline()[:-1]
            if cmd == '':
                return
            if cmd == "heads":
                h = repo.heads()
                respond(" ".join(map(hex, h)) + "\n")
            if cmd == "lock":
                lock = repo.lock()
                respond("")
            if cmd == "unlock":
                if lock:
                    lock.release()
                lock = None
                respond("")
            elif cmd == "branches":
                arg, nodes = getarg()
                nodes = map(bin, nodes.split(" "))
                r = []
                for b in repo.branches(nodes):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "between":
                # pairs come in as "top-bottom" hex node pairs
                arg, pairs = getarg()
                pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
                r = []
                for b in repo.between(pairs):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "changegroup":
                nodes = []
                arg, roots = getarg()
                nodes = map(bin, roots.split(" "))

                # stream the changegroup straight to the client
                cg = repo.changegroup(nodes)
                while 1:
                    d = cg.read(4096)
                    if not d:
                        break
                    fout.write(d)

                fout.flush()

            elif cmd == "addchangegroup":
                # push: requires the client to have taken the lock first
                if not lock:
                    respond("not locked")
                    continue
                respond("")

                r = repo.addchangegroup(fin)
                respond("")

    # HTTP mode: copy relevant options into the [web] config section
    optlist = "name templates style address port ipv6 accesslog errorlog"
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    try:
        httpd = hgweb.create_server(repo)
    except socket.error, inst:
        raise util.Abort('cannot start server: ' + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status('listening at http://%s:%d/\n' % (addr, port))
        else:
            ui.status('listening at http://%s/\n' % addr)
    httpd.serve_forever()
1593 1593
def status(ui, repo, *pats, **opts):
    '''show changed files in the working directory

    M = modified
    A = added
    R = removed
    ? = not tracked
    '''

    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
    # convert every reported path to be relative to the cwd
    c, a, d, u = [[util.pathto(cwd, x) for x in n]
                  for n in repo.changes(files=files, match=matchfn)]

    changetypes = [('modified', 'M', c),
                   ('added', 'A', a),
                   ('removed', 'R', d),
                   ('unknown', '?', u)]

    end = opts['print0'] and '\0' or '\n'

    # show only the requested categories, or all of them by default
    selected = [ct for ct in changetypes if opts[ct[0]]]
    for opt, char, changes in (selected or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
1624 1624
def tag(ui, repo, name, rev=None, **opts):
    """add a tag for the current tip or a given revision"""
    # -t/--text is the deprecated spelling of -m/--message
    if opts['text']:
        ui.warn("Warning: -t and --text is deprecated,"
                " please use -m or --message instead.\n")
    if name == "tip":
        raise util.Abort("the name 'tip' is reserved")
    if rev:
        r = hex(repo.lookup(rev))
    else:
        r = hex(repo.changelog.tip())

    # the revrange separator character is not allowed in tag names
    if name.find(revrangesep) >= 0:
        raise util.Abort("'%s' cannot be used in a tag name" % revrangesep)

    if opts['local']:
        # local tags live in .hg/localtags and are never committed
        repo.opener("localtags", "a").write("%s %s\n" % (r, name))
        return

    # a regular tag modifies .hgtags and commits it; refuse if the
    # user has uncommitted .hgtags changes we would clobber
    (c, a, d, u) = repo.changes()
    for x in (c, a, d, u):
        if ".hgtags" in x:
            raise util.Abort("working copy of .hgtags is changed "
                             "(please commit .hgtags manually)")

    repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
    if repo.dirstate.state(".hgtags") == '?':
        repo.add([".hgtags"])

    message = (opts['message'] or opts['text'] or
               "Added tag %s for changeset %s" % (name, r))
    try:
        repo.commit([".hgtags"], message, opts['user'], opts['date'])
    except ValueError, inst:
        raise util.Abort(str(inst))
1660 1660
def tags(ui, repo):
    """list repository tags"""

    taglist = repo.tagslist()
    # newest first
    taglist.reverse()
    for t, n in taglist:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            # node not present in the changelog (bogus tag entry)
            r = " ?:?"
        ui.write("%-30s %s\n" % (t, r))
1672 1672
def tip(ui, repo):
    """show the tip revision"""
    n = repo.changelog.tip()
    show_changeset(ui, repo, changenode=n)
1677 1677
def unbundle(ui, repo, fname):
    """apply a changegroup file"""
    f = urllib.urlopen(fname)

    # bundles start with the 4-byte magic "HG10"
    header = f.read(4)
    if header != "HG10":
        raise util.Abort("%s: not a Mercurial bundle file" % fname)

    def bzgenerator(source):
        # lazily decompress the bz2-compressed payload chunk by chunk
        zd = bz2.BZ2Decompressor()
        for chunk in source:
            yield zd.decompress(chunk)
        yield zd.flush()

    bzgen = bzgenerator(util.filechunkiter(f, 4096))
    repo.addchangegroup(util.chunkbuffer(bzgen))
1693 1693
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the most recent pull or commit transaction, returning
    the repository to the state it was in beforehand.

    Use with care: there is only a single level of undo, and no redo.

    Not intended for use on public repositories.  Once a change is
    visible for pull by other users, undoing it locally is
    ineffective.
    """
    repo.undo()
1708 1708
def update(ui, repo, node=None, merge=False, clean=False, branch=None):
    '''update or merge working directory

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    Otherwise the result is a merge between the contents of the
    current working directory and the requested version. Files that
    changed between either parent are marked as changed for the next
    commit and a commit must be performed before any further updates
    are allowed.
    '''
    if branch:
        # resolve a branch name to a head node; fail if ambiguous
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            ui.warn("Found multiple heads for %s\n" % branch)
            for x in found:
                show_changeset(ui, repo, changenode=x, brinfo=br)
            return 1
        if len(found) == 1:
            node = found[0]
            ui.warn("Using head %s for branch %s\n" % (short(node), branch))
        else:
            ui.warn("branch %s not found\n" % (branch))
            return 1
    else:
        # default target is tip when no node was given
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean)
1742 1742
def verify(ui, repo):
    """verify the integrity of the repository"""
    # returns a non-zero exit code when problems were found
    return repo.verify()
1746 1746
1747 1747 # Command options and aliases are listed here, alphabetically
1748 1748
1749 1749 table = {
1750 1750 "^add":
1751 1751 (add,
1752 1752 [('I', 'include', [], 'include path in search'),
1753 1753 ('X', 'exclude', [], 'exclude path from search')],
1754 1754 "hg add [OPTION]... [FILE]..."),
1755 1755 "addremove":
1756 1756 (addremove,
1757 1757 [('I', 'include', [], 'include path in search'),
1758 1758 ('X', 'exclude', [], 'exclude path from search')],
1759 1759 "hg addremove [OPTION]... [FILE]..."),
1760 1760 "^annotate":
1761 1761 (annotate,
1762 1762 [('r', 'rev', '', 'revision'),
1763 1763 ('a', 'text', None, 'treat all files as text'),
1764 1764 ('u', 'user', None, 'show user'),
1765 1765 ('n', 'number', None, 'show revision number'),
1766 1766 ('c', 'changeset', None, 'show changeset'),
1767 1767 ('I', 'include', [], 'include path in search'),
1768 1768 ('X', 'exclude', [], 'exclude path from search')],
1769 1769 'hg annotate [OPTION]... FILE...'),
1770 1770 "bundle":
1771 1771 (bundle,
1772 1772 [],
1773 1773 'hg bundle FILE DEST'),
1774 1774 "cat":
1775 1775 (cat,
1776 1776 [('I', 'include', [], 'include path in search'),
1777 1777 ('X', 'exclude', [], 'exclude path from search'),
1778 1778 ('o', 'output', "", 'output to file'),
1779 1779 ('r', 'rev', '', 'revision')],
1780 1780 'hg cat [OPTION]... FILE...'),
1781 1781 "^clone":
1782 1782 (clone,
1783 1783 [('U', 'noupdate', None, 'skip update after cloning'),
1784 1784 ('e', 'ssh', "", 'ssh command'),
1785 1785 ('', 'pull', None, 'use pull protocol to copy metadata'),
1786 1786 ('', 'remotecmd', "", 'remote hg command')],
1787 1787 'hg clone [OPTION]... SOURCE [DEST]'),
1788 1788 "^commit|ci":
1789 1789 (commit,
1790 1790 [('A', 'addremove', None, 'run add/remove during commit'),
1791 1791 ('I', 'include', [], 'include path in search'),
1792 1792 ('X', 'exclude', [], 'exclude path from search'),
1793 1793 ('m', 'message', "", 'commit message'),
1794 1794 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1795 1795 ('l', 'logfile', "", 'commit message file'),
1796 1796 ('d', 'date', "", 'date code'),
1797 1797 ('u', 'user', "", 'user')],
1798 1798 'hg commit [OPTION]... [FILE]...'),
1799 1799 "copy|cp": (copy,
1800 1800 [('I', 'include', [], 'include path in search'),
1801 1801 ('X', 'exclude', [], 'exclude path from search'),
1802 1802 ('A', 'after', None, 'record a copy after it has happened'),
1803 1803 ('f', 'force', None, 'replace destination if it exists'),
1804 1804 ('p', 'parents', None, 'append source path to dest')],
1805 1805 'hg copy [OPTION]... [SOURCE]... DEST'),
1806 1806 "debugancestor": (debugancestor, [], 'debugancestor INDEX REV1 REV2'),
1807 1807 "debugcheckstate": (debugcheckstate, [], 'debugcheckstate'),
1808 1808 "debugconfig": (debugconfig, [], 'debugconfig'),
1809 1809 "debugstate": (debugstate, [], 'debugstate'),
1810 1810 "debugdata": (debugdata, [], 'debugdata FILE REV'),
1811 1811 "debugindex": (debugindex, [], 'debugindex FILE'),
1812 1812 "debugindexdot": (debugindexdot, [], 'debugindexdot FILE'),
1813 1813 "debugrename": (debugrename, [], 'debugrename FILE [REV]'),
1814 1814 "debugwalk":
1815 1815 (debugwalk,
1816 1816 [('I', 'include', [], 'include path in search'),
1817 1817 ('X', 'exclude', [], 'exclude path from search')],
1818 1818 'debugwalk [OPTION]... [FILE]...'),
1819 1819 "^diff":
1820 1820 (diff,
1821 1821 [('r', 'rev', [], 'revision'),
1822 1822 ('a', 'text', None, 'treat all files as text'),
1823 1823 ('I', 'include', [], 'include path in search'),
1824 1824 ('X', 'exclude', [], 'exclude path from search')],
1825 1825 'hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...'),
1826 1826 "^export":
1827 1827 (export,
1828 1828 [('o', 'output', "", 'output to file'),
1829 1829 ('a', 'text', None, 'treat all files as text')],
1830 1830 "hg export [-a] [-o OUTFILE] REV..."),
1831 1831 "forget":
1832 1832 (forget,
1833 1833 [('I', 'include', [], 'include path in search'),
1834 1834 ('X', 'exclude', [], 'exclude path from search')],
1835 1835 "hg forget [OPTION]... FILE..."),
1836 1836 "grep":
1837 1837 (grep,
1838 1838 [('0', 'print0', None, 'end fields with NUL'),
1839 1839 ('I', 'include', [], 'include path in search'),
1840 1840 ('X', 'exclude', [], 'include path in search'),
1841 1841 ('', 'all', None, 'print all revisions with matches'),
1842 1842 ('i', 'ignore-case', None, 'ignore case when matching'),
1843 1843 ('l', 'files-with-matches', None, 'print names of files and revs with matches'),
1844 1844 ('n', 'line-number', None, 'print line numbers'),
1845 1845 ('r', 'rev', [], 'search in revision rev'),
1846 1846 ('u', 'user', None, 'print user who made change')],
1847 1847 "hg grep [OPTION]... PATTERN [FILE]..."),
1848 1848 "heads":
1849 1849 (heads,
1850 1850 [('b', 'branches', None, 'find branch info')],
1851 1851 'hg heads [-b]'),
1852 1852 "help": (help_, [], 'hg help [COMMAND]'),
1853 1853 "identify|id": (identify, [], 'hg identify'),
1854 1854 "import|patch":
1855 1855 (import_,
1856 1856 [('p', 'strip', 1, 'path strip'),
1857 1857 ('f', 'force', None, 'skip check for outstanding changes'),
1858 1858 ('b', 'base', "", 'base path')],
1859 1859 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
1860 1860 "incoming|in": (incoming,
1861 1861 [('p', 'patch', None, 'show patch')],
1862 1862 'hg incoming [-p] [SOURCE]'),
1863 1863 "^init": (init, [], 'hg init [DEST]'),
1864 1864 "locate":
1865 1865 (locate,
1866 1866 [('r', 'rev', '', 'revision'),
1867 1867 ('0', 'print0', None, 'end filenames with NUL'),
1868 1868 ('f', 'fullpath', None, 'print complete paths'),
1869 1869 ('I', 'include', [], 'include path in search'),
1870 1870 ('X', 'exclude', [], 'exclude path from search')],
1871 1871 'hg locate [OPTION]... [PATTERN]...'),
1872 1872 "^log|history":
1873 1873 (log,
1874 1874 [('I', 'include', [], 'include path in search'),
1875 1875 ('X', 'exclude', [], 'exclude path from search'),
1876 1876 ('b', 'branch', None, 'show branches'),
1877 1877 ('k', 'keyword', [], 'search for a keyword'),
1878 1878 ('r', 'rev', [], 'revision'),
1879 1879 ('p', 'patch', None, 'show patch')],
1880 1880 'hg log [-I] [-X] [-r REV]... [-p] [FILE]'),
1881 1881 "manifest": (manifest, [], 'hg manifest [REV]'),
1882 1882 "outgoing|out": (outgoing,
1883 1883 [('p', 'patch', None, 'show patch')],
1884 1884 'hg outgoing [-p] [DEST]'),
1885 1885 "parents": (parents, [], 'hg parents [REV]'),
1886 1886 "paths": (paths, [], 'hg paths [NAME]'),
1887 1887 "^pull":
1888 1888 (pull,
1889 1889 [('u', 'update', None, 'update working directory'),
1890 1890 ('e', 'ssh', "", 'ssh command'),
1891 1891 ('', 'remotecmd', "", 'remote hg command')],
1892 1892 'hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]'),
1893 1893 "^push":
1894 1894 (push,
1895 1895 [('f', 'force', None, 'force push'),
1896 1896 ('e', 'ssh', "", 'ssh command'),
1897 1897 ('', 'remotecmd', "", 'remote hg command')],
1898 1898 'hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]'),
1899 1899 "rawcommit":
1900 1900 (rawcommit,
1901 1901 [('p', 'parent', [], 'parent'),
1902 1902 ('d', 'date', "", 'date code'),
1903 1903 ('u', 'user', "", 'user'),
1904 1904 ('F', 'files', "", 'file list'),
1905 1905 ('m', 'message', "", 'commit message'),
1906 1906 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1907 1907 ('l', 'logfile', "", 'commit message file')],
1908 1908 'hg rawcommit [OPTION]... [FILE]...'),
1909 1909 "recover": (recover, [], "hg recover"),
1910 1910 "^remove|rm": (remove,
1911 1911 [('I', 'include', [], 'include path in search'),
1912 1912 ('X', 'exclude', [], 'exclude path from search')],
1913 1913 "hg remove [OPTION]... FILE..."),
1914 1914 "rename|mv": (rename,
1915 1915 [('I', 'include', [], 'include path in search'),
1916 1916 ('X', 'exclude', [], 'exclude path from search'),
1917 1917 ('A', 'after', None, 'record a copy after it has happened'),
1918 1918 ('f', 'force', None, 'replace destination if it exists'),
1919 1919 ('p', 'parents', None, 'append source path to dest')],
1920 1920 'hg rename [OPTION]... [SOURCE]... DEST'),
1921 1921 "^revert":
1922 1922 (revert,
1923 1923 [("n", "nonrecursive", None, "don't recurse into subdirs"),
1924 1924 ("r", "rev", "", "revision")],
1925 1925 "hg revert [-n] [-r REV] [NAME]..."),
1926 1926 "root": (root, [], "hg root"),
1927 1927 "^serve":
1928 1928 (serve,
1929 1929 [('A', 'accesslog', '', 'access log file'),
1930 1930 ('E', 'errorlog', '', 'error log file'),
1931 1931 ('p', 'port', 0, 'listen port'),
1932 1932 ('a', 'address', '', 'interface address'),
1933 1933 ('n', 'name', "", 'repository name'),
1934 1934 ('', 'stdio', None, 'for remote clients'),
1935 1935 ('t', 'templates', "", 'template directory'),
1936 1936 ('', 'style', "", 'template style'),
1937 1937 ('6', 'ipv6', None, 'use IPv6 in addition to IPv4')],
1938 1938 "hg serve [OPTION]..."),
1939 1939 "^status":
1940 1940 (status,
1941 1941 [('m', 'modified', None, 'show only modified files'),
1942 1942 ('a', 'added', None, 'show only added files'),
1943 1943 ('r', 'removed', None, 'show only removed files'),
1944 1944 ('u', 'unknown', None, 'show only unknown (not tracked) files'),
1945 1945 ('n', 'no-status', None, 'hide status prefix'),
1946 1946 ('0', 'print0', None, 'end filenames with NUL'),
1947 1947 ('I', 'include', [], 'include path in search'),
1948 1948 ('X', 'exclude', [], 'exclude path from search')],
1949 1949 "hg status [OPTION]... [FILE]..."),
1950 1950 "tag":
1951 1951 (tag,
1952 1952 [('l', 'local', None, 'make the tag local'),
1953 1953 ('m', 'message', "", 'commit message'),
1954 1954 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1955 1955 ('d', 'date', "", 'date code'),
1956 1956 ('u', 'user', "", 'user')],
1957 1957 'hg tag [OPTION]... NAME [REV]'),
1958 1958 "tags": (tags, [], 'hg tags'),
1959 1959 "tip": (tip, [], 'hg tip'),
1960 1960 "unbundle":
1961 1961 (unbundle,
1962 1962 [],
1963 1963 'hg unbundle FILE'),
1964 1964 "undo": (undo, [], 'hg undo'),
1965 1965 "^update|up|checkout|co":
1966 1966 (update,
1967 1967 [('b', 'branch', "", 'checkout the head of a specific branch'),
1968 1968 ('m', 'merge', None, 'allow merging of conflicts'),
1969 1969 ('C', 'clean', None, 'overwrite locally modified files')],
1970 1970 'hg update [-b TAG] [-m] [-C] [REV]'),
1971 1971 "verify": (verify, [], 'hg verify'),
1972 1972 "version": (show_version, [], 'hg version'),
1973 1973 }
1974 1974
# Options accepted by every command, as (short, long, default, help)
# tuples; parse() appends these to each command's own option list and
# then separates them back out after parsing.
globalopts = [
    ('R', 'repository', "", 'repository root directory'),
    ('', 'cwd', '', 'change working directory'),
    ('y', 'noninteractive', None, 'run non-interactively'),
    ('q', 'quiet', None, 'quiet mode'),
    ('v', 'verbose', None, 'verbose mode'),
    ('', 'debug', None, 'debug mode'),
    ('', 'debugger', None, 'start debugger'),
    ('', 'traceback', None, 'print traceback on exception'),
    ('', 'time', None, 'time how long the command takes'),
    ('', 'profile', None, 'profile'),
    ('', 'version', None, 'output version information and exit'),
    ('h', 'help', None, 'display help and exit'),
]
1989 1989
# Space-separated names of commands that run without a local repository;
# dispatch() skips repository construction for these.
norepo = ("clone init version help debugancestor debugconfig debugdata"
          " debugindex debugindexdot paths")
1992 1992
def find(cmd):
    """Look up cmd in the command table.

    Table keys such as "copy|cp" are regex alternations of aliases; the
    match is anchored so a mere prefix does not qualify.  Returns
    (key, table entry) or raises UnknownCommand.
    """
    for spec, entry in table.items():
        if re.match("(%s)$" % spec, cmd):
            return spec, entry
    raise UnknownCommand(cmd)
1999 1999
class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP.

    Raised by catchterm(), which dispatch() installs as the handler for
    those signals; dispatch() catches it and reports "killed!".
    """
2002 2002
def catchterm(*args):
    """Signal handler: convert SIGTERM/SIGHUP into SignalInterrupt."""
    # handler receives (signum, frame); neither is needed here
    raise SignalInterrupt()
2005 2005
def run():
    # Command-line entry point: run dispatch() on the argv tail and exit
    # with whatever status it returns.
    sys.exit(dispatch(sys.argv[1:]))
2008 2008
class ParseError(Exception):
    """Exception raised on errors in parsing the command line.

    args are (command name or None, underlying getopt error); see parse().
    """
2011 2011
def parse(args):
    """Parse a command line into (cmd, func, args, options, cmdoptions).

    cmd is the command name (or None if absent), func the implementing
    function (or None), args the remaining positionals, options the
    global option dict and cmdoptions the command-specific option dict.
    Raises ParseError on bad options and UnknownCommand (via find) on an
    unrecognized command.
    """
    options = {}
    cmdoptions = {}

    # first pass: global options appearing before the command name
    try:
        args = fancyopts.fancyopts(args, globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        i = find(cmd)[1]    # table entry: (func, option list, usage)
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    # second pass: command options, plus globals given after the command
    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2045 2045
def dispatch(args):
    """Top-level command driver.

    Installs signal handlers, loads configured extensions, parses args,
    configures the ui, instantiates the repository (unless the command
    is in norepo), runs the command, and translates failures into user
    messages and exit codes.  Returns the command's result on success.
    """
    signal.signal(signal.SIGTERM, catchterm)
    try:
        signal.signal(signal.SIGHUP, catchterm)
    except AttributeError:
        # platform without SIGHUP (e.g. Windows)
        pass

    u = ui.ui()
    external = []
    # load each configured extension: from a file path when one is
    # given, otherwise as an importable (possibly dotted) module name
    for x in u.extensions():
        if x[1]:
            try:
                mod = imp.load_source(x[0], x[1])
            except:
                u.warn("*** failed to import extension %s\n" % x[1])
                continue
        else:
            def importh(name):
                # __import__ returns the top-level package; walk down to
                # the leaf module for dotted names
                mod = __import__(name)
                components = name.split('.')
                for comp in components[1:]:
                    mod = getattr(mod, comp)
                return mod
            try:
                mod = importh(x[0])
            except:
                u.warn("failed to import extension %s\n" % x[0])
                continue

        external.append(mod)
    # merge extension command tables into the global table, warning on
    # any override of a built-in command
    for x in external:
        cmdtable = getattr(x, 'cmdtable', {})
        for t in cmdtable:
            if t in table:
                u.warn("module %s overrides %s\n" % (x.__name__, t))
        table.update(cmdtable)

    try:
        cmd, func, args, options, cmdoptions = parse(args)
    except ParseError, inst:
        if inst.args[0]:
            u.warn("hg %s: %s\n" % (inst.args[0], inst.args[1]))
            help_(u, inst.args[0])
        else:
            u.warn("hg: %s\n" % inst.args[1])
            help_(u, 'shortlist')
        sys.exit(-1)
    except UnknownCommand, inst:
        u.warn("hg: unknown command '%s'\n" % inst.args[0])
        help_(u, 'shortlist')
        sys.exit(1)

    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            u.warn("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n" %
                (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        # print timing even when the command exits via sys.exit()
        atexit.register(print_time)

    u.updateopts(options["verbose"], options["debug"], options["quiet"],
                 not options["noninteractive"])

    # enter the debugger before command execution
    if options['debugger']:
        pdb.set_trace()

    try:
        try:
            if options['help']:
                help_(u, cmd, options['version'])
                sys.exit(0)
            elif options['version']:
                show_version(u)
                sys.exit(0)
            elif not cmd:
                help_(u, 'shortlist')
                sys.exit(0)

            if options['cwd']:
                try:
                    os.chdir(options['cwd'])
                except OSError, inst:
                    raise util.Abort('%s: %s' %
                                     (options['cwd'], inst.strerror))

            # commands listed in norepo run without a repository object
            if cmd not in norepo.split():
                path = options["repository"] or ""
                repo = hg.repository(ui=u, path=path)
                for x in external:
                    if hasattr(x, 'reposetup'): x.reposetup(u, repo)
                d = lambda: func(u, repo, *args, **cmdoptions)
            else:
                d = lambda: func(u, *args, **cmdoptions)

            if options['profile']:
                import hotshot, hotshot.stats
                prof = hotshot.Profile("hg.prof")
                r = prof.runcall(d)
                prof.close()
                stats = hotshot.stats.load("hg.prof")
                stats.strip_dirs()
                stats.sort_stats('time', 'calls')
                stats.print_stats(40)
                return r
            else:
                return d()
        except:
            # enter the debugger when we hit an exception
            if options['debugger']:
                pdb.post_mortem(sys.exc_info()[2])
            if options['traceback']:
                traceback.print_exc()
            raise
    except hg.RepoError, inst:
        u.warn("abort: ", inst, "!\n")
    except revlog.RevlogError, inst:
        u.warn("abort: ", inst, "!\n")
    except SignalInterrupt:
        u.warn("killed!\n")
    except KeyboardInterrupt:
        try:
            u.warn("interrupted!\n")
        except IOError, inst:
            if inst.errno == errno.EPIPE:
                if u.debugflag:
                    u.warn("\nbroken pipe\n")
            else:
                raise
    except IOError, inst:
        if hasattr(inst, "code"):
            # presumably an HTTP error object carrying a status code --
            # TODO confirm against the urllib-based remote code paths
            u.warn("abort: %s\n" % inst)
        elif hasattr(inst, "reason"):
            # presumably a URL-level error -- TODO confirm as above
            u.warn("abort: error: %s\n" % inst.reason[1])
        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
            if u.debugflag:
                u.warn("broken pipe\n")
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                u.warn("abort: %s - %s\n" % (inst.strerror, inst.filename))
            else:
                u.warn("abort: %s\n" % inst.strerror)
        else:
            raise
    except OSError, inst:
        if hasattr(inst, "filename"):
            u.warn("abort: %s: %s\n" % (inst.strerror, inst.filename))
        else:
            u.warn("abort: %s\n" % inst.strerror)
    except util.Abort, inst:
        u.warn('abort: ', inst.args[0] % inst.args[1:], '\n')
        sys.exit(1)
    except TypeError, inst:
        # was this an argument error?
        tb = traceback.extract_tb(sys.exc_info()[2])
        if len(tb) > 2: # no
            raise
        u.debug(inst, "\n")
        u.warn("%s: invalid arguments\n" % cmd)
        help_(u, cmd)
    except UnknownCommand, inst:
        u.warn("hg: unknown command '%s'\n" % inst.args[0])
        help_(u, 'shortlist')
    except SystemExit:
        # don't catch this in the catch-all below
        raise
    except:
        u.warn("** unknown exception encountered, details follow\n")
        u.warn("** report bug details to mercurial@selenic.com\n")
        raise

    # reaching this point means a handled failure occurred above
    sys.exit(-1)
@@ -1,1446 +1,1420 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from demandload import *
12 12 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 13
14 14 class localrepository:
    def __init__(self, ui, path=None, create=0):
        """Open (or, with create=1, initialize) the repository at path.

        With no path, search upward from the current directory for a
        ".hg" directory.  Raises repo.RepoError if no repository is
        found or the given one does not exist.
        """
        if not path:
            # walk up the directory tree looking for .hg
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp: raise repo.RepoError("no repo found")
            path = p
        self.path = os.path.join(path, ".hg")

        if not create and not os.path.isdir(self.path):
            raise repo.RepoError("repository %s not found" % self.path)

        self.root = os.path.abspath(path)
        self.ui = ui
        self.opener = util.opener(self.path)    # files under .hg
        self.wopener = util.opener(self.root)   # working-directory files
        self.manifest = manifest.manifest(self.opener)
        self.changelog = changelog.changelog(self.opener)
        self.tagscache = None       # filled lazily by tags()
        self.nodetagscache = None   # filled lazily by nodetags()
        self.encodepats = None      # filled lazily by wread()
        self.decodepats = None      # filled lazily by wwrite()

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
        try:
            # per-repository configuration, if present
            self.ui.readconfig(self.opener("hgrc"))
        except IOError: pass
47 47
    def hook(self, name, **args):
        """Run the shell command configured for [hooks] name, if any.

        Keyword arguments are exported to the hook as upper-cased
        environment variables for the duration of the call.  Returns
        False if the hook ran and exited non-zero, True otherwise
        (including when no hook is configured).
        """
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            # export args, remembering prior environment values
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            # Hooks run in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            r = os.system(s)
            os.chdir(olddir)

            # restore the environment exactly as it was
            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True
75 75
    def tags(self):
        '''return a mapping of tag to node'''
        # the mapping is computed once and cached in self.tagscache
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                # record tag k -> binary node; a malformed hex hash
                # yields an empty node rather than an error
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.read(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                pass

            try:
                # local tags are read last, so they override .hgtags
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            # 'tip' is always present and taken from the changelog
            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
113 113
114 114 def tagslist(self):
115 115 '''return a list of tags ordered by revision'''
116 116 l = []
117 117 for t, n in self.tags().items():
118 118 try:
119 119 r = self.changelog.rev(n)
120 120 except:
121 121 r = -2 # sort to the beginning of the list if unknown
122 122 l.append((r,t,n))
123 123 l.sort()
124 124 return [(t,n) for r,t,n in l]
125 125
126 126 def nodetags(self, node):
127 127 '''return the tags associated with a node'''
128 128 if not self.nodetagscache:
129 129 self.nodetagscache = {}
130 130 for t,n in self.tags().items():
131 131 self.nodetagscache.setdefault(n,[]).append(t)
132 132 return self.nodetagscache.get(node, [])
133 133
    def lookup(self, key):
        """Resolve a tag name or changelog key to a node.

        Tags take precedence over changelog lookup.  Raises
        repo.RepoError if neither resolves the key.
        """
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                # NOTE(review): bare except also masks unexpected errors;
                # consider narrowing once changelog.lookup's failure
                # modes are pinned down.
                raise repo.RepoError("unknown revision '%s'" % key)
142 142
143 143 def dev(self):
144 144 return os.stat(self.path).st_dev
145 145
    def local(self):
        # always true for this class; presumably remote repository
        # classes answer False -- verify against repo module callers
        return True
148 148
    def join(self, f):
        # path of f inside the .hg directory
        return os.path.join(self.path, f)
151 151
    def wjoin(self, f):
        # path of f inside the working directory
        return os.path.join(self.root, f)
154 154
155 155 def file(self, f):
156 156 if f[0] == '/': f = f[1:]
157 157 return filelog.filelog(self.opener, f)
158 158
    def getcwd(self):
        # delegate current-directory computation to the dirstate
        return self.dirstate.getcwd()
161 161
    def wfile(self, f, mode='r'):
        # open file f from the working directory in the given mode
        return self.wopener(f, mode)
164 164
    def wread(self, filename):
        """Read filename from the working directory, applying any
        matching [encode] filter command to its contents."""
        if self.encodepats == None:
            # compile the [encode] config section into (matcher, cmd)
            # pairs once; cached for subsequent reads
            l = []
            for pat, cmd in self.ui.configitems("encode"):
                mf = util.matcher("", "/", [pat], [], [])[1]
                l.append((mf, cmd))
            self.encodepats = l

        data = self.wopener(filename, 'r').read()

        # only the first matching pattern is applied
        for mf, cmd in self.encodepats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = util.filter(data, cmd)
                break

        return data
182 182
    def wwrite(self, filename, data, fd=None):
        """Write data to filename in the working directory, applying any
        matching [decode] filter command first.

        When fd is given, write to that open file object instead of
        opening filename.
        """
        if self.decodepats == None:
            # compile the [decode] config section into (matcher, cmd)
            # pairs once; cached for subsequent writes
            l = []
            for pat, cmd in self.ui.configitems("decode"):
                mf = util.matcher("", "/", [pat], [], [])[1]
                l.append((mf, cmd))
            self.decodepats = l

        # only the first matching pattern is applied
        for mf, cmd in self.decodepats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = util.filter(data, cmd)
                break

        if fd:
            return fd.write(data)
        return self.wopener(filename, 'w').write(data)
200 200
    def transaction(self):
        """Start a new transaction and return the transaction object.

        The current dirstate is copied to journal.dirstate first; the
        after callback renames journal files to undo files so undo()
        can later restore both the repository and the dirstate.
        """
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate file yet (fresh repository)
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        def after():
            # preserve the journal as the undo data
            util.rename(self.join("journal"), self.join("undo"))
            util.rename(self.join("journal.dirstate"),
                        self.join("undo.dirstate"))

        return transaction.transaction(self.ui.warn, self.opener,
                                       self.join("journal"), after)
216 216
    def recover(self):
        """Roll back an interrupted transaction, if its journal exists."""
        # holding the lock object keeps the repo locked for the rollback
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")
224 224
    def undo(self):
        """Roll back the last completed transaction and its dirstate."""
        # holding the lock object keeps the repo locked for the rollback
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            # drop the in-memory dirstate before replacing its file,
            # then reload from the restored copy
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")
235 235
    def lock(self, wait=1):
        """Acquire and return the repository lock.

        If the lock is held and wait is true, warn and block until it
        is released; otherwise re-raise lock.LockHeld.
        """
        try:
            # first try a non-blocking acquire
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst
244 244
    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        """Commit files against explicit parents p1/p2 (defaulting to
        the dirstate parents), bypassing dirstate status tracking.

        The dirstate is only updated when p1 is the working directory's
        current first parent.  Files that cannot be read from the
        working directory are treated as removed.
        """
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # track the result in the dirstate only when committing on top
        # of the working directory's first parent
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                fp1 = m1.get(f, nullid)
                fp2 = m2.get(f, nullid)

                # is the same revision on two branches of a merge?
                if fp2 == fp1:
                    fp2 = nullid

                if fp2 != nullid:
                    # is one parent an ancestor of the other?
                    fpa = r.ancestor(fp1, fp2)
                    if fpa == fp1:
                        fp1, fp2 = fp2, nullid
                    elif fpa == fp2:
                        fp2 = nullid

                # is the file unmodified from the parent?
                if t == r.read(fp1):
                    # record the proper existing parent in manifest
                    # no need to add a revision
                    mm[f] = fp1
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # unreadable in the working dir: drop it from the commit
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
314 314
315 315 def commit(self, files = None, text = "", user = None, date = None,
316 316 match = util.always, force=False):
317 317 commit = []
318 318 remove = []
319 319 changed = []
320 320
321 321 if files:
322 322 for f in files:
323 323 s = self.dirstate.state(f)
324 324 if s in 'nmai':
325 325 commit.append(f)
326 326 elif s == 'r':
327 327 remove.append(f)
328 328 else:
329 329 self.ui.warn("%s not tracked!\n" % f)
330 330 else:
331 331 (c, a, d, u) = self.changes(match=match)
332 332 commit = c + a
333 333 remove = d
334 334
335 335 p1, p2 = self.dirstate.parents()
336 336 c1 = self.changelog.read(p1)
337 337 c2 = self.changelog.read(p2)
338 338 m1 = self.manifest.read(c1[0])
339 339 mf1 = self.manifest.readflags(c1[0])
340 340 m2 = self.manifest.read(c2[0])
341 341
342 342 if not commit and not remove and not force and p2 == nullid:
343 343 self.ui.status("nothing changed\n")
344 344 return None
345 345
346 346 if not self.hook("precommit"):
347 347 return None
348 348
349 349 lock = self.lock()
350 350 tr = self.transaction()
351 351
352 352 # check in files
353 353 new = {}
354 354 linkrev = self.changelog.count()
355 355 commit.sort()
356 356 for f in commit:
357 357 self.ui.note(f + "\n")
358 358 try:
359 359 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
360 360 t = self.wread(f)
361 361 except IOError:
362 362 self.ui.warn("trouble committing %s!\n" % f)
363 363 raise
364 364
365 365 r = self.file(f)
366 366
367 367 meta = {}
368 368 cp = self.dirstate.copied(f)
369 369 if cp:
370 370 meta["copy"] = cp
371 371 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
372 372 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
373 373 fp1, fp2 = nullid, nullid
374 374 else:
375 375 fp1 = m1.get(f, nullid)
376 376 fp2 = m2.get(f, nullid)
377 377
378 378 # is the same revision on two branches of a merge?
379 379 if fp2 == fp1:
380 380 fp2 = nullid
381 381
382 382 if fp2 != nullid:
383 383 # is one parent an ancestor of the other?
384 384 fpa = r.ancestor(fp1, fp2)
385 385 if fpa == fp1:
386 386 fp1, fp2 = fp2, nullid
387 387 elif fpa == fp2:
388 388 fp2 = nullid
389 389
390 390 # is the file unmodified from the parent?
391 391 if not meta and t == r.read(fp1):
392 392 # record the proper existing parent in manifest
393 393 # no need to add a revision
394 394 new[f] = fp1
395 395 continue
396 396
397 397 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
398 398 # remember what we've added so that we can later calculate
399 399 # the files to pull from a set of changesets
400 400 changed.append(f)
401 401
402 402 # update manifest
403 403 m1.update(new)
404 404 for f in remove:
405 405 if f in m1:
406 406 del m1[f]
407 407 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
408 408 (new, remove))
409 409
410 410 # add changeset
411 411 new = new.keys()
412 412 new.sort()
413 413
414 414 if not text:
415 415 edittext = ""
416 416 if p2 != nullid:
417 417 edittext += "HG: branch merge\n"
418 418 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
419 419 edittext += "".join(["HG: changed %s\n" % f for f in changed])
420 420 edittext += "".join(["HG: removed %s\n" % f for f in remove])
421 421 if not changed and not remove:
422 422 edittext += "HG: no files changed\n"
423 423 edittext = self.ui.edit(edittext)
424 424 if not edittext.rstrip():
425 425 return None
426 426 text = edittext
427 427
428 428 user = user or self.ui.username()
429 429 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
430 430 tr.close()
431 431
432 432 self.dirstate.setparents(n)
433 433 self.dirstate.update(new, "n")
434 434 self.dirstate.forget(remove)
435 435
436 436 if not self.hook("commit", node=hex(n)):
437 437 return None
438 438 return n
439 439
440 440 def walk(self, node=None, files=[], match=util.always):
441 441 if node:
442 442 for fn in self.manifest.read(self.changelog.read(node)[0]):
443 443 if match(fn): yield 'm', fn
444 444 else:
445 445 for src, fn in self.dirstate.walk(files, match):
446 446 yield src, fn
447 447
    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        """Compare two revisions, or a revision and the working directory.

        node1 defaults to the working directory's first parent; node2
        defaults to the working directory itself.  Returns sorted lists
        (changed, added, deleted, unknown) restricted to files accepted
        by match; unknown is only populated when comparing against the
        working directory.
        """
        mf2, u = None, []

        def fcmp(fn, mf):
            # compare working-dir contents of fn against its manifest rev
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            # manifest of node, restricted to files accepted by match
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            # entries with empty hashes mark working-dir-only contents
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    # an empty hash forces a content comparison
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        # whatever remains in mf1 was deleted relative to node1
        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)
518 518
519 519 def add(self, list):
520 520 for f in list:
521 521 p = self.wjoin(f)
522 522 if not os.path.exists(p):
523 523 self.ui.warn("%s does not exist!\n" % f)
524 524 elif not os.path.isfile(p):
525 525 self.ui.warn("%s not added: only files supported currently\n" % f)
526 526 elif self.dirstate.state(f) in 'an':
527 527 self.ui.warn("%s already tracked!\n" % f)
528 528 else:
529 529 self.dirstate.update([f], "a")
530 530
531 531 def forget(self, list):
532 532 for f in list:
533 533 if self.dirstate.state(f) not in 'ai':
534 534 self.ui.warn("%s not added!\n" % f)
535 535 else:
536 536 self.dirstate.forget([f])
537 537
538 538 def remove(self, list):
539 539 for f in list:
540 540 p = self.wjoin(f)
541 541 if os.path.exists(p):
542 542 self.ui.warn("%s still exists!\n" % f)
543 543 elif self.dirstate.state(f) == 'a':
544 544 self.ui.warn("%s never committed!\n" % f)
545 545 self.dirstate.forget([f])
546 546 elif f not in self.dirstate:
547 547 self.ui.warn("%s not tracked!\n" % f)
548 548 else:
549 549 self.dirstate.update([f], "r")
550 550
551 551 def copy(self, source, dest):
552 552 p = self.wjoin(dest)
553 553 if not os.path.exists(p):
554 554 self.ui.warn("%s does not exist!\n" % dest)
555 555 elif not os.path.isfile(p):
556 556 self.ui.warn("copy failed: %s is not a file\n" % dest)
557 557 else:
558 558 if self.dirstate.state(dest) == '?':
559 559 self.dirstate.update([dest], "a")
560 560 self.dirstate.copy(source, dest)
561 561
    def heads(self):
        # delegate head computation to the changelog
        return self.changelog.heads()
564 564
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph: a->b->c->d->e
    # \ /
    # aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Map each head node to the list of branch tags visible from it.

        See the block comment above for the elimination rules.
        """
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]  # work queue of heads still to walk
        chlog = self.changelog
        branches = {}   # node -> {tagged node visible from it: 1}
        merges = []     # (second parent, found-so-far) pairs to revisit
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                # resume a deferred walk down a merge's second parent,
                # inheriting the tags already found on that path
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            # 'tip' is not a branch name
                            continue
                        # this tagged node is visible from every node on
                        # the path walked so far, and from itself
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch: stop descending
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # defer the second parent of a merge for a later pass
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}   # memo: tagged node -> set of tags reachable from it
        for h in heads:
            def visible(node):
                # all tagged nodes transitively visible from *node*
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    # b is not shadowed by any other tag: report its names
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
670 670
    def branches(self, nodes):
        """For each node, walk first parents down to the bottom of its
        linear segment and return a (head, root, parent1, parent2) tuple.

        A segment ends where the node has a second parent (a merge) or
        where its first parent is nullid (the repository root).
        Defaults to the changelog tip when *nodes* is empty.
        """
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n  # remember the top (head) of this segment
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    # merge or root: the segment ends at n
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b
683 683
    def between(self, pairs):
        """For each (top, bottom) pair, walk first parents from top
        towards bottom and collect the nodes found at exponentially
        growing distances (1, 2, 4, ...) from top.

        Returns one list per pair.  Used by the discovery code
        (findincoming) to binary-search a linear branch range.
        """
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1  # next distance at which to sample; doubles each sample

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r
702 702
    def newer(self, nodes):
        """Return every changelog node that is one of *nodes* or a
        descendant of one, in revision (topological) order.

        NOTE(review): shown as removed in this changeset — its only
        caller (changegroup) now uses changelog.nodesbetween() instead.
        """
        m = {}   # the explicitly requested nodes
        nl = []  # result, accumulated in revision order
        pm = {}  # nodes known to be in the requested set's descendant closure
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        # single forward sweep: a node is included if it was requested or
        # if either parent is already included
        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl
728
    def findincoming(self, remote, base=None, heads=None):
        """Discover changesets present in *remote* but not locally.

        Returns the list of earliest-unknown nodes to fetch, or None if
        remote has nothing we lack.  If *base* (a dict) is supplied it
        is filled in with known-common nodes as a side effect; *heads*
        restricts the search to those remote heads.
        """
        m = self.changelog.nodemap
        search = []      # incomplete branch ranges to binary-search
        fetch = {}       # earliest unknown nodes found so far
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status("searching for changes\n")

        if not heads:
            heads = remote.heads()

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            # every remote head is already known locally
            return None

        rep = {}     # nodes already requested from remote
        reqcnt = 0   # number of round-trips, reported for debugging

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # both parents known: the whole branch is new
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    # queue the parents for the next batched request
                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                            (reqcnt, " ".join(map(short, r))))
                # ask for parent branches in batches of 10
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        if b[0] in m:
                            self.ui.debug("found base node %s\n" % short(b[0]))
                            base[b[0]] = 1
                        elif b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            # sample points at exponential distances within the range
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # adjacent samples: boundary found
                        self.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError("already have changeset " + short(f[:4]))

        if base.keys() == [nullid]:
            self.ui.warn("warning: pulling from an unrelated repository!\n")

        self.ui.note("found new changesets starting at " +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug("%d total queries\n" % reqcnt)

        return fetch.keys()
848 822
    def findoutgoing(self, remote, base=None, heads=None):
        """Return the roots of the changesets we have and *remote* lacks.

        *base* may carry precomputed common nodes (e.g. from a prior
        findincoming call); when absent it is computed here.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads)

        self.ui.debug("common changesets up to "
                      + " ".join(map(short, base.keys())) + "\n")

        # start with every local node, then prune
        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)

        # this is the set of all roots we have to push
        return subset
878 852
879 853 def pull(self, remote):
880 854 lock = self.lock()
881 855
882 856 # if we have an empty repo, fetch everything
883 857 if self.changelog.tip() == nullid:
884 858 self.ui.status("requesting all changes\n")
885 859 fetch = [nullid]
886 860 else:
887 861 fetch = self.findincoming(remote)
888 862
889 863 if not fetch:
890 864 self.ui.status("no changes found\n")
891 865 return 1
892 866
893 867 cg = remote.changegroup(fetch)
894 868 return self.addchangegroup(cg)
895 869
896 870 def push(self, remote, force=False):
897 871 lock = remote.lock()
898 872
899 873 base = {}
900 874 heads = remote.heads()
901 875 inc = self.findincoming(remote, base, heads)
902 876 if not force and inc:
903 877 self.ui.warn("abort: unsynced remote changes!\n")
904 878 self.ui.status("(did you forget to sync? use push -f to force)\n")
905 879 return 1
906 880
907 881 update = self.findoutgoing(remote, base)
908 882 if not update:
909 883 self.ui.status("no changes found\n")
910 884 return 1
911 885 elif not force:
912 886 if len(heads) < len(self.changelog.heads()):
913 887 self.ui.warn("abort: push creates new remote branches!\n")
914 888 self.ui.status("(did you forget to merge?" +
915 889 " use push -f to force)\n")
916 890 return 1
917 891
918 892 cg = self.changegroup(update)
919 893 return remote.addchangegroup(cg)
920 894
    def changegroup(self, basenodes):
        """Build a changegroup stream for all changesets descending from
        *basenodes*.

        The stream is: changelog group, manifest group, then for each
        changed file a length-prefixed filename followed by that file's
        revision group, terminated by a zero-length chunk.  Returns a
        readable chunk buffer over the generator.
        """
        genread = util.chunkbuffer

        def gengroup():
            nodes = self.changelog.nodesbetween(basenodes)[0]

            # construct the link map
            linkmap = {}
            for n in nodes:
                linkmap[self.changelog.rev(n)] = n

            # construct a list of all changed files
            changed = {}
            for n in nodes:
                c = self.changelog.read(n)
                for f in c[3]:
                    changed[f] = 1
            changed = changed.keys()
            changed.sort()

            # the changegroup is changesets + manifests + all file revs
            # NOTE(review): revs is computed but not used below
            revs = [ self.changelog.rev(n) for n in nodes ]

            for y in self.changelog.group(linkmap): yield y
            for y in self.manifest.group(linkmap): yield y
            for f in changed:
                # filename chunk: 4-byte big-endian length includes itself
                yield struct.pack(">l", len(f) + 4) + f
                g = self.file(f).group(linkmap)
                for y in g:
                    yield y

            # zero-length chunk terminates the stream
            yield struct.pack(">l", 0)

        return genread(gengroup())
955 929
    def addchangegroup(self, source):
        """Apply a changegroup stream read from *source* to the repository.

        Adds changesets, manifests and file revisions inside a single
        transaction, reports statistics, then runs the 'changegroup'
        and per-commit 'commit' hooks.  Returns 1 if the changegroup
        hook fails, None otherwise.
        """

        def getchunk():
            # read one length-prefixed chunk; "" signals end of a group
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            d = source.read(l - 4)
            if len(d) < l - 4:
                raise repo.RepoError("premature EOF reading chunk" +
                                     " (got %d bytes, expected %d)"
                                     % (len(d), l - 4))
            return d

        def getgroup():
            # yield chunks until the group terminator
            while 1:
                c = getchunk()
                if not c: break
                yield c

        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not source: return
        changesets = files = revisions = 0

        tr = self.transaction()

        oldheads = len(self.changelog.heads())

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        cnr, cor = map(self.changelog.rev, (cn, co))
        if cn == nullid:
            cnr = cor
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file changes\n")
        while 1:
            f = getchunk()  # filename chunk; empty ends the stream
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads > oldheads:
            heads = " (+%d heads)" % (newheads - oldheads)

        self.ui.status(("added %d changesets" +
                        " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        tr.close()

        if changesets > 0:
            if not self.hook("changegroup",
                              node=hex(self.changelog.node(cor+1))):
                self.ui.warn("abort: changegroup hook returned failure!\n")
                return 1

            for i in range(cor + 1, cnr + 1):
                self.hook("commit", node=hex(self.changelog.node(i)))

        return
1037 1011
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        """Update the working directory to changeset *node*.

        allow    -- permit a branch-spanning (merge) update
        force    -- discard local state conflicts and clobber
        choose   -- optional predicate limiting which files to touch
        moddirstate -- whether to record the result in the dirstate

        Returns 1 on refusal (uncommitted merge, or an unallowed branch
        crossing), None on success.
        """
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes()

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}    # files needing a 3-way merge: f -> (my, other, mode)
        get = {}      # files to fetch from the target: f -> node
        remove = []   # files to delete from the working dir

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        # merge executable bits as above
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = "d"
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug("remote deleted %s, clobbering\n" % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local modified %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = "k"
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "k": get[f] = n
            elif f not in ma:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug("local deleted %s, recreating\n" % f)
                    get[f] = n
                else:
                    self.ui.debug("local deleted %s\n" % f)

        del mw, m1, m2, ma

        if force:
            # forced update: fetch the other side of every would-be merge
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to merge across branches" +
                               " or -C to lose changes)\n")
                return 1
            branch_merge = True

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wwrite(f, t)
            except IOError, e:
                if e.errno != errno.ENOENT:
                    raise
                # parent directory missing: create it and retry once
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note("removing %s\n" % f)
            try:
                os.unlink(self.wjoin(f))
            except OSError, inst:
                self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
            # try removing directories that might now be empty
            try: os.removedirs(os.path.dirname(self.wjoin(f)))
            except: pass
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)
1277 1251
    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory

        Materializes the ancestor and other revisions of *fn* as temp
        files, then runs the external merge command ($HGMERGE, the
        ui 'merge' config, or 'hgmerge') against the working copy.
        Only warns on failure; temp files are always removed.
        """

        def temp(prefix, node):
            # write revision *node* of fn to a uniquely named temp file
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            self.wwrite(fn, fl.read(node), f)
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: my %s other %s ancestor %s\n" %
                      (fn, short(my), short(other), short(base)))

        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)
1307 1281
1308 1282 def verify(self):
1309 1283 filelinkrevs = {}
1310 1284 filenodes = {}
1311 1285 changesets = revisions = files = 0
1312 1286 errors = [0]
1313 1287 neededmanifests = {}
1314 1288
1315 1289 def err(msg):
1316 1290 self.ui.warn(msg + "\n")
1317 1291 errors[0] += 1
1318 1292
1319 1293 seen = {}
1320 1294 self.ui.status("checking changesets\n")
1321 1295 for i in range(self.changelog.count()):
1322 1296 changesets += 1
1323 1297 n = self.changelog.node(i)
1324 1298 l = self.changelog.linkrev(n)
1325 1299 if l != i:
1326 1300 err("incorrect link (%d) for changeset revision %d" % (l, i))
1327 1301 if n in seen:
1328 1302 err("duplicate changeset at revision %d" % i)
1329 1303 seen[n] = 1
1330 1304
1331 1305 for p in self.changelog.parents(n):
1332 1306 if p not in self.changelog.nodemap:
1333 1307 err("changeset %s has unknown parent %s" %
1334 1308 (short(n), short(p)))
1335 1309 try:
1336 1310 changes = self.changelog.read(n)
1337 1311 except Exception, inst:
1338 1312 err("unpacking changeset %s: %s" % (short(n), inst))
1339 1313
1340 1314 neededmanifests[changes[0]] = n
1341 1315
1342 1316 for f in changes[3]:
1343 1317 filelinkrevs.setdefault(f, []).append(i)
1344 1318
1345 1319 seen = {}
1346 1320 self.ui.status("checking manifests\n")
1347 1321 for i in range(self.manifest.count()):
1348 1322 n = self.manifest.node(i)
1349 1323 l = self.manifest.linkrev(n)
1350 1324
1351 1325 if l < 0 or l >= self.changelog.count():
1352 1326 err("bad manifest link (%d) at revision %d" % (l, i))
1353 1327
1354 1328 if n in neededmanifests:
1355 1329 del neededmanifests[n]
1356 1330
1357 1331 if n in seen:
1358 1332 err("duplicate manifest at revision %d" % i)
1359 1333
1360 1334 seen[n] = 1
1361 1335
1362 1336 for p in self.manifest.parents(n):
1363 1337 if p not in self.manifest.nodemap:
1364 1338 err("manifest %s has unknown parent %s" %
1365 1339 (short(n), short(p)))
1366 1340
1367 1341 try:
1368 1342 delta = mdiff.patchtext(self.manifest.delta(n))
1369 1343 except KeyboardInterrupt:
1370 1344 self.ui.warn("interrupted")
1371 1345 raise
1372 1346 except Exception, inst:
1373 1347 err("unpacking manifest %s: %s" % (short(n), inst))
1374 1348
1375 1349 ff = [ l.split('\0') for l in delta.splitlines() ]
1376 1350 for f, fn in ff:
1377 1351 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1378 1352
1379 1353 self.ui.status("crosschecking files in changesets and manifests\n")
1380 1354
1381 1355 for m,c in neededmanifests.items():
1382 1356 err("Changeset %s refers to unknown manifest %s" %
1383 1357 (short(m), short(c)))
1384 1358 del neededmanifests
1385 1359
1386 1360 for f in filenodes:
1387 1361 if f not in filelinkrevs:
1388 1362 err("file %s in manifest but not in changesets" % f)
1389 1363
1390 1364 for f in filelinkrevs:
1391 1365 if f not in filenodes:
1392 1366 err("file %s in changeset but not in manifest" % f)
1393 1367
1394 1368 self.ui.status("checking files\n")
1395 1369 ff = filenodes.keys()
1396 1370 ff.sort()
1397 1371 for f in ff:
1398 1372 if f == "/dev/null": continue
1399 1373 files += 1
1400 1374 fl = self.file(f)
1401 1375 nodes = { nullid: 1 }
1402 1376 seen = {}
1403 1377 for i in range(fl.count()):
1404 1378 revisions += 1
1405 1379 n = fl.node(i)
1406 1380
1407 1381 if n in seen:
1408 1382 err("%s: duplicate revision %d" % (f, i))
1409 1383 if n not in filenodes[f]:
1410 1384 err("%s: %d:%s not in manifests" % (f, i, short(n)))
1411 1385 else:
1412 1386 del filenodes[f][n]
1413 1387
1414 1388 flr = fl.linkrev(n)
1415 1389 if flr not in filelinkrevs[f]:
1416 1390 err("%s:%s points to unexpected changeset %d"
1417 1391 % (f, short(n), flr))
1418 1392 else:
1419 1393 filelinkrevs[f].remove(flr)
1420 1394
1421 1395 # verify contents
1422 1396 try:
1423 1397 t = fl.read(n)
1424 1398 except Exception, inst:
1425 1399 err("unpacking file %s %s: %s" % (f, short(n), inst))
1426 1400
1427 1401 # verify parents
1428 1402 (p1, p2) = fl.parents(n)
1429 1403 if p1 not in nodes:
1430 1404 err("file %s:%s unknown parent 1 %s" %
1431 1405 (f, short(n), short(p1)))
1432 1406 if p2 not in nodes:
1433 1407 err("file %s:%s unknown parent 2 %s" %
1434 1408 (f, short(n), short(p1)))
1435 1409 nodes[n] = 1
1436 1410
1437 1411 # cross-check
1438 1412 for node in filenodes[f]:
1439 1413 err("node %s in manifests not in %s" % (hex(node), f))
1440 1414
1441 1415 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1442 1416 (files, changesets, revisions))
1443 1417
1444 1418 if errors[0]:
1445 1419 self.ui.warn("%d integrity errors encountered!\n" % errors[0])
1446 1420 return 1
@@ -1,674 +1,817 b''
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from demandload import demandload
15 15 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 16
def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    # sort the parents so the hash is symmetric in (p1, p2)
    parents = [p1, p2]
    parents.sort()
    digest = sha.new(parents[0])
    digest.update(parents[1])
    digest.update(text)
    return digest.digest()
30 30
def compress(text):
    """ generate a possibly-compressed representation of text """
    if not text:
        return text

    def literal(t):
        # store uncompressed: a leading NUL already marks raw data,
        # anything else gets a 'u' (uncompressed) prefix
        if t[0] == '\0':
            return t
        return 'u' + t

    # tiny revisions are never worth compressing
    if len(text) < 44:
        return literal(text)
    compressed = zlib.compress(text)
    # only keep the compressed form when it actually saves space
    if len(compressed) > len(text):
        return literal(text)
    return compressed
42 42
def decompress(bin):
    """ decompress the given input """
    if not bin:
        return bin
    marker = bin[0]
    if marker == '\0':
        # NUL-headed chunks are stored raw
        return bin
    elif marker == 'x':
        # 'x' is the first byte of a zlib stream
        return zlib.decompress(bin)
    elif marker == 'u':
        # explicit uncompressed marker; strip it
        return bin[1:]
    raise RevlogError("unknown compression type %s" % marker)
51 51
52 52 indexformat = ">4l20s20s20s"
53 53
class lazyparser:
    """
    this class avoids the need to parse the entirety of large indices

    By default we parse and load 1000 entries at a time.

    If no position is specified, we load the whole index, and replace
    the lazy objects in revlog with the underlying objects for
    efficiency in cases where we look at most of the nodes.
    """
    def __init__(self, data, revlog):
        self.data = data                        # raw bytes of the index file
        self.s = struct.calcsize(indexformat)   # size of one index entry
        self.l = len(data)/self.s               # total number of entries
        self.index = [None] * self.l            # None marks a not-yet-parsed entry
        self.map = {nullid: -1}                 # nodeid -> rev, filled as we parse
        self.all = 0                            # set once the whole index is loaded
        self.revlog = revlog

    def load(self, pos=None):
        """parse the 1000-entry block containing pos, or everything if pos is None"""
        if self.all: return
        if pos is not None:
            # round down to the start of the containing 1000-entry block
            block = pos / 1000
            i = block * 1000
            end = min(self.l, i + 1000)
        else:
            self.all = 1
            i = 0
            end = self.l
            # fully parsed: swap the plain index/map into the revlog so
            # later accesses bypass the lazy wrappers entirely
            self.revlog.index = self.index
            self.revlog.nodemap = self.map

        while i < end:
            d = self.data[i * self.s: (i + 1) * self.s]
            e = struct.unpack(indexformat, d)
            self.index[i] = e
            self.map[e[6]] = i
            i += 1
92 92
class lazyindex:
    """a lazy version of the index array"""
    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return len(self.p.index)

    def load(self, pos):
        # ask the parser to fill in the block containing pos
        self.p.load(pos)
        return self.p.index[pos]

    def __getitem__(self, pos):
        # entries not parsed yet are None, so fall back to loading
        entry = self.p.index[pos]
        if entry:
            return entry
        return self.load(pos)

    def append(self, e):
        self.p.index.append(e)
106 106
class lazymap:
    """a lazy version of the node map"""
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        # locate the raw 20-byte key in the index data and parse the
        # block that should contain it
        if self.p.all: return
        # NOTE(review): find returns the first raw occurrence of the key;
        # this assumes the match lands on the node field of the key's own
        # entry -- a coincidental unaligned match would make us parse the
        # wrong block and report the key missing. Confirm acceptable.
        n = self.p.data.find(key)
        if n < 0:
            raise KeyError(key)
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        # membership needs the complete map, so parse everything
        self.p.load()
        return key in self.p.map
    def __iter__(self):
        # nullid is always present but never stored in the index
        yield nullid
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                # entry not parsed yet; load its block and retry
                self.p.load(i)
                yield self.p.index[i][6]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            # not parsed yet; try loading the block around it
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
140 140
141 141 class RevlogError(Exception): pass
142 142
class revlog:
    """
    the underlying revision storage object

    A revlog consists of two parts, an index and the revision data.

    The index is a file with a fixed record size containing
    information on each revision, including its nodeid (hash), the
    nodeids of its parents, the position and offset of its data within
    the data file, and the revision it's based on. Finally, each entry
    contains a linkrev entry that can serve as a pointer to external
    data.

    The revision data itself is a linear collection of data chunks.
    Each chunk represents a revision and is usually represented as a
    delta against the previous chunk. To bound lookup time, runs of
    deltas are limited to about 2 times the length of the original
    version data. This makes retrieval of a version proportional to
    its size, or O(1) relative to the number of revisions.

    Both pieces of the revlog are written to in an append-only
    fashion, which means we never need to rewrite a file to insert or
    remove data, and can use some simple techniques to avoid the need
    for locking while reading.
    """
    def __init__(self, opener, indexfile, datafile):
        """
        create a revlog object

        opener is a function that abstracts the file opening operation
        and can be used to implement COW semantics or the like.
        """
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener
        self.cache = None       # (node, rev, text) of last revision read

        try:
            i = self.opener(self.indexfile).read()
        except IOError, inst:
            # a missing index just means an empty revlog
            if inst.errno != errno.ENOENT:
                raise
            i = ""

        if len(i) > 10000:
            # big index, let's parse it on demand
            parser = lazyparser(i, self)
            self.index = lazyindex(parser)
            self.nodemap = lazymap(parser)
        else:
            # small index: parse it eagerly into plain structures
            s = struct.calcsize(indexformat)
            l = len(i) / s
            self.index = [None] * l
            m = [None] * l

            n = 0
            for f in xrange(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                m[n] = (e[6], n)
                self.index[n] = e
                n += 1

            self.nodemap = dict(m)
            self.nodemap[nullid] = -1
208 208
    # nodeid of the most recently added revision
    def tip(self): return self.node(len(self.index) - 1)
    # number of revisions in the log
    def count(self): return len(self.index)
    # map a revision number to its nodeid (negative revs -> nullid)
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node):
        """map a nodeid to its revision number"""
        try:
            return self.nodemap[node]
        except KeyError:
            raise RevlogError('%s: no node %s' % (self.indexfile, hex(node)))
    # rev of the changeset this node belongs to (index field 3)
    def linkrev(self, node): return self.index[self.rev(node)][3]
    def parents(self, node):
        """return the pair of parent nodeids; nullid is its own parent"""
        if node == nullid: return (nullid, nullid)
        return self.index[self.rev(node)][4:6]

    # byte offset of a revision's chunk within the data file
    def start(self, rev): return self.index[rev][0]
    # stored (possibly compressed) length of a revision's chunk
    def length(self, rev): return self.index[rev][1]
    # end offset of a revision's chunk
    def end(self, rev): return self.start(rev) + self.length(rev)
    # rev holding the full text that this revision's delta chain starts from
    def base(self, rev): return self.index[rev][2]
226 226
    def reachable(self, rev, stop=None):
        """return a dict of all ancestor nodes of the given node,
        stopping (exclusively) at 'stop' and at any node whose rev is
        below stop's rev

        NOTE(review): despite the parameter name, 'rev' is a nodeid --
        it is fed directly to self.parents().
        """
        reachable = {}
        visit = [rev]
        reachable[rev] = 1
        if stop:
            stopn = self.rev(stop)
        else:
            stopn = 0
        while visit:
            # breadth-first walk over parents
            n = visit.pop(0)
            if n == stop:
                continue
            if n == nullid:
                continue
            for p in self.parents(n):
                # skip anything below the stop point (nullid's rev is -1)
                if self.rev(p) < stopn:
                    continue
                if p not in reachable:
                    reachable[p] = 1
                    visit.append(p)
        return reachable
248 248
    def nodesbetween(self, roots=None, heads=None):
        """Return a tuple containing three elements. Elements 1 and 2 contain
        a final list of bases and heads after all the unreachable ones have
        been pruned.  Element 0 contains a topologically sorted list of all
        nodes that satisfy these constraints:
        1. All nodes must be descended from a node in roots (the nodes on
        roots are considered descended from themselves).
        2. All nodes must also be ancestors of a node in heads (the nodes in
        heads are considered to be their own ancestors).

        If roots is unspecified, nullid is assumed as the only root.
        If heads is unspecified, it is taken to be the output of the
        heads method (i.e. a list of all nodes in the repository that
        have no children)."""
        if roots is not None:
            roots = list(roots)
            lowestrev = min([self.rev(n) for n in roots])
        else:
            roots = [nullid] # Everybody's a descendent of nullid
            lowestrev = -1
        if (lowestrev == -1) and (heads is None):
            # We want _all_ the nodes!
            return ([self.node(r) for r in xrange(0, self.count())],
                    [nullid], list(self.heads()))
        if heads is None:
            # All nodes are ancestors, so the latest ancestor is the last
            # node.
            highestrev = self.count() - 1
            # Set ancestors to None to signal that every node is an ancestor.
            ancestors = None
            # Set heads to an empty dictionary for later discovery of heads
            heads = {}
        else:
            ancestors = {}
            # Start at the top and keep marking parents until we're done.
            nodestotag = list(heads)
            # Turn heads into a dictionary so we can remove 'fake' heads.
            # Also, later we will be using it to filter out the heads we can't
            # find from roots.
            heads = dict.fromkeys(heads, 0)
            # Remember where the top was so we can use it as a limit later.
            highestrev = max([self.rev(n) for n in nodestotag])
            while nodestotag:
                # grab a node to tag
                n = nodestotag.pop()
                # Never tag nullid
                if n == nullid:
                    continue
                # A node's revision number represents its place in a
                # topologically sorted list of nodes.
                r = self.rev(n)
                if r >= lowestrev:
                    if n not in ancestors:
                        # If we are possibly a descendent of one of the roots
                        # and we haven't already been marked as an ancestor
                        ancestors[n] = 1 # Mark as ancestor
                        # Add non-nullid parents to list of nodes to tag.
                        nodestotag.extend([p for p in self.parents(n) if
                                           p != nullid])
                    elif n in heads: # We've seen it before, is it a fake head?
                        # So it is, real heads should not be the ancestors of
                        # any other heads.
                        heads.pop(n)
            # Now that we have our set of ancestors, we want to remove any
            # roots that are not ancestors.

            # If one of the roots was nullid, everything is included anyway.
            if lowestrev > -1:
                # But, since we weren't, let's recompute the lowest rev to not
                # include roots that aren't ancestors.

                # Filter out roots that aren't ancestors of heads
                roots = [n for n in roots if n in ancestors]
                # Recompute the lowest revision
                if roots:
                    lowestrev = min([self.rev(n) for n in roots])
                else:
                    # No more roots?  Return empty list
                    return ([], [], [])
            else:
                # We are descending from nullid, and don't need to care about
                # any other roots.
                lowestrev = -1
                roots = [nullid]
        # Transform our roots list into a 'set' (i.e. a dictionary where the
        # values don't matter).
        descendents = dict.fromkeys(roots, 1)
        # Also, keep the original roots so we can filter out roots that aren't
        # 'real' roots (i.e. are descended from other roots).
        roots = descendents.copy()
        # Our topologically sorted list of output nodes.
        orderedout = []
        # Don't start at nullid since we don't want nullid in our output list,
        # and if nullid shows up in descendents, empty parents will look like
        # they're descendents.
        for r in xrange(max(lowestrev, 0), highestrev + 1):
            n = self.node(r)
            isdescendent = False
            if lowestrev == -1:  # Everybody is a descendent of nullid
                isdescendent = True
            elif n in descendents:
                # n is already a descendent
                isdescendent = True
                # This check only needs to be done here because all the roots
                # will start being marked as descendents before the loop.
                if n in roots:
                    # If n was a root, check if it's a 'real' root.
                    p = tuple(self.parents(n))
                    # If any of its parents are descendents, it's not a root.
                    if (p[0] in descendents) or (p[1] in descendents):
                        roots.pop(n)
            else:
                p = tuple(self.parents(n))
                # A node is a descendent if either of its parents are
                # descendents.  (We seeded the descendents list with the roots
                # up there, remember?)
                if (p[0] in descendents) or (p[1] in descendents):
                    descendents[n] = 1
                    isdescendent = True
            if isdescendent and ((ancestors is None) or (n in ancestors)):
                # Only include nodes that are both descendents and ancestors.
                orderedout.append(n)
                if (ancestors is not None) and (n in heads):
                    # We're trying to figure out which heads are reachable
                    # from roots.
                    # Mark this head as having been reached
                    heads[n] = 1
                elif ancestors is None:
                    # Otherwise, we're trying to discover the heads.
                    # Assume this is a head because if it isn't, the next step
                    # will eventually remove it.
                    heads[n] = 1
                    # But, obviously its parents aren't.
                    for p in self.parents(n):
                        heads.pop(p, None)
        # keep only the heads that were actually reached from the roots
        heads = [n for n in heads.iterkeys() if heads[n] != 0]
        roots = roots.keys()
        # NOTE(review): these asserts disappear under python -O; if empty
        # results are a real error condition, consider raising instead.
        assert orderedout
        assert roots
        assert heads
        return (orderedout, roots, heads)
391
    def heads(self, stop=None):
        """return the list of all nodes that have no children

        The walk runs from tip toward the root; 'p' collects every node
        seen as a parent, so any node not in 'p' when visited is a head.
        The walk ends when 'stop' is reached or passed.
        """
        p = {}
        h = []
        stoprev = 0
        if stop and stop in self.nodemap:
            stoprev = self.rev(stop)

        for r in range(self.count() - 1, -1, -1):
            n = self.node(r)
            if n not in p:
                h.append(n)
            if n == stop:
                break
            if r < stoprev:
                break
            for pn in self.parents(n):
                p[pn] = 1
        return h
268 411
269 412 def children(self, node):
270 413 """find the children of a given node"""
271 414 c = []
272 415 p = self.rev(node)
273 416 for r in range(p + 1, self.count()):
274 417 n = self.node(r)
275 418 for pn in self.parents(n):
276 419 if pn == node:
277 420 c.append(n)
278 421 continue
279 422 elif pn == nullid:
280 423 continue
281 424 return c
282 425
283 426 def lookup(self, id):
284 427 """locate a node based on revision number or subset of hex nodeid"""
285 428 try:
286 429 rev = int(id)
287 430 if str(rev) != id: raise ValueError
288 431 if rev < 0: rev = self.count() + rev
289 432 if rev < 0 or rev >= self.count(): raise ValueError
290 433 return self.node(rev)
291 434 except (ValueError, OverflowError):
292 435 c = []
293 436 for n in self.nodemap:
294 437 if hex(n).startswith(id):
295 438 c.append(n)
296 439 if len(c) > 1: raise KeyError("Ambiguous identifier")
297 440 if len(c) < 1: raise KeyError("No match found")
298 441 return c[0]
299 442
300 443 return None
301 444
    def diff(self, a, b):
        """return a delta between two revisions (thin wrapper over mdiff)"""
        return mdiff.textdiff(a, b)

    def patches(self, t, pl):
        """apply a list of patches to a string (thin wrapper over mdiff)"""
        return mdiff.patches(t, pl)
309 452
    def delta(self, node):
        """return or calculate a delta between a node and its predecessor"""
        r = self.rev(node)
        b = self.base(r)
        if r == b:
            # this revision is stored as a full text, so there is no
            # delta on disk; synthesize one against the previous revision
            return self.diff(self.revision(self.node(r - 1)),
                             self.revision(node))
        else:
            # the on-disk chunk already is a delta; read and decompress it
            f = self.opener(self.datafile)
            f.seek(self.start(r))
            data = f.read(self.length(r))
            return decompress(data)
322 465
    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid: return ""
        # cache holds the last revision read as (node, rev, text)
        if self.cache and self.cache[0] == node: return self.cache[2]

        # look up what we need to read
        text = None
        rev = self.rev(node)
        start, length, base, link, p1, p2, node = self.index[rev]
        end = start + length
        # read the whole delta chain, starting at the base's full text
        if base != rev: start = self.start(base)

        # do we have useful data cached?
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            # resume patching from the cached revision instead of the base
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
            last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)

        if text is None:
            # first chunk is the full text of the base revision
            last = self.length(base)
            text = decompress(data[:last])

        # remaining chunks are successive deltas; apply them in order
        bins = []
        for r in xrange(base + 1, rev + 1):
            s = self.length(r)
            bins.append(decompress(data[last:last + s]))
            last = last + s

        text = mdiff.patches(text, bins)

        # the nodeid doubles as a checksum of text + parents
        if node != hash(text, p1, p2):
            raise RevlogError("integrity check failed on %s:%d"
                              % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text
364 507
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta

        Returns the nodeid of the new (or already present) revision.
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        # the revision is content-addressed, so a duplicate is a no-op
        if node in self.nodemap:
            return node

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            if not d:
                prev = self.revision(self.tip())
                d = self.diff(prev, text)
            data = compress(d)
            # total size of the delta chain if we append this delta
            dist = end - start + len(data)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        # (when n == 0 'dist' is unbound, but 'not n' short-circuits
        # before it is evaluated)
        if not n or dist > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        # offset, size, base, linkrev, p1, p2, nodeid
        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        # register the append positions with the transaction for rollback
        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node
421 564
422 565 def ancestor(self, a, b):
423 566 """calculate the least common ancestor of nodes a and b"""
424 567 # calculate the distance of every node from root
425 568 dist = {nullid: 0}
426 569 for i in xrange(self.count()):
427 570 n = self.node(i)
428 571 p1, p2 = self.parents(n)
429 572 dist[n] = max(dist[p1], dist[p2]) + 1
430 573
431 574 # traverse ancestors in order of decreasing distance from root
432 575 def ancestors(node):
433 576 # we store negative distances because heap returns smallest member
434 577 h = [(-dist[node], node)]
435 578 seen = {}
436 579 earliest = self.count()
437 580 while h:
438 581 d, n = heapq.heappop(h)
439 582 if n not in seen:
440 583 seen[n] = 1
441 584 r = self.rev(n)
442 585 yield (-d, n)
443 586 for p in self.parents(n):
444 587 heapq.heappush(h, (-dist[p], p))
445 588
446 589 def generations(node):
447 590 sg, s = None, {}
448 591 for g,n in ancestors(node):
449 592 if g != sg:
450 593 if sg:
451 594 yield sg, s
452 595 sg, s = g, {n:1}
453 596 else:
454 597 s[n] = 1
455 598 yield sg, s
456 599
457 600 x = generations(a)
458 601 y = generations(b)
459 602 gx = x.next()
460 603 gy = y.next()
461 604
462 605 # increment each ancestor list until it is closer to root than
463 606 # the other, or they match
464 607 while 1:
465 608 #print "ancestor gen %s %s" % (gx[0], gy[0])
466 609 if gx[0] == gy[0]:
467 610 # find the intersection
468 611 i = [ n for n in gx[1] if n in gy[1] ]
469 612 if i:
470 613 return i[0]
471 614 else:
472 615 #print "next"
473 616 gy = y.next()
474 617 gx = x.next()
475 618 elif gx[0] < gy[0]:
476 619 #print "next y"
477 620 gy = y.next()
478 621 else:
479 622 #print "next x"
480 623 gx = x.next()
481 624
482 625 def group(self, linkmap):
483 626 """calculate a delta group
484 627
485 628 Given a list of changeset revs, return a set of deltas and
486 629 metadata corresponding to nodes. the first delta is
487 630 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
488 631 have this parent as it has all history before these
489 632 changesets. parent is parent[0]
490 633 """
491 634 revs = []
492 635 needed = {}
493 636
494 637 # find file nodes/revs that match changeset revs
495 638 for i in xrange(0, self.count()):
496 639 if self.index[i][3] in linkmap:
497 640 revs.append(i)
498 641 needed[i] = 1
499 642
500 643 # if we don't have any revisions touched by these changesets, bail
501 644 if not revs:
502 645 yield struct.pack(">l", 0)
503 646 return
504 647
505 648 # add the parent of the first rev
506 649 p = self.parents(self.node(revs[0]))[0]
507 650 revs.insert(0, self.rev(p))
508 651
509 652 # for each delta that isn't contiguous in the log, we need to
510 653 # reconstruct the base, reconstruct the result, and then
511 654 # calculate the delta. We also need to do this where we've
512 655 # stored a full version and not a delta
513 656 for i in xrange(0, len(revs) - 1):
514 657 a, b = revs[i], revs[i + 1]
515 658 if a + 1 != b or self.base(b) == b:
516 659 for j in xrange(self.base(a), a + 1):
517 660 needed[j] = 1
518 661 for j in xrange(self.base(b), b + 1):
519 662 needed[j] = 1
520 663
521 664 # calculate spans to retrieve from datafile
522 665 needed = needed.keys()
523 666 needed.sort()
524 667 spans = []
525 668 oo = -1
526 669 ol = 0
527 670 for n in needed:
528 671 if n < 0: continue
529 672 o = self.start(n)
530 673 l = self.length(n)
531 674 if oo + ol == o: # can we merge with the previous?
532 675 nl = spans[-1][2]
533 676 nl.append((n, l))
534 677 ol += l
535 678 spans[-1] = (oo, ol, nl)
536 679 else:
537 680 oo = o
538 681 ol = l
539 682 spans.append((oo, ol, [(n, l)]))
540 683
541 684 # read spans in, divide up chunks
542 685 chunks = {}
543 686 for span in spans:
544 687 # we reopen the file for each span to make http happy for now
545 688 f = self.opener(self.datafile)
546 689 f.seek(span[0])
547 690 data = f.read(span[1])
548 691
549 692 # divide up the span
550 693 pos = 0
551 694 for r, l in span[2]:
552 695 chunks[r] = decompress(data[pos: pos + l])
553 696 pos += l
554 697
555 698 # helper to reconstruct intermediate versions
556 699 def construct(text, base, rev):
557 700 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
558 701 return mdiff.patches(text, bins)
559 702
560 703 # build deltas
561 704 deltas = []
562 705 for d in xrange(0, len(revs) - 1):
563 706 a, b = revs[d], revs[d + 1]
564 707 n = self.node(b)
565 708
566 709 # do we need to construct a new delta?
567 710 if a + 1 != b or self.base(b) == b:
568 711 if a >= 0:
569 712 base = self.base(a)
570 713 ta = chunks[self.base(a)]
571 714 ta = construct(ta, base, a)
572 715 else:
573 716 ta = ""
574 717
575 718 base = self.base(b)
576 719 if a > base:
577 720 base = a
578 721 tb = ta
579 722 else:
580 723 tb = chunks[self.base(b)]
581 724 tb = construct(tb, base, b)
582 725 d = self.diff(ta, tb)
583 726 else:
584 727 d = chunks[b]
585 728
586 729 p = self.parents(n)
587 730 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
588 731 l = struct.pack(">l", len(meta) + len(d) + 4)
589 732 yield l
590 733 yield meta
591 734 yield d
592 735
593 736 yield struct.pack(">l", 0)
594 737
595 738 def addgroup(self, revs, linkmapper, transaction, unique=0):
596 739 """
597 740 add a delta group
598 741
599 742 given a set of deltas, add them to the revision log. the
600 743 first delta is against its parent, which should be in our
601 744 log, the rest are against the previous delta.
602 745 """
603 746
604 747 #track the base of the current delta log
605 748 r = self.count()
606 749 t = r - 1
607 750 node = nullid
608 751
609 752 base = prev = -1
610 753 start = end = measure = 0
611 754 if r:
612 755 start = self.start(self.base(t))
613 756 end = self.end(t)
614 757 measure = self.length(self.base(t))
615 758 base = self.base(t)
616 759 prev = self.tip()
617 760
618 761 transaction.add(self.datafile, end)
619 762 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
620 763 dfh = self.opener(self.datafile, "a")
621 764 ifh = self.opener(self.indexfile, "a")
622 765
623 766 # loop through our set of deltas
624 767 chain = None
625 768 for chunk in revs:
626 769 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
627 770 link = linkmapper(cs)
628 771 if node in self.nodemap:
629 772 # this can happen if two branches make the same change
630 773 # if unique:
631 774 # raise RevlogError("already have %s" % hex(node[:4]))
632 775 chain = node
633 776 continue
634 777 delta = chunk[80:]
635 778
636 779 if not chain:
637 780 # retrieve the parent revision of the delta chain
638 781 chain = p1
639 782 if not chain in self.nodemap:
640 783 raise RevlogError("unknown base %s" % short(chain[:4]))
641 784
642 785 # full versions are inserted when the needed deltas become
643 786 # comparable to the uncompressed text or when the previous
644 787 # version is not the one we have a delta against. We use
645 788 # the size of the previous full rev as a proxy for the
646 789 # current size.
647 790
648 791 if chain == prev:
649 792 cdelta = compress(delta)
650 793
651 794 if chain != prev or (end - start + len(cdelta)) > measure * 2:
652 795 # flush our writes here so we can read it in revision
653 796 dfh.flush()
654 797 ifh.flush()
655 798 text = self.revision(chain)
656 799 text = self.patches(text, [delta])
657 800 chk = self.addrevision(text, transaction, link, p1, p2)
658 801 if chk != node:
659 802 raise RevlogError("consistency error adding group")
660 803 measure = len(text)
661 804 else:
662 805 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
663 806 self.index.append(e)
664 807 self.nodemap[node] = r
665 808 dfh.write(cdelta)
666 809 ifh.write(struct.pack(indexformat, *e))
667 810
668 811 t, r, chain, prev = r, r + 1, node, node
669 812 start = self.start(self.base(t))
670 813 end = self.end(t)
671 814
672 815 dfh.close()
673 816 ifh.close()
674 817 return node
General Comments 0
You need to be logged in to leave comments. Login now