Show repo's revlog format on verify only if it doesn't match the default format....
Thomas Arendsen Hein
r2152:57729c56 default
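
The hunk below shows the bookkeeping half of the change: the constructor now records whether the repository uses a non-default revlog format (the new self.revlogv1 attribute). The verify-side code the commit message refers to is not part of this hunk; the following is only a minimal, hypothetical sketch of that behaviour, assuming REVLOGV0 is the default format (as in the constructor below) and using illustrative names that are not taken from this diff:

    # Hypothetical sketch, not the commit's actual verify code:
    # report the revlog format only when it differs from the default.
    if repo.revlogversion != revlog.REVLOGV0:
        repo.ui.status(_("repository uses revlog format %d\n")
                       % (repo.revlogversion,))
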
@@ -1,1998 +1,2000 @@
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "appendfile changegroup")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui revlog")
15 15
16 16 class localrepository(object):
17 17 def __del__(self):
18 18 self.transhandle = None
19 19 def __init__(self, parentui, path=None, create=0):
20 20 if not path:
21 21 p = os.getcwd()
22 22 while not os.path.isdir(os.path.join(p, ".hg")):
23 23 oldp = p
24 24 p = os.path.dirname(p)
25 25 if p == oldp:
26 26 raise repo.RepoError(_("no repo found"))
27 27 path = p
28 28 self.path = os.path.join(path, ".hg")
29 29
30 30 if not create and not os.path.isdir(self.path):
31 31 raise repo.RepoError(_("repository %s not found") % path)
32 32
33 33 self.root = os.path.abspath(path)
34 34 self.origroot = path
35 35 self.ui = ui.ui(parentui=parentui)
36 36 self.opener = util.opener(self.path)
37 37 self.wopener = util.opener(self.root)
38 38
39 39 try:
40 40 self.ui.readconfig(self.join("hgrc"), self.root)
41 41 except IOError:
42 42 pass
43 43
44 44 v = self.ui.revlogopts
45 45 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
46 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
46 47 flags = 0
47 48 for x in v.get('flags', "").split():
48 49 flags |= revlog.flagstr(x)
49 50
50 51 v = self.revlogversion | flags
51 52 self.manifest = manifest.manifest(self.opener, v)
52 53 self.changelog = changelog.changelog(self.opener, v)
53 54
54 55 # the changelog might not have the inline index flag
55 56 # on. If the format of the changelog is the same as found in
56 57 # .hgrc, apply any flags found in the .hgrc as well.
57 58         # Otherwise, just use the version from the changelog
58 59 v = self.changelog.version
59 60 if v == self.revlogversion:
60 61 v |= flags
61 62 self.revlogversion = v
62 63
63 64 self.tagscache = None
64 65 self.nodetagscache = None
65 66 self.encodepats = None
66 67 self.decodepats = None
67 68 self.transhandle = None
68 69
69 70 if create:
70 71 os.mkdir(self.path)
71 72 os.mkdir(self.join("data"))
72 73
73 74 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
74 75 def hook(self, name, throw=False, **args):
75 76 def runhook(name, cmd):
76 77 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
77 78 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
78 79 [(k.upper(), v) for k, v in args.iteritems()])
79 80 r = util.system(cmd, environ=env, cwd=self.root)
80 81 if r:
81 82 desc, r = util.explain_exit(r)
82 83 if throw:
83 84 raise util.Abort(_('%s hook %s') % (name, desc))
84 85 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
85 86 return False
86 87 return True
87 88
88 89 r = True
89 90 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
90 91 if hname.split(".", 1)[0] == name and cmd]
91 92 hooks.sort()
92 93 for hname, cmd in hooks:
93 94 r = runhook(hname, cmd) and r
94 95 return r
95 96
96 97 def tags(self):
97 98 '''return a mapping of tag to node'''
98 99 if not self.tagscache:
99 100 self.tagscache = {}
100 101
101 102 def parsetag(line, context):
102 103 if not line:
103 104 return
104 105                 s = line.split(" ", 1)
105 106 if len(s) != 2:
106 107 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
107 108 return
108 109 node, key = s
109 110 try:
110 111 bin_n = bin(node)
111 112 except TypeError:
112 113 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
113 114 return
114 115 if bin_n not in self.changelog.nodemap:
115 116 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
116 117 return
117 118 self.tagscache[key.strip()] = bin_n
118 119
119 120 # read each head of the tags file, ending with the tip
120 121 # and add each tag found to the map, with "newer" ones
121 122 # taking precedence
122 123 fl = self.file(".hgtags")
123 124 h = fl.heads()
124 125 h.reverse()
125 126 for r in h:
126 127 count = 0
127 128 for l in fl.read(r).splitlines():
128 129 count += 1
129 130 parsetag(l, ".hgtags:%d" % count)
130 131
131 132 try:
132 133 f = self.opener("localtags")
133 134 count = 0
134 135 for l in f:
135 136 count += 1
136 137 parsetag(l, "localtags:%d" % count)
137 138 except IOError:
138 139 pass
139 140
140 141 self.tagscache['tip'] = self.changelog.tip()
141 142
142 143 return self.tagscache
143 144
144 145 def tagslist(self):
145 146 '''return a list of tags ordered by revision'''
146 147 l = []
147 148 for t, n in self.tags().items():
148 149 try:
149 150 r = self.changelog.rev(n)
150 151 except:
151 152 r = -2 # sort to the beginning of the list if unknown
152 153 l.append((r, t, n))
153 154 l.sort()
154 155 return [(t, n) for r, t, n in l]
155 156
156 157 def nodetags(self, node):
157 158 '''return the tags associated with a node'''
158 159 if not self.nodetagscache:
159 160 self.nodetagscache = {}
160 161 for t, n in self.tags().items():
161 162 self.nodetagscache.setdefault(n, []).append(t)
162 163 return self.nodetagscache.get(node, [])
163 164
164 165 def lookup(self, key):
165 166 try:
166 167 return self.tags()[key]
167 168 except KeyError:
168 169 try:
169 170 return self.changelog.lookup(key)
170 171 except:
171 172 raise repo.RepoError(_("unknown revision '%s'") % key)
172 173
173 174 def dev(self):
174 175 return os.stat(self.path).st_dev
175 176
176 177 def local(self):
177 178 return True
178 179
179 180 def join(self, f):
180 181 return os.path.join(self.path, f)
181 182
182 183 def wjoin(self, f):
183 184 return os.path.join(self.root, f)
184 185
185 186 def file(self, f):
186 187 if f[0] == '/':
187 188 f = f[1:]
188 189 return filelog.filelog(self.opener, f, self.revlogversion)
189 190
190 191 def getcwd(self):
191 192 return self.dirstate.getcwd()
192 193
193 194 def wfile(self, f, mode='r'):
194 195 return self.wopener(f, mode)
195 196
196 197 def wread(self, filename):
197 198 if self.encodepats == None:
198 199 l = []
199 200 for pat, cmd in self.ui.configitems("encode"):
200 201 mf = util.matcher(self.root, "", [pat], [], [])[1]
201 202 l.append((mf, cmd))
202 203 self.encodepats = l
203 204
204 205 data = self.wopener(filename, 'r').read()
205 206
206 207 for mf, cmd in self.encodepats:
207 208 if mf(filename):
208 209 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
209 210 data = util.filter(data, cmd)
210 211 break
211 212
212 213 return data
213 214
214 215 def wwrite(self, filename, data, fd=None):
215 216 if self.decodepats == None:
216 217 l = []
217 218 for pat, cmd in self.ui.configitems("decode"):
218 219 mf = util.matcher(self.root, "", [pat], [], [])[1]
219 220 l.append((mf, cmd))
220 221 self.decodepats = l
221 222
222 223 for mf, cmd in self.decodepats:
223 224 if mf(filename):
224 225 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
225 226 data = util.filter(data, cmd)
226 227 break
227 228
228 229 if fd:
229 230 return fd.write(data)
230 231 return self.wopener(filename, 'w').write(data)
231 232
232 233 def transaction(self):
233 234 tr = self.transhandle
234 235 if tr != None and tr.running():
235 236 return tr.nest()
236 237
237 238 # save dirstate for undo
238 239 try:
239 240 ds = self.opener("dirstate").read()
240 241 except IOError:
241 242 ds = ""
242 243 self.opener("journal.dirstate", "w").write(ds)
243 244
244 245 tr = transaction.transaction(self.ui.warn, self.opener,
245 246 self.join("journal"),
246 247 aftertrans(self.path))
247 248 self.transhandle = tr
248 249 return tr
249 250
250 251 def recover(self):
251 252 l = self.lock()
252 253 if os.path.exists(self.join("journal")):
253 254 self.ui.status(_("rolling back interrupted transaction\n"))
254 255 transaction.rollback(self.opener, self.join("journal"))
255 256 self.reload()
256 257 return True
257 258 else:
258 259 self.ui.warn(_("no interrupted transaction available\n"))
259 260 return False
260 261
261 262 def undo(self, wlock=None):
262 263 if not wlock:
263 264 wlock = self.wlock()
264 265 l = self.lock()
265 266 if os.path.exists(self.join("undo")):
266 267 self.ui.status(_("rolling back last transaction\n"))
267 268 transaction.rollback(self.opener, self.join("undo"))
268 269 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
269 270 self.reload()
270 271 self.wreload()
271 272 else:
272 273 self.ui.warn(_("no undo information available\n"))
273 274
274 275 def wreload(self):
275 276 self.dirstate.read()
276 277
277 278 def reload(self):
278 279 self.changelog.load()
279 280 self.manifest.load()
280 281 self.tagscache = None
281 282 self.nodetagscache = None
282 283
283 284 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
284 285 desc=None):
285 286 try:
286 287 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
287 288 except lock.LockHeld, inst:
288 289 if not wait:
289 290 raise
290 291 self.ui.warn(_("waiting for lock on %s held by %s\n") %
291 292 (desc, inst.args[0]))
292 293 # default to 600 seconds timeout
293 294 l = lock.lock(self.join(lockname),
294 295 int(self.ui.config("ui", "timeout") or 600),
295 296 releasefn, desc=desc)
296 297 if acquirefn:
297 298 acquirefn()
298 299 return l
299 300
300 301 def lock(self, wait=1):
301 302 return self.do_lock("lock", wait, acquirefn=self.reload,
302 303 desc=_('repository %s') % self.origroot)
303 304
304 305 def wlock(self, wait=1):
305 306 return self.do_lock("wlock", wait, self.dirstate.write,
306 307 self.wreload,
307 308 desc=_('working directory of %s') % self.origroot)
308 309
309 310 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
310 311 "determine whether a new filenode is needed"
311 312 fp1 = manifest1.get(filename, nullid)
312 313 fp2 = manifest2.get(filename, nullid)
313 314
314 315 if fp2 != nullid:
315 316 # is one parent an ancestor of the other?
316 317 fpa = filelog.ancestor(fp1, fp2)
317 318 if fpa == fp1:
318 319 fp1, fp2 = fp2, nullid
319 320 elif fpa == fp2:
320 321 fp2 = nullid
321 322
322 323 # is the file unmodified from the parent? report existing entry
323 324 if fp2 == nullid and text == filelog.read(fp1):
324 325 return (fp1, None, None)
325 326
326 327 return (None, fp1, fp2)
327 328
328 329 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
329 330 orig_parent = self.dirstate.parents()[0] or nullid
330 331 p1 = p1 or self.dirstate.parents()[0] or nullid
331 332 p2 = p2 or self.dirstate.parents()[1] or nullid
332 333 c1 = self.changelog.read(p1)
333 334 c2 = self.changelog.read(p2)
334 335 m1 = self.manifest.read(c1[0])
335 336 mf1 = self.manifest.readflags(c1[0])
336 337 m2 = self.manifest.read(c2[0])
337 338 changed = []
338 339
339 340 if orig_parent == p1:
340 341 update_dirstate = 1
341 342 else:
342 343 update_dirstate = 0
343 344
344 345 if not wlock:
345 346 wlock = self.wlock()
346 347 l = self.lock()
347 348 tr = self.transaction()
348 349 mm = m1.copy()
349 350 mfm = mf1.copy()
350 351 linkrev = self.changelog.count()
351 352 for f in files:
352 353 try:
353 354 t = self.wread(f)
354 355 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
355 356 r = self.file(f)
356 357 mfm[f] = tm
357 358
358 359 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
359 360 if entry:
360 361 mm[f] = entry
361 362 continue
362 363
363 364 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
364 365 changed.append(f)
365 366 if update_dirstate:
366 367 self.dirstate.update([f], "n")
367 368 except IOError:
368 369 try:
369 370 del mm[f]
370 371 del mfm[f]
371 372 if update_dirstate:
372 373 self.dirstate.forget([f])
373 374 except:
374 375 # deleted from p2?
375 376 pass
376 377
377 378 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
378 379 user = user or self.ui.username()
379 380 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
380 381 tr.close()
381 382 if update_dirstate:
382 383 self.dirstate.setparents(n, nullid)
383 384
384 385 def commit(self, files=None, text="", user=None, date=None,
385 386 match=util.always, force=False, lock=None, wlock=None):
386 387 commit = []
387 388 remove = []
388 389 changed = []
389 390
390 391 if files:
391 392 for f in files:
392 393 s = self.dirstate.state(f)
393 394 if s in 'nmai':
394 395 commit.append(f)
395 396 elif s == 'r':
396 397 remove.append(f)
397 398 else:
398 399 self.ui.warn(_("%s not tracked!\n") % f)
399 400 else:
400 401 modified, added, removed, deleted, unknown = self.changes(match=match)
401 402 commit = modified + added
402 403 remove = removed
403 404
404 405 p1, p2 = self.dirstate.parents()
405 406 c1 = self.changelog.read(p1)
406 407 c2 = self.changelog.read(p2)
407 408 m1 = self.manifest.read(c1[0])
408 409 mf1 = self.manifest.readflags(c1[0])
409 410 m2 = self.manifest.read(c2[0])
410 411
411 412 if not commit and not remove and not force and p2 == nullid:
412 413 self.ui.status(_("nothing changed\n"))
413 414 return None
414 415
415 416 xp1 = hex(p1)
416 417 if p2 == nullid: xp2 = ''
417 418 else: xp2 = hex(p2)
418 419
419 420 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
420 421
421 422 if not wlock:
422 423 wlock = self.wlock()
423 424 if not lock:
424 425 lock = self.lock()
425 426 tr = self.transaction()
426 427
427 428 # check in files
428 429 new = {}
429 430 linkrev = self.changelog.count()
430 431 commit.sort()
431 432 for f in commit:
432 433 self.ui.note(f + "\n")
433 434 try:
434 435 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
435 436 t = self.wread(f)
436 437 except IOError:
437 438 self.ui.warn(_("trouble committing %s!\n") % f)
438 439 raise
439 440
440 441 r = self.file(f)
441 442
442 443 meta = {}
443 444 cp = self.dirstate.copied(f)
444 445 if cp:
445 446 meta["copy"] = cp
446 447 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
447 448 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
448 449 fp1, fp2 = nullid, nullid
449 450 else:
450 451 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
451 452 if entry:
452 453 new[f] = entry
453 454 continue
454 455
455 456 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
456 457 # remember what we've added so that we can later calculate
457 458 # the files to pull from a set of changesets
458 459 changed.append(f)
459 460
460 461 # update manifest
461 462 m1 = m1.copy()
462 463 m1.update(new)
463 464 for f in remove:
464 465 if f in m1:
465 466 del m1[f]
466 467 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
467 468 (new, remove))
468 469
469 470 # add changeset
470 471 new = new.keys()
471 472 new.sort()
472 473
473 474 user = user or self.ui.username()
474 475 if not text:
475 476 edittext = [""]
476 477 if p2 != nullid:
477 478 edittext.append("HG: branch merge")
478 479 edittext.extend(["HG: changed %s" % f for f in changed])
479 480 edittext.extend(["HG: removed %s" % f for f in remove])
480 481 if not changed and not remove:
481 482 edittext.append("HG: no files changed")
482 483 edittext.append("")
483 484 # run editor in the repository root
484 485 olddir = os.getcwd()
485 486 os.chdir(self.root)
486 487 edittext = self.ui.edit("\n".join(edittext), user)
487 488 os.chdir(olddir)
488 489 if not edittext.rstrip():
489 490 return None
490 491 text = edittext
491 492
492 493 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
493 494 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
494 495 parent2=xp2)
495 496 tr.close()
496 497
497 498 self.dirstate.setparents(n)
498 499 self.dirstate.update(new, "n")
499 500 self.dirstate.forget(remove)
500 501
501 502 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
502 503 return n
503 504
504 505 def walk(self, node=None, files=[], match=util.always, badmatch=None):
505 506 if node:
506 507 fdict = dict.fromkeys(files)
507 508 for fn in self.manifest.read(self.changelog.read(node)[0]):
508 509 fdict.pop(fn, None)
509 510 if match(fn):
510 511 yield 'm', fn
511 512 for fn in fdict:
512 513 if badmatch and badmatch(fn):
513 514 if match(fn):
514 515 yield 'b', fn
515 516 else:
516 517 self.ui.warn(_('%s: No such file in rev %s\n') % (
517 518 util.pathto(self.getcwd(), fn), short(node)))
518 519 else:
519 520 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
520 521 yield src, fn
521 522
522 523 def changes(self, node1=None, node2=None, files=[], match=util.always,
523 524 wlock=None, show_ignored=None):
524 525 """return changes between two nodes or node and working directory
525 526
526 527 If node1 is None, use the first dirstate parent instead.
527 528 If node2 is None, compare node1 with working directory.
528 529 """
529 530
530 531 def fcmp(fn, mf):
531 532 t1 = self.wread(fn)
532 533 t2 = self.file(fn).read(mf.get(fn, nullid))
533 534 return cmp(t1, t2)
534 535
535 536 def mfmatches(node):
536 537 change = self.changelog.read(node)
537 538 mf = dict(self.manifest.read(change[0]))
538 539 for fn in mf.keys():
539 540 if not match(fn):
540 541 del mf[fn]
541 542 return mf
542 543
543 544 if node1:
544 545 # read the manifest from node1 before the manifest from node2,
545 546 # so that we'll hit the manifest cache if we're going through
546 547 # all the revisions in parent->child order.
547 548 mf1 = mfmatches(node1)
548 549
549 550 # are we comparing the working directory?
550 551 if not node2:
551 552 if not wlock:
552 553 try:
553 554 wlock = self.wlock(wait=0)
554 555 except lock.LockException:
555 556 wlock = None
556 557 lookup, modified, added, removed, deleted, unknown, ignored = (
557 558 self.dirstate.changes(files, match, show_ignored))
558 559
559 560 # are we comparing working dir against its parent?
560 561 if not node1:
561 562 if lookup:
562 563 # do a full compare of any files that might have changed
563 564 mf2 = mfmatches(self.dirstate.parents()[0])
564 565 for f in lookup:
565 566 if fcmp(f, mf2):
566 567 modified.append(f)
567 568 elif wlock is not None:
568 569 self.dirstate.update([f], "n")
569 570 else:
570 571 # we are comparing working dir against non-parent
571 572 # generate a pseudo-manifest for the working dir
572 573 mf2 = mfmatches(self.dirstate.parents()[0])
573 574 for f in lookup + modified + added:
574 575 mf2[f] = ""
575 576 for f in removed:
576 577 if f in mf2:
577 578 del mf2[f]
578 579 else:
579 580 # we are comparing two revisions
580 581 deleted, unknown, ignored = [], [], []
581 582 mf2 = mfmatches(node2)
582 583
583 584 if node1:
584 585 # flush lists from dirstate before comparing manifests
585 586 modified, added = [], []
586 587
587 588 for fn in mf2:
588 589 if mf1.has_key(fn):
589 590 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
590 591 modified.append(fn)
591 592 del mf1[fn]
592 593 else:
593 594 added.append(fn)
594 595
595 596 removed = mf1.keys()
596 597
597 598 # sort and return results:
598 599 for l in modified, added, removed, deleted, unknown, ignored:
599 600 l.sort()
600 601 if show_ignored is None:
601 602 return (modified, added, removed, deleted, unknown)
602 603 else:
603 604 return (modified, added, removed, deleted, unknown, ignored)
604 605
605 606 def add(self, list, wlock=None):
606 607 if not wlock:
607 608 wlock = self.wlock()
608 609 for f in list:
609 610 p = self.wjoin(f)
610 611 if not os.path.exists(p):
611 612 self.ui.warn(_("%s does not exist!\n") % f)
612 613 elif not os.path.isfile(p):
613 614 self.ui.warn(_("%s not added: only files supported currently\n")
614 615 % f)
615 616 elif self.dirstate.state(f) in 'an':
616 617 self.ui.warn(_("%s already tracked!\n") % f)
617 618 else:
618 619 self.dirstate.update([f], "a")
619 620
620 621 def forget(self, list, wlock=None):
621 622 if not wlock:
622 623 wlock = self.wlock()
623 624 for f in list:
624 625 if self.dirstate.state(f) not in 'ai':
625 626 self.ui.warn(_("%s not added!\n") % f)
626 627 else:
627 628 self.dirstate.forget([f])
628 629
629 630 def remove(self, list, unlink=False, wlock=None):
630 631 if unlink:
631 632 for f in list:
632 633 try:
633 634 util.unlink(self.wjoin(f))
634 635 except OSError, inst:
635 636 if inst.errno != errno.ENOENT:
636 637 raise
637 638 if not wlock:
638 639 wlock = self.wlock()
639 640 for f in list:
640 641 p = self.wjoin(f)
641 642 if os.path.exists(p):
642 643 self.ui.warn(_("%s still exists!\n") % f)
643 644 elif self.dirstate.state(f) == 'a':
644 645 self.dirstate.forget([f])
645 646 elif f not in self.dirstate:
646 647 self.ui.warn(_("%s not tracked!\n") % f)
647 648 else:
648 649 self.dirstate.update([f], "r")
649 650
650 651 def undelete(self, list, wlock=None):
651 652 p = self.dirstate.parents()[0]
652 653 mn = self.changelog.read(p)[0]
653 654 mf = self.manifest.readflags(mn)
654 655 m = self.manifest.read(mn)
655 656 if not wlock:
656 657 wlock = self.wlock()
657 658 for f in list:
658 659 if self.dirstate.state(f) not in "r":
659 660 self.ui.warn("%s not removed!\n" % f)
660 661 else:
661 662 t = self.file(f).read(m[f])
662 663 self.wwrite(f, t)
663 664 util.set_exec(self.wjoin(f), mf[f])
664 665 self.dirstate.update([f], "n")
665 666
666 667 def copy(self, source, dest, wlock=None):
667 668 p = self.wjoin(dest)
668 669 if not os.path.exists(p):
669 670 self.ui.warn(_("%s does not exist!\n") % dest)
670 671 elif not os.path.isfile(p):
671 672 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
672 673 else:
673 674 if not wlock:
674 675 wlock = self.wlock()
675 676 if self.dirstate.state(dest) == '?':
676 677 self.dirstate.update([dest], "a")
677 678 self.dirstate.copy(source, dest)
678 679
679 680 def heads(self, start=None):
680 681 heads = self.changelog.heads(start)
681 682 # sort the output in rev descending order
682 683 heads = [(-self.changelog.rev(h), h) for h in heads]
683 684 heads.sort()
684 685 return [n for (r, n) in heads]
685 686
686 687 # branchlookup returns a dict giving a list of branches for
687 688 # each head. A branch is defined as the tag of a node or
688 689 # the branch of the node's parents. If a node has multiple
689 690 # branch tags, tags are eliminated if they are visible from other
690 691 # branch tags.
691 692 #
692 693 # So, for this graph: a->b->c->d->e
693 694 # \ /
694 695 # aa -----/
695 696 # a has tag 2.6.12
696 697 # d has tag 2.6.13
697 698 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
698 699 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
699 700 # from the list.
700 701 #
701 702 # It is possible that more than one head will have the same branch tag.
702 703 # callers need to check the result for multiple heads under the same
703 704 # branch tag if that is a problem for them (ie checkout of a specific
704 705 # branch).
705 706 #
706 707 # passing in a specific branch will limit the depth of the search
707 708 # through the parents. It won't limit the branches returned in the
708 709 # result though.
709 710 def branchlookup(self, heads=None, branch=None):
710 711 if not heads:
711 712 heads = self.heads()
712 713 headt = [ h for h in heads ]
713 714 chlog = self.changelog
714 715 branches = {}
715 716 merges = []
716 717 seenmerge = {}
717 718
718 719 # traverse the tree once for each head, recording in the branches
719 720 # dict which tags are visible from this head. The branches
720 721 # dict also records which tags are visible from each tag
721 722 # while we traverse.
722 723 while headt or merges:
723 724 if merges:
724 725 n, found = merges.pop()
725 726 visit = [n]
726 727 else:
727 728 h = headt.pop()
728 729 visit = [h]
729 730 found = [h]
730 731 seen = {}
731 732 while visit:
732 733 n = visit.pop()
733 734 if n in seen:
734 735 continue
735 736 pp = chlog.parents(n)
736 737 tags = self.nodetags(n)
737 738 if tags:
738 739 for x in tags:
739 740 if x == 'tip':
740 741 continue
741 742 for f in found:
742 743 branches.setdefault(f, {})[n] = 1
743 744 branches.setdefault(n, {})[n] = 1
744 745 break
745 746 if n not in found:
746 747 found.append(n)
747 748 if branch in tags:
748 749 continue
749 750 seen[n] = 1
750 751 if pp[1] != nullid and n not in seenmerge:
751 752 merges.append((pp[1], [x for x in found]))
752 753 seenmerge[n] = 1
753 754 if pp[0] != nullid:
754 755 visit.append(pp[0])
755 756 # traverse the branches dict, eliminating branch tags from each
756 757 # head that are visible from another branch tag for that head.
757 758 out = {}
758 759 viscache = {}
759 760 for h in heads:
760 761 def visible(node):
761 762 if node in viscache:
762 763 return viscache[node]
763 764 ret = {}
764 765 visit = [node]
765 766 while visit:
766 767 x = visit.pop()
767 768 if x in viscache:
768 769 ret.update(viscache[x])
769 770 elif x not in ret:
770 771 ret[x] = 1
771 772 if x in branches:
772 773 visit[len(visit):] = branches[x].keys()
773 774 viscache[node] = ret
774 775 return ret
775 776 if h not in branches:
776 777 continue
777 778 # O(n^2), but somewhat limited. This only searches the
778 779 # tags visible from a specific head, not all the tags in the
779 780 # whole repo.
780 781 for b in branches[h]:
781 782 vis = False
782 783 for bb in branches[h].keys():
783 784 if b != bb:
784 785 if b in visible(bb):
785 786 vis = True
786 787 break
787 788 if not vis:
788 789 l = out.setdefault(h, [])
789 790 l[len(l):] = self.nodetags(b)
790 791 return out
791 792
792 793 def branches(self, nodes):
793 794 if not nodes:
794 795 nodes = [self.changelog.tip()]
795 796 b = []
796 797 for n in nodes:
797 798 t = n
798 799 while n:
799 800 p = self.changelog.parents(n)
800 801 if p[1] != nullid or p[0] == nullid:
801 802 b.append((t, n, p[0], p[1]))
802 803 break
803 804 n = p[0]
804 805 return b
805 806
806 807 def between(self, pairs):
807 808 r = []
808 809
809 810 for top, bottom in pairs:
810 811 n, l, i = top, [], 0
811 812 f = 1
812 813
813 814 while n != bottom:
814 815 p = self.changelog.parents(n)[0]
815 816 if i == f:
816 817 l.append(n)
817 818 f = f * 2
818 819 n = p
819 820 i += 1
820 821
821 822 r.append(l)
822 823
823 824 return r
824 825
825 826 def findincoming(self, remote, base=None, heads=None, force=False):
826 827 m = self.changelog.nodemap
827 828 search = []
828 829 fetch = {}
829 830 seen = {}
830 831 seenbranch = {}
831 832 if base == None:
832 833 base = {}
833 834
834 835 if not heads:
835 836 heads = remote.heads()
836 837
837 838 if self.changelog.tip() == nullid:
838 839 if heads != [nullid]:
839 840 return [nullid]
840 841 return []
841 842
842 843 # assume we're closer to the tip than the root
843 844 # and start by examining the heads
844 845 self.ui.status(_("searching for changes\n"))
845 846
846 847 unknown = []
847 848 for h in heads:
848 849 if h not in m:
849 850 unknown.append(h)
850 851 else:
851 852 base[h] = 1
852 853
853 854 if not unknown:
854 855 return []
855 856
856 857 rep = {}
857 858 reqcnt = 0
858 859
859 860 # search through remote branches
860 861 # a 'branch' here is a linear segment of history, with four parts:
861 862 # head, root, first parent, second parent
862 863 # (a branch always has two parents (or none) by definition)
863 864 unknown = remote.branches(unknown)
864 865 while unknown:
865 866 r = []
866 867 while unknown:
867 868 n = unknown.pop(0)
868 869 if n[0] in seen:
869 870 continue
870 871
871 872 self.ui.debug(_("examining %s:%s\n")
872 873 % (short(n[0]), short(n[1])))
873 874 if n[0] == nullid:
874 875 break
875 876 if n in seenbranch:
876 877 self.ui.debug(_("branch already found\n"))
877 878 continue
878 879 if n[1] and n[1] in m: # do we know the base?
879 880 self.ui.debug(_("found incomplete branch %s:%s\n")
880 881 % (short(n[0]), short(n[1])))
881 882 search.append(n) # schedule branch range for scanning
882 883 seenbranch[n] = 1
883 884 else:
884 885 if n[1] not in seen and n[1] not in fetch:
885 886 if n[2] in m and n[3] in m:
886 887 self.ui.debug(_("found new changeset %s\n") %
887 888 short(n[1]))
888 889 fetch[n[1]] = 1 # earliest unknown
889 890 base[n[2]] = 1 # latest known
890 891 continue
891 892
892 893 for a in n[2:4]:
893 894 if a not in rep:
894 895 r.append(a)
895 896 rep[a] = 1
896 897
897 898 seen[n[0]] = 1
898 899
899 900 if r:
900 901 reqcnt += 1
901 902 self.ui.debug(_("request %d: %s\n") %
902 903 (reqcnt, " ".join(map(short, r))))
903 904 for p in range(0, len(r), 10):
904 905 for b in remote.branches(r[p:p+10]):
905 906 self.ui.debug(_("received %s:%s\n") %
906 907 (short(b[0]), short(b[1])))
907 908 if b[0] in m:
908 909 self.ui.debug(_("found base node %s\n")
909 910 % short(b[0]))
910 911 base[b[0]] = 1
911 912 elif b[0] not in seen:
912 913 unknown.append(b)
913 914
914 915 # do binary search on the branches we found
915 916 while search:
916 917 n = search.pop(0)
917 918 reqcnt += 1
918 919 l = remote.between([(n[0], n[1])])[0]
919 920 l.append(n[1])
920 921 p = n[0]
921 922 f = 1
922 923 for i in l:
923 924 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
924 925 if i in m:
925 926 if f <= 2:
926 927 self.ui.debug(_("found new branch changeset %s\n") %
927 928 short(p))
928 929 fetch[p] = 1
929 930 base[i] = 1
930 931 else:
931 932 self.ui.debug(_("narrowed branch search to %s:%s\n")
932 933 % (short(p), short(i)))
933 934 search.append((p, i))
934 935 break
935 936 p, f = i, f * 2
936 937
937 938 # sanity check our fetch list
938 939 for f in fetch.keys():
939 940 if f in m:
940 941 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
941 942
942 943 if base.keys() == [nullid]:
943 944 if force:
944 945 self.ui.warn(_("warning: repository is unrelated\n"))
945 946 else:
946 947 raise util.Abort(_("repository is unrelated"))
947 948
948 949 self.ui.note(_("found new changesets starting at ") +
949 950 " ".join([short(f) for f in fetch]) + "\n")
950 951
951 952 self.ui.debug(_("%d total queries\n") % reqcnt)
952 953
953 954 return fetch.keys()
954 955
955 956 def findoutgoing(self, remote, base=None, heads=None, force=False):
956 957 """Return list of nodes that are roots of subsets not in remote
957 958
958 959 If base dict is specified, assume that these nodes and their parents
959 960 exist on the remote side.
960 961 If a list of heads is specified, return only nodes which are heads
961 962 or ancestors of these heads, and return a second element which
962 963 contains all remote heads which get new children.
963 964 """
964 965 if base == None:
965 966 base = {}
966 967 self.findincoming(remote, base, heads, force=force)
967 968
968 969 self.ui.debug(_("common changesets up to ")
969 970 + " ".join(map(short, base.keys())) + "\n")
970 971
971 972 remain = dict.fromkeys(self.changelog.nodemap)
972 973
973 974 # prune everything remote has from the tree
974 975 del remain[nullid]
975 976 remove = base.keys()
976 977 while remove:
977 978 n = remove.pop(0)
978 979 if n in remain:
979 980 del remain[n]
980 981 for p in self.changelog.parents(n):
981 982 remove.append(p)
982 983
983 984 # find every node whose parents have been pruned
984 985 subset = []
985 986 # find every remote head that will get new children
986 987 updated_heads = {}
987 988 for n in remain:
988 989 p1, p2 = self.changelog.parents(n)
989 990 if p1 not in remain and p2 not in remain:
990 991 subset.append(n)
991 992 if heads:
992 993 if p1 in heads:
993 994 updated_heads[p1] = True
994 995 if p2 in heads:
995 996 updated_heads[p2] = True
996 997
997 998 # this is the set of all roots we have to push
998 999 if heads:
999 1000 return subset, updated_heads.keys()
1000 1001 else:
1001 1002 return subset
1002 1003
1003 1004 def pull(self, remote, heads=None, force=False):
1004 1005 l = self.lock()
1005 1006
1006 1007 fetch = self.findincoming(remote, force=force)
1007 1008 if fetch == [nullid]:
1008 1009 self.ui.status(_("requesting all changes\n"))
1009 1010
1010 1011 if not fetch:
1011 1012 self.ui.status(_("no changes found\n"))
1012 1013 return 0
1013 1014
1014 1015 if heads is None:
1015 1016 cg = remote.changegroup(fetch, 'pull')
1016 1017 else:
1017 1018 cg = remote.changegroupsubset(fetch, heads, 'pull')
1018 1019 return self.addchangegroup(cg)
1019 1020
1020 1021 def push(self, remote, force=False, revs=None):
1021 1022 lock = remote.lock()
1022 1023
1023 1024 base = {}
1024 1025 remote_heads = remote.heads()
1025 1026 inc = self.findincoming(remote, base, remote_heads, force=force)
1026 1027 if not force and inc:
1027 1028 self.ui.warn(_("abort: unsynced remote changes!\n"))
1028 1029 self.ui.status(_("(did you forget to sync?"
1029 1030 " use push -f to force)\n"))
1030 1031 return 1
1031 1032
1032 1033 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1033 1034 if revs is not None:
1034 1035 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1035 1036 else:
1036 1037 bases, heads = update, self.changelog.heads()
1037 1038
1038 1039 if not bases:
1039 1040 self.ui.status(_("no changes found\n"))
1040 1041 return 1
1041 1042 elif not force:
1042 1043 # FIXME we don't properly detect creation of new heads
1043 1044 # in the push -r case, assume the user knows what he's doing
1044 1045 if not revs and len(remote_heads) < len(heads) \
1045 1046 and remote_heads != [nullid]:
1046 1047 self.ui.warn(_("abort: push creates new remote branches!\n"))
1047 1048 self.ui.status(_("(did you forget to merge?"
1048 1049 " use push -f to force)\n"))
1049 1050 return 1
1050 1051
1051 1052 if revs is None:
1052 1053 cg = self.changegroup(update, 'push')
1053 1054 else:
1054 1055 cg = self.changegroupsubset(update, revs, 'push')
1055 1056 return remote.addchangegroup(cg)
1056 1057
1057 1058 def changegroupsubset(self, bases, heads, source):
1058 1059 """This function generates a changegroup consisting of all the nodes
1059 1060         that are descendants of any of the bases, and ancestors of any of
1060 1061 the heads.
1061 1062
1062 1063 It is fairly complex as determining which filenodes and which
1063 1064 manifest nodes need to be included for the changeset to be complete
1064 1065 is non-trivial.
1065 1066
1066 1067 Another wrinkle is doing the reverse, figuring out which changeset in
1067 1068 the changegroup a particular filenode or manifestnode belongs to."""
1068 1069
1069 1070 self.hook('preoutgoing', throw=True, source=source)
1070 1071
1071 1072 # Set up some initial variables
1072 1073 # Make it easy to refer to self.changelog
1073 1074 cl = self.changelog
1074 1075 # msng is short for missing - compute the list of changesets in this
1075 1076 # changegroup.
1076 1077 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1077 1078 # Some bases may turn out to be superfluous, and some heads may be
1078 1079 # too. nodesbetween will return the minimal set of bases and heads
1079 1080 # necessary to re-create the changegroup.
1080 1081
1081 1082 # Known heads are the list of heads that it is assumed the recipient
1082 1083 # of this changegroup will know about.
1083 1084 knownheads = {}
1084 1085 # We assume that all parents of bases are known heads.
1085 1086 for n in bases:
1086 1087 for p in cl.parents(n):
1087 1088 if p != nullid:
1088 1089 knownheads[p] = 1
1089 1090 knownheads = knownheads.keys()
1090 1091 if knownheads:
1091 1092 # Now that we know what heads are known, we can compute which
1092 1093 # changesets are known. The recipient must know about all
1093 1094 # changesets required to reach the known heads from the null
1094 1095 # changeset.
1095 1096 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1096 1097 junk = None
1097 1098 # Transform the list into an ersatz set.
1098 1099 has_cl_set = dict.fromkeys(has_cl_set)
1099 1100 else:
1100 1101 # If there were no known heads, the recipient cannot be assumed to
1101 1102 # know about any changesets.
1102 1103 has_cl_set = {}
1103 1104
1104 1105 # Make it easy to refer to self.manifest
1105 1106 mnfst = self.manifest
1106 1107 # We don't know which manifests are missing yet
1107 1108 msng_mnfst_set = {}
1108 1109 # Nor do we know which filenodes are missing.
1109 1110 msng_filenode_set = {}
1110 1111
1111 1112 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1112 1113 junk = None
1113 1114
1114 1115 # A changeset always belongs to itself, so the changenode lookup
1115 1116 # function for a changenode is identity.
1116 1117 def identity(x):
1117 1118 return x
1118 1119
1119 1120 # A function generating function. Sets up an environment for the
1120 1121 # inner function.
1121 1122 def cmp_by_rev_func(revlog):
1122 1123 # Compare two nodes by their revision number in the environment's
1123 1124 # revision history. Since the revision number both represents the
1124 1125 # most efficient order to read the nodes in, and represents a
1125 1126 # topological sorting of the nodes, this function is often useful.
1126 1127 def cmp_by_rev(a, b):
1127 1128 return cmp(revlog.rev(a), revlog.rev(b))
1128 1129 return cmp_by_rev
1129 1130
1130 1131 # If we determine that a particular file or manifest node must be a
1131 1132 # node that the recipient of the changegroup will already have, we can
1132 1133 # also assume the recipient will have all the parents. This function
1133 1134 # prunes them from the set of missing nodes.
1134 1135 def prune_parents(revlog, hasset, msngset):
1135 1136 haslst = hasset.keys()
1136 1137 haslst.sort(cmp_by_rev_func(revlog))
1137 1138 for node in haslst:
1138 1139 parentlst = [p for p in revlog.parents(node) if p != nullid]
1139 1140 while parentlst:
1140 1141 n = parentlst.pop()
1141 1142 if n not in hasset:
1142 1143 hasset[n] = 1
1143 1144 p = [p for p in revlog.parents(n) if p != nullid]
1144 1145 parentlst.extend(p)
1145 1146 for n in hasset:
1146 1147 msngset.pop(n, None)
1147 1148
1148 1149 # This is a function generating function used to set up an environment
1149 1150 # for the inner function to execute in.
1150 1151 def manifest_and_file_collector(changedfileset):
1151 1152 # This is an information gathering function that gathers
1152 1153 # information from each changeset node that goes out as part of
1153 1154 # the changegroup. The information gathered is a list of which
1154 1155 # manifest nodes are potentially required (the recipient may
1155 1156 # already have them) and total list of all files which were
1156 1157 # changed in any changeset in the changegroup.
1157 1158 #
1158 1159 # We also remember the first changenode we saw any manifest
1159 1160 # referenced by so we can later determine which changenode 'owns'
1160 1161 # the manifest.
1161 1162 def collect_manifests_and_files(clnode):
1162 1163 c = cl.read(clnode)
1163 1164 for f in c[3]:
1164 1165 # This is to make sure we only have one instance of each
1165 1166 # filename string for each filename.
1166 1167 changedfileset.setdefault(f, f)
1167 1168 msng_mnfst_set.setdefault(c[0], clnode)
1168 1169 return collect_manifests_and_files
1169 1170
1170 1171 # Figure out which manifest nodes (of the ones we think might be part
1171 1172 # of the changegroup) the recipient must know about and remove them
1172 1173 # from the changegroup.
1173 1174 def prune_manifests():
1174 1175 has_mnfst_set = {}
1175 1176 for n in msng_mnfst_set:
1176 1177 # If a 'missing' manifest thinks it belongs to a changenode
1177 1178 # the recipient is assumed to have, obviously the recipient
1178 1179 # must have that manifest.
1179 1180 linknode = cl.node(mnfst.linkrev(n))
1180 1181 if linknode in has_cl_set:
1181 1182 has_mnfst_set[n] = 1
1182 1183 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1183 1184
1184 1185 # Use the information collected in collect_manifests_and_files to say
1185 1186 # which changenode any manifestnode belongs to.
1186 1187 def lookup_manifest_link(mnfstnode):
1187 1188 return msng_mnfst_set[mnfstnode]
1188 1189
1189 1190 # A function generating function that sets up the initial environment
1190 1191         # for the inner function.
1191 1192 def filenode_collector(changedfiles):
1192 1193 next_rev = [0]
1193 1194 # This gathers information from each manifestnode included in the
1194 1195 # changegroup about which filenodes the manifest node references
1195 1196 # so we can include those in the changegroup too.
1196 1197 #
1197 1198 # It also remembers which changenode each filenode belongs to. It
1198 1199             # does this by assuming that a filenode belongs to the changenode
1199 1200 # the first manifest that references it belongs to.
1200 1201 def collect_msng_filenodes(mnfstnode):
1201 1202 r = mnfst.rev(mnfstnode)
1202 1203 if r == next_rev[0]:
1203 1204 # If the last rev we looked at was the one just previous,
1204 1205 # we only need to see a diff.
1205 1206 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1206 1207 # For each line in the delta
1207 1208 for dline in delta.splitlines():
1208 1209 # get the filename and filenode for that line
1209 1210 f, fnode = dline.split('\0')
1210 1211 fnode = bin(fnode[:40])
1211 1212 f = changedfiles.get(f, None)
1212 1213 # And if the file is in the list of files we care
1213 1214 # about.
1214 1215 if f is not None:
1215 1216 # Get the changenode this manifest belongs to
1216 1217 clnode = msng_mnfst_set[mnfstnode]
1217 1218 # Create the set of filenodes for the file if
1218 1219 # there isn't one already.
1219 1220 ndset = msng_filenode_set.setdefault(f, {})
1220 1221 # And set the filenode's changelog node to the
1221 1222 # manifest's if it hasn't been set already.
1222 1223 ndset.setdefault(fnode, clnode)
1223 1224 else:
1224 1225 # Otherwise we need a full manifest.
1225 1226 m = mnfst.read(mnfstnode)
1226 1227                 # For every file we care about.
1227 1228 for f in changedfiles:
1228 1229 fnode = m.get(f, None)
1229 1230 # If it's in the manifest
1230 1231 if fnode is not None:
1231 1232 # See comments above.
1232 1233 clnode = msng_mnfst_set[mnfstnode]
1233 1234 ndset = msng_filenode_set.setdefault(f, {})
1234 1235 ndset.setdefault(fnode, clnode)
1235 1236 # Remember the revision we hope to see next.
1236 1237 next_rev[0] = r + 1
1237 1238 return collect_msng_filenodes
1238 1239
1239 1240         # We have a list of filenodes we think we need for a file, let's remove
1240 1241         # all those we know the recipient must have.
1241 1242 def prune_filenodes(f, filerevlog):
1242 1243 msngset = msng_filenode_set[f]
1243 1244 hasset = {}
1244 1245 # If a 'missing' filenode thinks it belongs to a changenode we
1245 1246 # assume the recipient must have, then the recipient must have
1246 1247 # that filenode.
1247 1248 for n in msngset:
1248 1249 clnode = cl.node(filerevlog.linkrev(n))
1249 1250 if clnode in has_cl_set:
1250 1251 hasset[n] = 1
1251 1252 prune_parents(filerevlog, hasset, msngset)
1252 1253
1253 1254         # A function generator function that sets up a context for the
1254 1255 # inner function.
1255 1256 def lookup_filenode_link_func(fname):
1256 1257 msngset = msng_filenode_set[fname]
1257 1258 # Lookup the changenode the filenode belongs to.
1258 1259 def lookup_filenode_link(fnode):
1259 1260 return msngset[fnode]
1260 1261 return lookup_filenode_link
1261 1262
1262 1263         # Now that we have all these utility functions to help out and
1263 1264 # logically divide up the task, generate the group.
1264 1265 def gengroup():
1265 1266 # The set of changed files starts empty.
1266 1267 changedfiles = {}
1267 1268 # Create a changenode group generator that will call our functions
1268 1269 # back to lookup the owning changenode and collect information.
1269 1270 group = cl.group(msng_cl_lst, identity,
1270 1271 manifest_and_file_collector(changedfiles))
1271 1272 for chnk in group:
1272 1273 yield chnk
1273 1274
1274 1275 # The list of manifests has been collected by the generator
1275 1276 # calling our functions back.
1276 1277 prune_manifests()
1277 1278 msng_mnfst_lst = msng_mnfst_set.keys()
1278 1279 # Sort the manifestnodes by revision number.
1279 1280 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1280 1281 # Create a generator for the manifestnodes that calls our lookup
1281 1282 # and data collection functions back.
1282 1283 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1283 1284 filenode_collector(changedfiles))
1284 1285 for chnk in group:
1285 1286 yield chnk
1286 1287
1287 1288 # These are no longer needed, dereference and toss the memory for
1288 1289 # them.
1289 1290 msng_mnfst_lst = None
1290 1291 msng_mnfst_set.clear()
1291 1292
1292 1293 changedfiles = changedfiles.keys()
1293 1294 changedfiles.sort()
1294 1295 # Go through all our files in order sorted by name.
1295 1296 for fname in changedfiles:
1296 1297 filerevlog = self.file(fname)
1297 1298 # Toss out the filenodes that the recipient isn't really
1298 1299 # missing.
1299 1300 if msng_filenode_set.has_key(fname):
1300 1301 prune_filenodes(fname, filerevlog)
1301 1302 msng_filenode_lst = msng_filenode_set[fname].keys()
1302 1303 else:
1303 1304 msng_filenode_lst = []
1304 1305 # If any filenodes are left, generate the group for them,
1305 1306 # otherwise don't bother.
1306 1307 if len(msng_filenode_lst) > 0:
1307 1308 yield changegroup.genchunk(fname)
1308 1309 # Sort the filenodes by their revision #
1309 1310 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1310 1311 # Create a group generator and only pass in a changenode
1311 1312 # lookup function as we need to collect no information
1312 1313 # from filenodes.
1313 1314 group = filerevlog.group(msng_filenode_lst,
1314 1315 lookup_filenode_link_func(fname))
1315 1316 for chnk in group:
1316 1317 yield chnk
1317 1318 if msng_filenode_set.has_key(fname):
1318 1319 # Don't need this anymore, toss it to free memory.
1319 1320 del msng_filenode_set[fname]
1320 1321 # Signal that no more groups are left.
1321 1322 yield changegroup.closechunk()
1322 1323
1323 1324 if msng_cl_lst:
1324 1325 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1325 1326
1326 1327 return util.chunkbuffer(gengroup())
1327 1328
1328 1329 def changegroup(self, basenodes, source):
1329 1330 """Generate a changegroup of all nodes that we have that a recipient
1330 1331 doesn't.
1331 1332
1332 1333 This is much easier than the previous function as we can assume that
1333 1334 the recipient has any changenode we aren't sending them."""
1334 1335
1335 1336 self.hook('preoutgoing', throw=True, source=source)
1336 1337
1337 1338 cl = self.changelog
1338 1339 nodes = cl.nodesbetween(basenodes, None)[0]
1339 1340 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1340 1341
1341 1342 def identity(x):
1342 1343 return x
1343 1344
1344 1345 def gennodelst(revlog):
1345 1346 for r in xrange(0, revlog.count()):
1346 1347 n = revlog.node(r)
1347 1348 if revlog.linkrev(n) in revset:
1348 1349 yield n
1349 1350
1350 1351 def changed_file_collector(changedfileset):
1351 1352 def collect_changed_files(clnode):
1352 1353 c = cl.read(clnode)
1353 1354 for fname in c[3]:
1354 1355 changedfileset[fname] = 1
1355 1356 return collect_changed_files
1356 1357
1357 1358 def lookuprevlink_func(revlog):
1358 1359 def lookuprevlink(n):
1359 1360 return cl.node(revlog.linkrev(n))
1360 1361 return lookuprevlink
1361 1362
1362 1363 def gengroup():
1363 1364 # construct a list of all changed files
1364 1365 changedfiles = {}
1365 1366
1366 1367 for chnk in cl.group(nodes, identity,
1367 1368 changed_file_collector(changedfiles)):
1368 1369 yield chnk
1369 1370 changedfiles = changedfiles.keys()
1370 1371 changedfiles.sort()
1371 1372
1372 1373 mnfst = self.manifest
1373 1374 nodeiter = gennodelst(mnfst)
1374 1375 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1375 1376 yield chnk
1376 1377
1377 1378 for fname in changedfiles:
1378 1379 filerevlog = self.file(fname)
1379 1380 nodeiter = gennodelst(filerevlog)
1380 1381 nodeiter = list(nodeiter)
1381 1382 if nodeiter:
1382 1383 yield changegroup.genchunk(fname)
1383 1384 lookup = lookuprevlink_func(filerevlog)
1384 1385 for chnk in filerevlog.group(nodeiter, lookup):
1385 1386 yield chnk
1386 1387
1387 1388 yield changegroup.closechunk()
1388 1389
1389 1390 if nodes:
1390 1391 self.hook('outgoing', node=hex(nodes[0]), source=source)
1391 1392
1392 1393 return util.chunkbuffer(gengroup())
1393 1394
1394 1395 def addchangegroup(self, source):
1395 1396 """add changegroup to repo.
1396 1397 returns number of heads modified or added + 1."""
1397 1398
1398 1399 def csmap(x):
1399 1400 self.ui.debug(_("add changeset %s\n") % short(x))
1400 1401 return cl.count()
1401 1402
1402 1403 def revmap(x):
1403 1404 return cl.rev(x)
1404 1405
1405 1406 if not source:
1406 1407 return 0
1407 1408
1408 1409 self.hook('prechangegroup', throw=True)
1409 1410
1410 1411 changesets = files = revisions = 0
1411 1412
1412 1413 tr = self.transaction()
1413 1414
1414 1415 # write changelog and manifest data to temp files so
1415 1416 # concurrent readers will not see inconsistent view
1416 1417 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1417 1418
1418 1419 oldheads = len(cl.heads())
1419 1420
1420 1421 # pull off the changeset group
1421 1422 self.ui.status(_("adding changesets\n"))
1422 1423 co = cl.tip()
1423 1424 chunkiter = changegroup.chunkiter(source)
1424 1425 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1425 1426 cnr, cor = map(cl.rev, (cn, co))
1426 1427 if cn == nullid:
1427 1428 cnr = cor
1428 1429 changesets = cnr - cor
1429 1430
1430 1431 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1431 1432
1432 1433 # pull off the manifest group
1433 1434 self.ui.status(_("adding manifests\n"))
1434 1435 mm = mf.tip()
1435 1436 chunkiter = changegroup.chunkiter(source)
1436 1437 mo = mf.addgroup(chunkiter, revmap, tr)
1437 1438
1438 1439 # process the files
1439 1440 self.ui.status(_("adding file changes\n"))
1440 1441 while 1:
1441 1442 f = changegroup.getchunk(source)
1442 1443 if not f:
1443 1444 break
1444 1445 self.ui.debug(_("adding %s revisions\n") % f)
1445 1446 fl = self.file(f)
1446 1447 o = fl.count()
1447 1448 chunkiter = changegroup.chunkiter(source)
1448 1449 n = fl.addgroup(chunkiter, revmap, tr)
1449 1450 revisions += fl.count() - o
1450 1451 files += 1
1451 1452
1452 1453 # write order here is important so concurrent readers will see
1453 1454 # consistent view of repo
1454 1455 mf.writedata()
1455 1456 cl.writedata()
1456 1457
1457 1458 # make changelog and manifest see real files again
1458 1459 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1459 1460 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1460 1461 self.changelog.checkinlinesize(tr)
1461 1462 self.manifest.checkinlinesize(tr)
1462 1463
1463 1464 newheads = len(self.changelog.heads())
1464 1465 heads = ""
1465 1466 if oldheads and newheads > oldheads:
1466 1467 heads = _(" (+%d heads)") % (newheads - oldheads)
1467 1468
1468 1469 self.ui.status(_("added %d changesets"
1469 1470 " with %d changes to %d files%s\n")
1470 1471 % (changesets, revisions, files, heads))
1471 1472
1472 1473 self.hook('pretxnchangegroup', throw=True,
1473 1474 node=hex(self.changelog.node(cor+1)))
1474 1475
1475 1476 tr.close()
1476 1477
1477 1478 if changesets > 0:
1478 1479 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1479 1480
1480 1481 for i in range(cor + 1, cnr + 1):
1481 1482 self.hook("incoming", node=hex(self.changelog.node(i)))
1482 1483
1483 1484 return newheads - oldheads + 1
1484 1485
1485 1486 def update(self, node, allow=False, force=False, choose=None,
1486 1487 moddirstate=True, forcemerge=False, wlock=None):
1487 1488 pl = self.dirstate.parents()
1488 1489 if not force and pl[1] != nullid:
1489 1490 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1490 1491 return 1
1491 1492
1492 1493 err = False
1493 1494
1494 1495 p1, p2 = pl[0], node
1495 1496 pa = self.changelog.ancestor(p1, p2)
1496 1497 m1n = self.changelog.read(p1)[0]
1497 1498 m2n = self.changelog.read(p2)[0]
1498 1499 man = self.manifest.ancestor(m1n, m2n)
1499 1500 m1 = self.manifest.read(m1n)
1500 1501 mf1 = self.manifest.readflags(m1n)
1501 1502 m2 = self.manifest.read(m2n).copy()
1502 1503 mf2 = self.manifest.readflags(m2n)
1503 1504 ma = self.manifest.read(man)
1504 1505 mfa = self.manifest.readflags(man)
1505 1506
1506 1507 modified, added, removed, deleted, unknown = self.changes()
1507 1508
1508 1509 # is this a jump, or a merge? i.e. is there a linear path
1509 1510 # from p1 to p2?
1510 1511 linear_path = (pa == p1 or pa == p2)
1511 1512
1512 1513 if allow and linear_path:
1513 1514 raise util.Abort(_("there is nothing to merge, "
1514 1515 "just use 'hg update'"))
1515 1516 if allow and not forcemerge:
1516 1517 if modified or added or removed:
1517 1518 raise util.Abort(_("outstanding uncommitted changes"))
1518 1519 if not forcemerge and not force:
1519 1520 for f in unknown:
1520 1521 if f in m2:
1521 1522 t1 = self.wread(f)
1522 1523 t2 = self.file(f).read(m2[f])
1523 1524 if cmp(t1, t2) != 0:
1524 1525 raise util.Abort(_("'%s' already exists in the working"
1525 1526 " dir and differs from remote") % f)
1526 1527
1527 1528 # resolve the manifest to determine which files
1528 1529 # we care about merging
1529 1530 self.ui.note(_("resolving manifests\n"))
1530 1531 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1531 1532 (force, allow, moddirstate, linear_path))
1532 1533 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1533 1534 (short(man), short(m1n), short(m2n)))
1534 1535
1535 1536 merge = {}
1536 1537 get = {}
1537 1538 remove = []
1538 1539
1539 1540 # construct a working dir manifest
1540 1541 mw = m1.copy()
1541 1542 mfw = mf1.copy()
1542 1543 umap = dict.fromkeys(unknown)
1543 1544
1544 1545 for f in added + modified + unknown:
1545 1546 mw[f] = ""
1546 1547 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1547 1548
1548 1549 if moddirstate and not wlock:
1549 1550 wlock = self.wlock()
1550 1551
1551 1552 for f in deleted + removed:
1552 1553 if f in mw:
1553 1554 del mw[f]
1554 1555
1555 1556 # If we're jumping between revisions (as opposed to merging),
1556 1557 # and if neither the working directory nor the target rev has
1557 1558 # the file, then we need to remove it from the dirstate, to
1558 1559 # prevent the dirstate from listing the file when it is no
1559 1560 # longer in the manifest.
1560 1561 if moddirstate and linear_path and f not in m2:
1561 1562 self.dirstate.forget((f,))
1562 1563
1563 1564 # Compare manifests
1564 1565 for f, n in mw.iteritems():
1565 1566 if choose and not choose(f):
1566 1567 continue
1567 1568 if f in m2:
1568 1569 s = 0
1569 1570
1570 1571                 # is the wfile new since m1, and does it match m2?
1571 1572 if f not in m1:
1572 1573 t1 = self.wread(f)
1573 1574 t2 = self.file(f).read(m2[f])
1574 1575 if cmp(t1, t2) == 0:
1575 1576 n = m2[f]
1576 1577 del t1, t2
1577 1578
1578 1579 # are files different?
1579 1580 if n != m2[f]:
1580 1581 a = ma.get(f, nullid)
1581 1582 # are both different from the ancestor?
1582 1583 if n != a and m2[f] != a:
1583 1584 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1584 1585 # merge executable bits
1585 1586 # "if we changed or they changed, change in merge"
1586 1587 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1587 1588 mode = ((a^b) | (a^c)) ^ a
1588 1589 merge[f] = (m1.get(f, nullid), m2[f], mode)
1589 1590 s = 1
1590 1591 # are we clobbering?
1591 1592 # is remote's version newer?
1592 1593 # or are we going back in time?
1593 1594 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1594 1595 self.ui.debug(_(" remote %s is newer, get\n") % f)
1595 1596 get[f] = m2[f]
1596 1597 s = 1
1597 1598 elif f in umap or f in added:
1598 1599 # this unknown file is the same as the checkout
1599 1600 # we need to reset the dirstate if the file was added
1600 1601 get[f] = m2[f]
1601 1602
1602 1603 if not s and mfw[f] != mf2[f]:
1603 1604 if force:
1604 1605 self.ui.debug(_(" updating permissions for %s\n") % f)
1605 1606 util.set_exec(self.wjoin(f), mf2[f])
1606 1607 else:
1607 1608 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1608 1609 mode = ((a^b) | (a^c)) ^ a
1609 1610 if mode != b:
1610 1611 self.ui.debug(_(" updating permissions for %s\n")
1611 1612 % f)
1612 1613 util.set_exec(self.wjoin(f), mode)
1613 1614 del m2[f]
1614 1615 elif f in ma:
1615 1616 if n != ma[f]:
1616 1617 r = _("d")
1617 1618 if not force and (linear_path or allow):
1618 1619 r = self.ui.prompt(
1619 1620 (_(" local changed %s which remote deleted\n") % f) +
1620 1621 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1621 1622 if r == _("d"):
1622 1623 remove.append(f)
1623 1624 else:
1624 1625 self.ui.debug(_("other deleted %s\n") % f)
1625 1626 remove.append(f) # other deleted it
1626 1627 else:
1627 1628 # file is created on branch or in working directory
1628 1629 if force and f not in umap:
1629 1630 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1630 1631 remove.append(f)
1631 1632 elif n == m1.get(f, nullid): # same as parent
1632 1633 if p2 == pa: # going backwards?
1633 1634 self.ui.debug(_("remote deleted %s\n") % f)
1634 1635 remove.append(f)
1635 1636 else:
1636 1637 self.ui.debug(_("local modified %s, keeping\n") % f)
1637 1638 else:
1638 1639 self.ui.debug(_("working dir created %s, keeping\n") % f)
1639 1640
1640 1641 for f, n in m2.iteritems():
1641 1642 if choose and not choose(f):
1642 1643 continue
1643 1644 if f[0] == "/":
1644 1645 continue
1645 1646 if f in ma and n != ma[f]:
1646 1647 r = _("k")
1647 1648 if not force and (linear_path or allow):
1648 1649 r = self.ui.prompt(
1649 1650 (_("remote changed %s which local deleted\n") % f) +
1650 1651 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1651 1652 if r == _("k"):
1652 1653 get[f] = n
1653 1654 elif f not in ma:
1654 1655 self.ui.debug(_("remote created %s\n") % f)
1655 1656 get[f] = n
1656 1657 else:
1657 1658 if force or p2 == pa: # going backwards?
1658 1659 self.ui.debug(_("local deleted %s, recreating\n") % f)
1659 1660 get[f] = n
1660 1661 else:
1661 1662 self.ui.debug(_("local deleted %s\n") % f)
1662 1663
1663 1664 del mw, m1, m2, ma
1664 1665
1665 1666 if force:
1666 1667 for f in merge:
1667 1668 get[f] = merge[f][1]
1668 1669 merge = {}
1669 1670
1670 1671 if linear_path or force:
1671 1672 # we don't need to do any magic, just jump to the new rev
1672 1673 branch_merge = False
1673 1674 p1, p2 = p2, nullid
1674 1675 else:
1675 1676 if not allow:
1676 1677 self.ui.status(_("this update spans a branch"
1677 1678 " affecting the following files:\n"))
1678 1679 fl = merge.keys() + get.keys()
1679 1680 fl.sort()
1680 1681 for f in fl:
1681 1682 cf = ""
1682 1683 if f in merge:
1683 1684 cf = _(" (resolve)")
1684 1685 self.ui.status(" %s%s\n" % (f, cf))
1685 1686 self.ui.warn(_("aborting update spanning branches!\n"))
1686 1687 self.ui.status(_("(use 'hg merge' to merge across branches"
1687 1688 " or 'hg update -C' to lose changes)\n"))
1688 1689 return 1
1689 1690 branch_merge = True
1690 1691
1691 1692 # get the files we don't need to change
1692 1693 files = get.keys()
1693 1694 files.sort()
1694 1695 for f in files:
1695 1696 if f[0] == "/":
1696 1697 continue
1697 1698 self.ui.note(_("getting %s\n") % f)
1698 1699 t = self.file(f).read(get[f])
1699 1700 self.wwrite(f, t)
1700 1701 util.set_exec(self.wjoin(f), mf2[f])
1701 1702 if moddirstate:
1702 1703 if branch_merge:
1703 1704 self.dirstate.update([f], 'n', st_mtime=-1)
1704 1705 else:
1705 1706 self.dirstate.update([f], 'n')
1706 1707
1707 1708 # merge the tricky bits
1708 1709 failedmerge = []
1709 1710 files = merge.keys()
1710 1711 files.sort()
1711 1712 xp1 = hex(p1)
1712 1713 xp2 = hex(p2)
1713 1714 for f in files:
1714 1715 self.ui.status(_("merging %s\n") % f)
1715 1716 my, other, flag = merge[f]
1716 1717 ret = self.merge3(f, my, other, xp1, xp2)
1717 1718 if ret:
1718 1719 err = True
1719 1720 failedmerge.append(f)
1720 1721 util.set_exec(self.wjoin(f), flag)
1721 1722 if moddirstate:
1722 1723 if branch_merge:
1723 1724 # We've done a branch merge, mark this file as merged
1724 1725 # so that we properly record the merger later
1725 1726 self.dirstate.update([f], 'm')
1726 1727 else:
1727 1728 # We've update-merged a locally modified file, so
1728 1729 # we set the dirstate to emulate a normal checkout
1729 1730 # of that file some time in the past. Thus our
1730 1731 # merge will appear as a normal local file
1731 1732 # modification.
1732 1733 f_len = len(self.file(f).read(other))
1733 1734 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1734 1735
1735 1736 remove.sort()
1736 1737 for f in remove:
1737 1738 self.ui.note(_("removing %s\n") % f)
1738 1739 util.audit_path(f)
1739 1740 try:
1740 1741 util.unlink(self.wjoin(f))
1741 1742 except OSError, inst:
1742 1743 if inst.errno != errno.ENOENT:
1743 1744 self.ui.warn(_("update failed to remove %s: %s!\n") %
1744 1745 (f, inst.strerror))
1745 1746 if moddirstate:
1746 1747 if branch_merge:
1747 1748 self.dirstate.update(remove, 'r')
1748 1749 else:
1749 1750 self.dirstate.forget(remove)
1750 1751
1751 1752 if moddirstate:
1752 1753 self.dirstate.setparents(p1, p2)
1753 1754
1754 1755 stat = ((len(get), _("updated")),
1755 1756 (len(merge) - len(failedmerge), _("merged")),
1756 1757 (len(remove), _("removed")),
1757 1758 (len(failedmerge), _("unresolved")))
1758 1759 note = ", ".join([_("%d files %s") % s for s in stat])
1759 1760 self.ui.note("%s\n" % note)
1760 1761 if moddirstate and branch_merge:
1761 1762 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1762 1763
1763 1764 return err
1764 1765
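The exec-bit rule used twice in the function above, mode = ((a^b) | (a^c)) ^ a with a the ancestor's bit, b the working-copy bit and c the remote bit, implements the comment "if we changed or they changed, change in merge". A minimal, self-contained sketch (not part of localrepo.py; merge_exec_bit is a hypothetical name) that checks the rule over all eight bit combinations:

    def merge_exec_bit(a, b, c):
        # a: ancestor's exec bit, b: local/working bit, c: remote bit
        return ((a ^ b) | (a ^ c)) ^ a

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                m = merge_exec_bit(a, b, c)
                if b == a:                 # we did not change it
                    assert m == c          # take the remote value
                elif c == a:               # they did not change it
                    assert m == b          # keep our value
                else:                      # both flipped the bit the same way
                    assert m == b == c
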
1765 1766 def merge3(self, fn, my, other, p1, p2):
1766 1767 """perform a 3-way merge in the working directory"""
1767 1768
1768 1769 def temp(prefix, node):
1769 1770 pre = "%s~%s." % (os.path.basename(fn), prefix)
1770 1771 (fd, name) = tempfile.mkstemp("", pre)
1771 1772 f = os.fdopen(fd, "wb")
1772 1773 self.wwrite(fn, fl.read(node), f)
1773 1774 f.close()
1774 1775 return name
1775 1776
1776 1777 fl = self.file(fn)
1777 1778 base = fl.ancestor(my, other)
1778 1779 a = self.wjoin(fn)
1779 1780 b = temp("base", base)
1780 1781 c = temp("other", other)
1781 1782
1782 1783 self.ui.note(_("resolving %s\n") % fn)
1783 1784 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1784 1785 (fn, short(my), short(other), short(base)))
1785 1786
1786 1787 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1787 1788 or "hgmerge")
1788 1789 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1789 1790 environ={'HG_FILE': fn,
1790 1791 'HG_MY_NODE': p1,
1791 1792 'HG_OTHER_NODE': p2,
1792 1793 'HG_FILE_MY_NODE': hex(my),
1793 1794 'HG_FILE_OTHER_NODE': hex(other),
1794 1795 'HG_FILE_BASE_NODE': hex(base)})
1795 1796 if r:
1796 1797 self.ui.warn(_("merging %s failed!\n") % fn)
1797 1798
1798 1799 os.unlink(b)
1799 1800 os.unlink(c)
1800 1801 return r
1801 1802
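merge3() above leaves the actual three-way merge to an external program: whatever HGMERGE (or ui.merge, falling back to "hgmerge") names is run with the working copy, the ancestor temp file and the other-parent temp file as its three arguments, the HG_* variables shown in the environment, and a non-zero exit status marking the file as unresolved. A hypothetical, deliberately simple replacement sketching that contract (the name trivialmerge.py is an assumption, not something shipped with Mercurial of this era):

    # trivialmerge.py -- hypothetical HGMERGE program, for illustration only.
    # argv[1]: the file in the working directory (local version)
    # argv[2]: temp copy of the common ancestor ("base")
    # argv[3]: temp copy of the other parent's version ("other")
    import os, sys

    def main(local, base, other):
        l, b, o = [open(p, 'rb').read() for p in (local, base, other)]
        if o == b or o == l:        # they did not change it, or both agree
            return 0                # keep the local version as the result
        if l == b:                  # only they changed it: take their version
            open(local, 'wb').write(o)
            return 0
        sys.stderr.write("conflict in %s, giving up\n"
                         % os.environ.get('HG_FILE', local))
        return 1                    # non-zero exit marks the merge as failed

    if __name__ == '__main__':
        sys.exit(main(*sys.argv[1:4]))

With HGMERGE set to, say, "python /path/to/trivialmerge.py", merge3() would invoke it once per conflicting file and fall back to reporting "merging ... failed!" whenever it exits non-zero.
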
1802 1803 def verify(self):
1803 1804 filelinkrevs = {}
1804 1805 filenodes = {}
1805 1806 changesets = revisions = files = 0
1806 1807 errors = [0]
1807 1808 warnings = [0]
1808 1809 neededmanifests = {}
1809 1810
1810 1811 def err(msg):
1811 1812 self.ui.warn(msg + "\n")
1812 1813 errors[0] += 1
1813 1814
1814 1815 def warn(msg):
1815 1816 self.ui.warn(msg + "\n")
1816 1817 warnings[0] += 1
1817 1818
1818 1819 def checksize(obj, name):
1819 1820 d = obj.checksize()
1820 1821 if d[0]:
1821 1822 err(_("%s data length off by %d bytes") % (name, d[0]))
1822 1823 if d[1]:
1823 1824 err(_("%s index contains %d extra bytes") % (name, d[1]))
1824 1825
1825 1826 def checkversion(obj, name):
1826 1827 if obj.version != revlog.REVLOGV0:
1827 1828 if not revlogv1:
1828 1829 warn(_("warning: `%s' uses revlog format 1") % name)
1829 1830 elif revlogv1:
1830 1831 warn(_("warning: `%s' uses revlog format 0") % name)
1831 1832
1832 1833 revlogv1 = self.revlogversion != revlog.REVLOGV0
1834 if self.ui.verbose or revlogv1 != self.revlogv1:
1833 1835 self.ui.status(_("repository uses revlog format %d\n") %
1834 1836 (revlogv1 and 1 or 0))
1835 1837
1836 1838 seen = {}
1837 1839 self.ui.status(_("checking changesets\n"))
1838 1840 checksize(self.changelog, "changelog")
1839 1841
1840 1842 for i in range(self.changelog.count()):
1841 1843 changesets += 1
1842 1844 n = self.changelog.node(i)
1843 1845 l = self.changelog.linkrev(n)
1844 1846 if l != i:
1845 1847 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1846 1848 if n in seen:
1847 1849 err(_("duplicate changeset at revision %d") % i)
1848 1850 seen[n] = 1
1849 1851
1850 1852 for p in self.changelog.parents(n):
1851 1853 if p not in self.changelog.nodemap:
1852 1854 err(_("changeset %s has unknown parent %s") %
1853 1855 (short(n), short(p)))
1854 1856 try:
1855 1857 changes = self.changelog.read(n)
1856 1858 except KeyboardInterrupt:
1857 1859 self.ui.warn(_("interrupted"))
1858 1860 raise
1859 1861 except Exception, inst:
1860 1862 err(_("unpacking changeset %s: %s") % (short(n), inst))
1861 1863 continue
1862 1864
1863 1865 neededmanifests[changes[0]] = n
1864 1866
1865 1867 for f in changes[3]:
1866 1868 filelinkrevs.setdefault(f, []).append(i)
1867 1869
1868 1870 seen = {}
1869 1871 self.ui.status(_("checking manifests\n"))
1870 1872 checkversion(self.manifest, "manifest")
1871 1873 checksize(self.manifest, "manifest")
1872 1874
1873 1875 for i in range(self.manifest.count()):
1874 1876 n = self.manifest.node(i)
1875 1877 l = self.manifest.linkrev(n)
1876 1878
1877 1879 if l < 0 or l >= self.changelog.count():
1878 1880 err(_("bad manifest link (%d) at revision %d") % (l, i))
1879 1881
1880 1882 if n in neededmanifests:
1881 1883 del neededmanifests[n]
1882 1884
1883 1885 if n in seen:
1884 1886 err(_("duplicate manifest at revision %d") % i)
1885 1887
1886 1888 seen[n] = 1
1887 1889
1888 1890 for p in self.manifest.parents(n):
1889 1891 if p not in self.manifest.nodemap:
1890 1892 err(_("manifest %s has unknown parent %s") %
1891 1893 (short(n), short(p)))
1892 1894
1893 1895 try:
1894 1896 delta = mdiff.patchtext(self.manifest.delta(n))
1895 1897 except KeyboardInterrupt:
1896 1898 self.ui.warn(_("interrupted"))
1897 1899 raise
1898 1900 except Exception, inst:
1899 1901 err(_("unpacking manifest %s: %s") % (short(n), inst))
1900 1902 continue
1901 1903
1902 1904 try:
1903 1905 ff = [ l.split('\0') for l in delta.splitlines() ]
1904 1906 for f, fn in ff:
1905 1907 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1906 1908 except (ValueError, TypeError), inst:
1907 1909 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1908 1910
1909 1911 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1910 1912
1911 1913 for m, c in neededmanifests.items():
1912 1914 err(_("changeset %s refers to unknown manifest %s") %
1913 1915 (short(c), short(m)))
1914 1916 del neededmanifests
1915 1917
1916 1918 for f in filenodes:
1917 1919 if f not in filelinkrevs:
1918 1920 err(_("file %s in manifest but not in changesets") % f)
1919 1921
1920 1922 for f in filelinkrevs:
1921 1923 if f not in filenodes:
1922 1924 err(_("file %s in changeset but not in manifest") % f)
1923 1925
1924 1926 self.ui.status(_("checking files\n"))
1925 1927 ff = filenodes.keys()
1926 1928 ff.sort()
1927 1929 for f in ff:
1928 1930 if f == "/dev/null":
1929 1931 continue
1930 1932 files += 1
1931 1933 if not f:
1932 1934 err(_("file without name in manifest %s") % short(n))
1933 1935 continue
1934 1936 fl = self.file(f)
1935 1937 checkversion(fl, f)
1936 1938 checksize(fl, f)
1937 1939
1938 1940 nodes = {nullid: 1}
1939 1941 seen = {}
1940 1942 for i in range(fl.count()):
1941 1943 revisions += 1
1942 1944 n = fl.node(i)
1943 1945
1944 1946 if n in seen:
1945 1947 err(_("%s: duplicate revision %d") % (f, i))
1946 1948 if n not in filenodes[f]:
1947 1949 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1948 1950 else:
1949 1951 del filenodes[f][n]
1950 1952
1951 1953 flr = fl.linkrev(n)
1952 1954 if flr not in filelinkrevs.get(f, []):
1953 1955 err(_("%s:%s points to unexpected changeset %d")
1954 1956 % (f, short(n), flr))
1955 1957 else:
1956 1958 filelinkrevs[f].remove(flr)
1957 1959
1958 1960 # verify contents
1959 1961 try:
1960 1962 t = fl.read(n)
1961 1963 except KeyboardInterrupt:
1962 1964 self.ui.warn(_("interrupted"))
1963 1965 raise
1964 1966 except Exception, inst:
1965 1967 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1966 1968
1967 1969 # verify parents
1968 1970 (p1, p2) = fl.parents(n)
1969 1971 if p1 not in nodes:
1970 1972 err(_("file %s:%s unknown parent 1 %s") %
1971 1973 (f, short(n), short(p1)))
1972 1974 if p2 not in nodes:
1973 1975 err(_("file %s:%s unknown parent 2 %s") %
1974 1976 (f, short(n), short(p2)))
1975 1977 nodes[n] = 1
1976 1978
1977 1979 # cross-check
1978 1980 for node in filenodes[f]:
1979 1981 err(_("node %s in manifests not in %s") % (hex(node), f))
1980 1982
1981 1983 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1982 1984 (files, changesets, revisions))
1983 1985
1984 1986 if warnings[0]:
1985 1987 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
1986 1988 if errors[0]:
1987 1989 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1988 1990 return 1
1989 1991
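The only behavioural change in verify() is the condition added above: the "repository uses revlog format ..." line, which every test's expected output used to carry, is now printed only in verbose mode or when the repository's actual format disagrees with the configured default (self.revlogv1, computed in __init__ from the [revlog] hgrc section). A hedged restatement of that rule, with hypothetical names:

    def should_report_format(verbose, repo_uses_v1, default_is_v1):
        # verbose:       ui.verbose
        # repo_uses_v1:  revlogv1, derived from self.revlogversion in verify()
        # default_is_v1: self.revlogv1, derived from the [revlog] hgrc section
        return verbose or repo_uses_v1 != default_is_v1

    assert not should_report_format(False, False, False)  # quiet and matching: silent
    assert should_report_format(False, True, False)        # format mismatch: reported
    assert should_report_format(True, False, False)        # verbose: always reported
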
1990 1992 # used to avoid circular references so destructors work
1991 1993 def aftertrans(base):
1992 1994 p = base
1993 1995 def a():
1994 1996 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1995 1997 util.rename(os.path.join(p, "journal.dirstate"),
1996 1998 os.path.join(p, "undo.dirstate"))
1997 1999 return a
1998 2000
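aftertrans() only builds a closure; the call site that hands it to the transaction lies outside this hunk. A hedged usage sketch against a throwaway directory (os.rename stands in for util.rename so the snippet is self-contained):

    import os, tempfile

    def aftertrans(base):
        p = base
        def a():
            os.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
            os.rename(os.path.join(p, "journal.dirstate"),
                      os.path.join(p, "undo.dirstate"))
        return a

    d = tempfile.mkdtemp()
    open(os.path.join(d, "journal"), "w").close()
    open(os.path.join(d, "journal.dirstate"), "w").close()
    cb = aftertrans(d)      # registered with the transaction in the real code
    cb()                    # on commit: journal files become the undo files
    assert os.path.exists(os.path.join(d, "undo"))
    assert os.path.exists(os.path.join(d, "undo.dirstate"))

The hunks that follow update the test suite's expected output: the repositories created by the tests all use the default format, so the "repository uses revlog format 0" line disappears from each verify run.
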
@@ -1,14 +1,13 b''
1 1 changeset: 0:0acdaf898367
2 2 tag: tip
3 3 user: test
4 4 date: Mon Jan 12 13:46:40 1970 +0000
5 5 summary: test
6 6
7 7 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
8 8 a
9 repository uses revlog format 0
10 9 checking changesets
11 10 checking manifests
12 11 crosschecking files in changesets and manifests
13 12 checking files
14 13 1 files, 1 changesets, 1 total revisions
@@ -1,16 +1,15 b''
1 1 pulling from ../source
2 2 abort: pretxncommit hook exited with status 1
3 3 transaction abort!
4 4 rollback completed
5 5 searching for changes
6 6 adding changesets
7 7 adding manifests
8 8 adding file changes
9 9 added 1 changesets with 1 changes to 1 files
10 10 (run 'hg update' to get a working copy)
11 repository uses revlog format 0
12 11 checking changesets
13 12 checking manifests
14 13 crosschecking files in changesets and manifests
15 14 checking files
16 15 1 files, 2 changesets, 2 total revisions
@@ -1,137 +1,126 b''
1 1 rev offset length base linkrev nodeid p1 p2
2 2 0 0 3 0 0 362fef284ce2 000000000000 000000000000
3 3 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
4 4 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
5 5 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
6 6 rev offset length base linkrev nodeid p1 p2
7 7 0 0 75 0 7 905359268f77 000000000000 000000000000
8 8 rev offset length base linkrev nodeid p1 p2
9 9 0 0 75 0 8 905359268f77 000000000000 000000000000
10 10 rev offset length base linkrev nodeid p1 p2
11 11 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
12 12 rev offset length base linkrev nodeid p1 p2
13 13 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
14 14 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
15 15 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
16 16 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
17 17 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
18 18 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
19 19 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
20 repository uses revlog format 0
21 20 checking changesets
22 21 checking manifests
23 22 crosschecking files in changesets and manifests
24 23 checking files
25 24 4 files, 9 changesets, 7 total revisions
26 25 requesting all changes
27 26 adding changesets
28 27 adding manifests
29 28 adding file changes
30 29 added 1 changesets with 1 changes to 1 files
31 repository uses revlog format 0
32 30 checking changesets
33 31 checking manifests
34 32 crosschecking files in changesets and manifests
35 33 checking files
36 34 1 files, 1 changesets, 1 total revisions
37 35 requesting all changes
38 36 adding changesets
39 37 adding manifests
40 38 adding file changes
41 39 added 2 changesets with 2 changes to 1 files
42 repository uses revlog format 0
43 40 checking changesets
44 41 checking manifests
45 42 crosschecking files in changesets and manifests
46 43 checking files
47 44 1 files, 2 changesets, 2 total revisions
48 45 requesting all changes
49 46 adding changesets
50 47 adding manifests
51 48 adding file changes
52 49 added 3 changesets with 3 changes to 1 files
53 repository uses revlog format 0
54 50 checking changesets
55 51 checking manifests
56 52 crosschecking files in changesets and manifests
57 53 checking files
58 54 1 files, 3 changesets, 3 total revisions
59 55 requesting all changes
60 56 adding changesets
61 57 adding manifests
62 58 adding file changes
63 59 added 4 changesets with 4 changes to 1 files
64 repository uses revlog format 0
65 60 checking changesets
66 61 checking manifests
67 62 crosschecking files in changesets and manifests
68 63 checking files
69 64 1 files, 4 changesets, 4 total revisions
70 65 requesting all changes
71 66 adding changesets
72 67 adding manifests
73 68 adding file changes
74 69 added 2 changesets with 2 changes to 1 files
75 repository uses revlog format 0
76 70 checking changesets
77 71 checking manifests
78 72 crosschecking files in changesets and manifests
79 73 checking files
80 74 1 files, 2 changesets, 2 total revisions
81 75 requesting all changes
82 76 adding changesets
83 77 adding manifests
84 78 adding file changes
85 79 added 3 changesets with 3 changes to 1 files
86 repository uses revlog format 0
87 80 checking changesets
88 81 checking manifests
89 82 crosschecking files in changesets and manifests
90 83 checking files
91 84 1 files, 3 changesets, 3 total revisions
92 85 requesting all changes
93 86 adding changesets
94 87 adding manifests
95 88 adding file changes
96 89 added 4 changesets with 5 changes to 2 files
97 repository uses revlog format 0
98 90 checking changesets
99 91 checking manifests
100 92 crosschecking files in changesets and manifests
101 93 checking files
102 94 2 files, 4 changesets, 5 total revisions
103 95 requesting all changes
104 96 adding changesets
105 97 adding manifests
106 98 adding file changes
107 99 added 5 changesets with 6 changes to 3 files
108 repository uses revlog format 0
109 100 checking changesets
110 101 checking manifests
111 102 crosschecking files in changesets and manifests
112 103 checking files
113 104 3 files, 5 changesets, 6 total revisions
114 105 requesting all changes
115 106 adding changesets
116 107 adding manifests
117 108 adding file changes
118 109 added 5 changesets with 5 changes to 2 files
119 repository uses revlog format 0
120 110 checking changesets
121 111 checking manifests
122 112 crosschecking files in changesets and manifests
123 113 checking files
124 114 2 files, 5 changesets, 5 total revisions
125 115 pulling from ../test-7
126 116 searching for changes
127 117 adding changesets
128 118 adding manifests
129 119 adding file changes
130 120 added 4 changesets with 2 changes to 3 files (+1 heads)
131 121 (run 'hg heads' to see heads, 'hg merge' to merge)
132 repository uses revlog format 0
133 122 checking changesets
134 123 checking manifests
135 124 crosschecking files in changesets and manifests
136 125 checking files
137 126 4 files, 9 changesets, 7 total revisions
@@ -1,15 +1,13 b''
1 1 a
2 repository uses revlog format 0
3 2 checking changesets
4 3 checking manifests
5 4 crosschecking files in changesets and manifests
6 5 checking files
7 6 1 files, 1 changesets, 1 total revisions
8 7 a not present
9 repository uses revlog format 0
10 8 checking changesets
11 9 checking manifests
12 10 crosschecking files in changesets and manifests
13 11 checking files
14 12 1 files, 1 changesets, 1 total revisions
15 13 a
@@ -1,52 +1,51 b''
1 1 A b
2 2 b
3 3 b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
4 4 we should see two history entries
5 5 changeset: 1:386a3cc01532710ca78aed9a54fa2f459c04f29c
6 6 tag: tip
7 7 user: test
8 8 date: Mon Jan 12 13:46:40 1970 +0000
9 9 files: b
10 10 description:
11 11 2
12 12
13 13
14 14 changeset: 0:33aaa84a386bd609094aeb21a97c09436c482ef1
15 15 user: test
16 16 date: Mon Jan 12 13:46:40 1970 +0000
17 17 files: a
18 18 description:
19 19 1
20 20
21 21
22 22 we should see one log entry for a
23 23 changeset: 0:33aaa84a386b
24 24 user: test
25 25 date: Mon Jan 12 13:46:40 1970 +0000
26 26 summary: 1
27 27
28 28 this should show a revision linked to changeset 0
29 29 rev offset length base linkrev nodeid p1 p2
30 30 0 0 3 0 0 b789fdd96dc2 000000000000 000000000000
31 31 we should see one log entry for b
32 32 changeset: 1:386a3cc01532
33 33 tag: tip
34 34 user: test
35 35 date: Mon Jan 12 13:46:40 1970 +0000
36 36 summary: 2
37 37
38 38 this should show a revision linked to changeset 1
39 39 rev offset length base linkrev nodeid p1 p2
40 40 0 0 65 0 1 9a263dd772e0 000000000000 000000000000
41 41 this should show the rename information in the metadata
42 42 copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
43 43 copy: a
44 44 566e338d09a089ba737c21e0d3759980 .hg/data/b.d
45 45 60b725f10c9c85c70d97880dfe8191b3 bsum
46 46 60b725f10c9c85c70d97880dfe8191b3 asum
47 repository uses revlog format 0
48 47 checking changesets
49 48 checking manifests
50 49 crosschecking files in changesets and manifests
51 50 checking files
52 51 2 files, 2 changesets, 2 total revisions
@@ -1,6 +1,5 b''
1 repository uses revlog format 0
2 1 checking changesets
3 2 checking manifests
4 3 crosschecking files in changesets and manifests
5 4 checking files
6 5 0 files, 0 changesets, 0 total revisions
@@ -1,60 +1,59 b''
1 1 changeset: 4:f6c172c6198c
2 2 tag: tip
3 3 parent: 1:448a8c5e42f1
4 4 parent: 2:7c5dc2e857f2
5 5 user: test
6 6 date: Mon Jan 12 13:46:40 1970 +0000
7 7 summary: merge a/b -> blah
8 8
9 9 changeset: 3:13d875a22764
10 10 parent: 2:7c5dc2e857f2
11 11 parent: 1:448a8c5e42f1
12 12 user: test
13 13 date: Mon Jan 12 13:46:40 1970 +0000
14 14 summary: merge b/a -> blah
15 15
16 16 changeset: 2:7c5dc2e857f2
17 17 parent: 0:dc1751ec2e9d
18 18 user: test
19 19 date: Mon Jan 12 13:46:40 1970 +0000
20 20 summary: branch b
21 21
22 22 changeset: 1:448a8c5e42f1
23 23 user: test
24 24 date: Mon Jan 12 13:46:40 1970 +0000
25 25 summary: branch a
26 26
27 27 changeset: 0:dc1751ec2e9d
28 28 user: test
29 29 date: Mon Jan 12 13:46:40 1970 +0000
30 30 summary: test
31 31
32 32 rev offset length base linkrev nodeid p1 p2
33 33 0 0 64 0 0 dc1751ec2e9d 000000000000 000000000000
34 34 1 64 68 1 1 448a8c5e42f1 dc1751ec2e9d 000000000000
35 35 2 132 68 2 2 7c5dc2e857f2 dc1751ec2e9d 000000000000
36 36 3 200 75 3 3 13d875a22764 7c5dc2e857f2 448a8c5e42f1
37 37 4 275 29 3 4 f6c172c6198c 448a8c5e42f1 7c5dc2e857f2
38 38
39 39 1
40 40 79d7492df40aa0fa093ec4209be78043c181f094 644 a
41 41 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 b
42 42 2
43 43 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 a
44 44 79d7492df40aa0fa093ec4209be78043c181f094 644 b
45 45 3
46 46 79d7492df40aa0fa093ec4209be78043c181f094 644 a
47 47 79d7492df40aa0fa093ec4209be78043c181f094 644 b
48 48 4
49 49 79d7492df40aa0fa093ec4209be78043c181f094 644 a
50 50 79d7492df40aa0fa093ec4209be78043c181f094 644 b
51 51
52 52 rev offset length base linkrev nodeid p1 p2
53 53 0 0 5 0 0 2ed2a3912a0b 000000000000 000000000000
54 54 1 5 6 1 1 79d7492df40a 2ed2a3912a0b 000000000000
55 repository uses revlog format 0
56 55 checking changesets
57 56 checking manifests
58 57 crosschecking files in changesets and manifests
59 58 checking files
60 59 2 files, 5 changesets, 4 total revisions
@@ -1,73 +1,72 b''
1 1 creating base
2 2 creating branch a
3 3 creating branch b
4 4 we shouldn't have anything but n state here
5 5 n 644 2 bar
6 6 n 644 3 baz
7 7 n 644 3 foo
8 8 n 644 2 quux
9 9 merging
10 10 pulling from ../a
11 11 searching for changes
12 12 adding changesets
13 13 adding manifests
14 14 adding file changes
15 15 added 1 changesets with 2 changes to 2 files (+1 heads)
16 16 (run 'hg heads' to see heads, 'hg merge' to merge)
17 17 merging for foo
18 18 resolving manifests
19 19 getting bar
20 20 merging foo
21 21 resolving foo
22 22 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
23 23 (branch merge, don't forget to commit)
24 24 we shouldn't have anything but foo in merge state here
25 25 m 644 3 foo
26 26 main: we should have a merge here
27 27 rev offset length base linkrev nodeid p1 p2
28 28 0 0 77 0 0 c36078bec30d 000000000000 000000000000
29 29 1 77 73 1 1 182b283965f1 c36078bec30d 000000000000
30 30 2 150 71 2 2 a6aef98656b7 c36078bec30d 000000000000
31 31 3 221 72 3 3 0c2cc6fc80e2 182b283965f1 a6aef98656b7
32 32 log should show foo and quux changed
33 33 changeset: 3:0c2cc6fc80e2d4ee289bb658dbbe9ad932380fe9
34 34 tag: tip
35 35 parent: 1:182b283965f1069c0112784e30e7755ad1c0dd52
36 36 parent: 2:a6aef98656b71154cae9d87408abe6d0218c8045
37 37 user: test
38 38 date: Mon Jan 12 13:46:40 1970 +0000
39 39 files: foo quux
40 40 description:
41 41 merge
42 42
43 43
44 44 foo: we should have a merge here
45 45 rev offset length base linkrev nodeid p1 p2
46 46 0 0 3 0 0 b8e02f643373 000000000000 000000000000
47 47 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
48 48 2 7 4 2 2 33d1fb69067a b8e02f643373 000000000000
49 49 3 11 4 3 3 aa27919ee430 2ffeddde1b65 33d1fb69067a
50 50 bar: we shouldn't have a merge here
51 51 rev offset length base linkrev nodeid p1 p2
52 52 0 0 3 0 0 b8e02f643373 000000000000 000000000000
53 53 1 3 4 1 2 33d1fb69067a b8e02f643373 000000000000
54 54 baz: we shouldn't have a merge here
55 55 rev offset length base linkrev nodeid p1 p2
56 56 0 0 3 0 0 b8e02f643373 000000000000 000000000000
57 57 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
58 58 quux: we shouldn't have a merge here
59 59 rev offset length base linkrev nodeid p1 p2
60 60 0 0 3 0 0 b8e02f643373 000000000000 000000000000
61 61 1 3 5 1 3 6128c0f33108 b8e02f643373 000000000000
62 62 manifest entries should match tips of all files
63 63 33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644 bar
64 64 2ffeddde1b65b4827f6746174a145474129fa2ce 644 baz
65 65 aa27919ee4303cfd575e1fb932dd64d75aa08be4 644 foo
66 66 6128c0f33108e8cfbb4e0824d13ae48b466d7280 644 quux
67 67 everything should be clean now
68 repository uses revlog format 0
69 68 checking changesets
70 69 checking manifests
71 70 crosschecking files in changesets and manifests
72 71 checking files
73 72 4 files, 4 changesets, 10 total revisions
@@ -1,19 +1,16 b''
1 repository uses revlog format 0
2 1 checking changesets
3 2 checking manifests
4 3 crosschecking files in changesets and manifests
5 4 checking files
6 5 1 files, 1 changesets, 1 total revisions
7 repository uses revlog format 0
8 6 checking changesets
9 7 checking manifests
10 8 crosschecking files in changesets and manifests
11 9 checking files
12 10 verify failed
13 repository uses revlog format 0
14 11 checking changesets
15 12 checking manifests
16 13 crosschecking files in changesets and manifests
17 14 checking files
18 15 1 files, 1 changesets, 1 total revisions
19 16 commit failed
@@ -1,11 +1,10 b''
1 1 requesting all changes
2 2 adding changesets
3 3 adding manifests
4 4 adding file changes
5 5 added 1 changesets with 1 changes to 1 files
6 repository uses revlog format 0
7 6 checking changesets
8 7 checking manifests
9 8 crosschecking files in changesets and manifests
10 9 checking files
11 10 1 files, 1 changesets, 1 total revisions
@@ -1,25 +1,24 b''
1 1 requesting all changes
2 2 adding changesets
3 3 adding manifests
4 4 adding file changes
5 5 added 1 changesets with 1 changes to 1 files
6 6 pulling from ../source2
7 7 pulling from ../source1
8 8 requesting all changes
9 9 adding changesets
10 10 adding manifests
11 11 adding file changes
12 12 added 10 changesets with 10 changes to 1 files
13 13 (run 'hg update' to get a working copy)
14 14 searching for changes
15 15 adding changesets
16 16 adding manifests
17 17 adding file changes
18 18 added 1 changesets with 1 changes to 1 files (+1 heads)
19 19 (run 'hg heads' to see heads, 'hg merge' to merge)
20 repository uses revlog format 0
21 20 checking changesets
22 21 checking manifests
23 22 crosschecking files in changesets and manifests
24 23 checking files
25 24 1 files, 11 changesets, 11 total revisions
@@ -1,24 +1,22 b''
1 1 pulling from source1
2 2 requesting all changes
3 3 adding changesets
4 4 adding manifests
5 5 adding file changes
6 6 added 10 changesets with 10 changes to 1 files
7 7 (run 'hg update' to get a working copy)
8 8 requesting all changes
9 9 adding changesets
10 10 adding manifests
11 11 adding file changes
12 12 added 10 changesets with 10 changes to 1 files
13 repository uses revlog format 0
14 13 checking changesets
15 14 checking manifests
16 15 crosschecking files in changesets and manifests
17 16 checking files
18 17 1 files, 10 changesets, 10 total revisions
19 repository uses revlog format 0
20 18 checking changesets
21 19 checking manifests
22 20 crosschecking files in changesets and manifests
23 21 checking files
24 22 1 files, 10 changesets, 10 total revisions
@@ -1,23 +1,21 b''
1 1 adding foo
2 repository uses revlog format 0
3 2 checking changesets
4 3 checking manifests
5 4 crosschecking files in changesets and manifests
6 5 checking files
7 6 1 files, 1 changesets, 1 total revisions
8 7 requesting all changes
9 8 adding changesets
10 9 adding manifests
11 10 adding file changes
12 11 added 1 changesets with 1 changes to 1 files
13 repository uses revlog format 0
14 12 checking changesets
15 13 checking manifests
16 14 crosschecking files in changesets and manifests
17 15 checking files
18 16 1 files, 1 changesets, 1 total revisions
19 17 foo
20 18 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
21 19 pulling from http://localhost:20059/
22 20 searching for changes
23 21 no changes found
@@ -1,146 +1,135 b''
1 1 rev offset length base linkrev nodeid p1 p2
2 2 0 0 3 0 0 362fef284ce2 000000000000 000000000000
3 3 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
4 4 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
5 5 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
6 6 rev offset length base linkrev nodeid p1 p2
7 7 0 0 75 0 7 905359268f77 000000000000 000000000000
8 8 rev offset length base linkrev nodeid p1 p2
9 9 0 0 75 0 8 905359268f77 000000000000 000000000000
10 10 rev offset length base linkrev nodeid p1 p2
11 11 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
12 12 rev offset length base linkrev nodeid p1 p2
13 13 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
14 14 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
15 15 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
16 16 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
17 17 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
18 18 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
19 19 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
20 repository uses revlog format 0
21 20 checking changesets
22 21 checking manifests
23 22 crosschecking files in changesets and manifests
24 23 checking files
25 24 4 files, 9 changesets, 7 total revisions
26 25 pushing to test-0
27 26 searching for changes
28 27 adding changesets
29 28 adding manifests
30 29 adding file changes
31 30 added 1 changesets with 1 changes to 1 files
32 repository uses revlog format 0
33 31 checking changesets
34 32 checking manifests
35 33 crosschecking files in changesets and manifests
36 34 checking files
37 35 1 files, 1 changesets, 1 total revisions
38 36 pushing to test-1
39 37 searching for changes
40 38 adding changesets
41 39 adding manifests
42 40 adding file changes
43 41 added 2 changesets with 2 changes to 1 files
44 repository uses revlog format 0
45 42 checking changesets
46 43 checking manifests
47 44 crosschecking files in changesets and manifests
48 45 checking files
49 46 1 files, 2 changesets, 2 total revisions
50 47 pushing to test-2
51 48 searching for changes
52 49 adding changesets
53 50 adding manifests
54 51 adding file changes
55 52 added 3 changesets with 3 changes to 1 files
56 repository uses revlog format 0
57 53 checking changesets
58 54 checking manifests
59 55 crosschecking files in changesets and manifests
60 56 checking files
61 57 1 files, 3 changesets, 3 total revisions
62 58 pushing to test-3
63 59 searching for changes
64 60 adding changesets
65 61 adding manifests
66 62 adding file changes
67 63 added 4 changesets with 4 changes to 1 files
68 repository uses revlog format 0
69 64 checking changesets
70 65 checking manifests
71 66 crosschecking files in changesets and manifests
72 67 checking files
73 68 1 files, 4 changesets, 4 total revisions
74 69 pushing to test-4
75 70 searching for changes
76 71 adding changesets
77 72 adding manifests
78 73 adding file changes
79 74 added 2 changesets with 2 changes to 1 files
80 repository uses revlog format 0
81 75 checking changesets
82 76 checking manifests
83 77 crosschecking files in changesets and manifests
84 78 checking files
85 79 1 files, 2 changesets, 2 total revisions
86 80 pushing to test-5
87 81 searching for changes
88 82 adding changesets
89 83 adding manifests
90 84 adding file changes
91 85 added 3 changesets with 3 changes to 1 files
92 repository uses revlog format 0
93 86 checking changesets
94 87 checking manifests
95 88 crosschecking files in changesets and manifests
96 89 checking files
97 90 1 files, 3 changesets, 3 total revisions
98 91 pushing to test-6
99 92 searching for changes
100 93 adding changesets
101 94 adding manifests
102 95 adding file changes
103 96 added 4 changesets with 5 changes to 2 files
104 repository uses revlog format 0
105 97 checking changesets
106 98 checking manifests
107 99 crosschecking files in changesets and manifests
108 100 checking files
109 101 2 files, 4 changesets, 5 total revisions
110 102 pushing to test-7
111 103 searching for changes
112 104 adding changesets
113 105 adding manifests
114 106 adding file changes
115 107 added 5 changesets with 6 changes to 3 files
116 repository uses revlog format 0
117 108 checking changesets
118 109 checking manifests
119 110 crosschecking files in changesets and manifests
120 111 checking files
121 112 3 files, 5 changesets, 6 total revisions
122 113 pushing to test-8
123 114 searching for changes
124 115 adding changesets
125 116 adding manifests
126 117 adding file changes
127 118 added 5 changesets with 5 changes to 2 files
128 repository uses revlog format 0
129 119 checking changesets
130 120 checking manifests
131 121 crosschecking files in changesets and manifests
132 122 checking files
133 123 2 files, 5 changesets, 5 total revisions
134 124 pulling from ../test-7
135 125 searching for changes
136 126 adding changesets
137 127 adding manifests
138 128 adding file changes
139 129 added 4 changesets with 2 changes to 3 files (+1 heads)
140 130 (run 'hg heads' to see heads, 'hg merge' to merge)
141 repository uses revlog format 0
142 131 checking changesets
143 132 checking manifests
144 133 crosschecking files in changesets and manifests
145 134 checking files
146 135 4 files, 9 changesets, 7 total revisions
@@ -1,23 +1,21 b''
1 1 adding foo
2 repository uses revlog format 0
3 2 checking changesets
4 3 checking manifests
5 4 crosschecking files in changesets and manifests
6 5 checking files
7 6 1 files, 1 changesets, 1 total revisions
8 7 pulling from ../branch
9 8 searching for changes
10 9 adding changesets
11 10 adding manifests
12 11 adding file changes
13 12 added 1 changesets with 1 changes to 1 files
14 13 (run 'hg update' to get a working copy)
15 repository uses revlog format 0
16 14 checking changesets
17 15 checking manifests
18 16 crosschecking files in changesets and manifests
19 17 checking files
20 18 1 files, 2 changesets, 2 total revisions
21 19 foo
22 20 bar
23 21 6f4310b00b9a147241b071a60c28a650827fb03d 644 foo
@@ -1,63 +1,61 b''
1 1 # creating 'remote'
2 2 # clone remote
3 3 requesting all changes
4 4 adding changesets
5 5 adding manifests
6 6 adding file changes
7 7 added 1 changesets with 1 changes to 1 files
8 8 # verify
9 repository uses revlog format 0
10 9 checking changesets
11 10 checking manifests
12 11 crosschecking files in changesets and manifests
13 12 checking files
14 13 1 files, 1 changesets, 1 total revisions
15 14 # empty default pull
16 15 default = ssh://user@dummy/remote
17 16 pulling from ssh://user@dummy/remote
18 17 searching for changes
19 18 no changes found
20 19 # local change
21 20 # updating rc
22 21 # find outgoing
23 22 searching for changes
24 23 changeset: 1:c54836a570be
25 24 tag: tip
26 25 user: test
27 26 date: Mon Jan 12 13:46:40 1970 +0000
28 27 summary: add
29 28
30 29 # find incoming on the remote side
31 30 searching for changes
32 31 changeset: 1:c54836a570be
33 32 tag: tip
34 33 user: test
35 34 date: Mon Jan 12 13:46:40 1970 +0000
36 35 summary: add
37 36
38 37 # push
39 38 pushing to ssh://user@dummy/remote
40 39 searching for changes
41 40 remote: adding changesets
42 41 remote: adding manifests
43 42 remote: adding file changes
44 43 remote: added 1 changesets with 1 changes to 1 files
45 44 # check remote tip
46 45 changeset: 1:c54836a570be
47 46 tag: tip
48 47 user: test
49 48 date: Mon Jan 12 13:46:40 1970 +0000
50 49 summary: add
51 50
52 repository uses revlog format 0
53 51 checking changesets
54 52 checking manifests
55 53 crosschecking files in changesets and manifests
56 54 checking files
57 55 1 files, 2 changesets, 2 total revisions
58 56 bleah
59 57 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
60 58 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
61 59 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
62 60 Got arguments 1:user@dummy 2:hg -R local serve --stdio 3: 4: 5:
63 61 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
@@ -1,24 +1,23 b''
1 1 abort: Connection refused
2 2 255
3 3 copy: No such file or directory
4 4 changeset: 0:53e17d176ae6
5 5 tag: tip
6 6 user: test
7 7 date: Mon Jan 12 13:46:40 1970 +0000
8 8 summary: test
9 9
10 10 requesting all changes
11 11 adding changesets
12 12 adding manifests
13 13 adding file changes
14 14 added 1 changesets with 1 changes to 1 files
15 repository uses revlog format 0
16 15 checking changesets
17 16 checking manifests
18 17 crosschecking files in changesets and manifests
19 18 checking files
20 19 1 files, 1 changesets, 1 total revisions
21 20 foo
22 21 pulling from old-http://localhost:20059/remote
23 22 searching for changes
24 23 no changes found
@@ -1,20 +1,18 b''
1 repository uses revlog format 0
2 1 checking changesets
3 2 checking manifests
4 3 crosschecking files in changesets and manifests
5 4 checking files
6 5 1 files, 1 changesets, 1 total revisions
7 6 changeset: 0:0acdaf898367
8 7 tag: tip
9 8 user: test
10 9 date: Mon Jan 12 13:46:40 1970 +0000
11 10 summary: test
12 11
13 12 rolling back last transaction
14 repository uses revlog format 0
15 13 checking changesets
16 14 checking manifests
17 15 crosschecking files in changesets and manifests
18 16 checking files
19 17 0 files, 0 changesets, 0 total revisions
20 18 A a