##// END OF EJS Templates
Allow reverting a deleted file with two parents...
Matt Mackall -
r1448:182879d7 default
parent child Browse files
Show More
@@ -1,1476 +1,1472 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14 14
15 15 class localrepository:
16 16 def __init__(self, ui, path=None, create=0):
17 17 if not path:
18 18 p = os.getcwd()
19 19 while not os.path.isdir(os.path.join(p, ".hg")):
20 20 oldp = p
21 21 p = os.path.dirname(p)
22 22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 23 path = p
24 24 self.path = os.path.join(path, ".hg")
25 25
26 26 if not create and not os.path.isdir(self.path):
27 27 raise repo.RepoError(_("repository %s not found") % self.path)
28 28
29 29 self.root = os.path.abspath(path)
30 30 self.ui = ui
31 31 self.opener = util.opener(self.path)
32 32 self.wopener = util.opener(self.root)
33 33 self.manifest = manifest.manifest(self.opener)
34 34 self.changelog = changelog.changelog(self.opener)
35 35 self.tagscache = None
36 36 self.nodetagscache = None
37 37 self.encodepats = None
38 38 self.decodepats = None
39 39
40 40 if create:
41 41 os.mkdir(self.path)
42 42 os.mkdir(self.join("data"))
43 43
44 44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 45 try:
46 46 self.ui.readconfig(self.opener("hgrc"))
47 47 except IOError: pass
48 48
49 49 def hook(self, name, **args):
50 50 s = self.ui.config("hooks", name)
51 51 if s:
52 52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 53 old = {}
54 54 for k, v in args.items():
55 55 k = k.upper()
56 56 old[k] = os.environ.get(k, None)
57 57 os.environ[k] = v
58 58
59 59 # Hooks run in the repository root
60 60 olddir = os.getcwd()
61 61 os.chdir(self.root)
62 62 r = os.system(s)
63 63 os.chdir(olddir)
64 64
65 65 for k, v in old.items():
66 66 if v != None:
67 67 os.environ[k] = v
68 68 else:
69 69 del os.environ[k]
70 70
71 71 if r:
72 72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 73 (name, r))
74 74 return False
75 75 return True
76 76
77 77 def tags(self):
78 78 '''return a mapping of tag to node'''
79 79 if not self.tagscache:
80 80 self.tagscache = {}
81 81 def addtag(self, k, n):
82 82 try:
83 83 bin_n = bin(n)
84 84 except TypeError:
85 85 bin_n = ''
86 86 self.tagscache[k.strip()] = bin_n
87 87
88 88 try:
89 89 # read each head of the tags file, ending with the tip
90 90 # and add each tag found to the map, with "newer" ones
91 91 # taking precedence
92 92 fl = self.file(".hgtags")
93 93 h = fl.heads()
94 94 h.reverse()
95 95 for r in h:
96 96 for l in fl.read(r).splitlines():
97 97 if l:
98 98 n, k = l.split(" ", 1)
99 99 addtag(self, k, n)
100 100 except KeyError:
101 101 pass
102 102
103 103 try:
104 104 f = self.opener("localtags")
105 105 for l in f:
106 106 n, k = l.split(" ", 1)
107 107 addtag(self, k, n)
108 108 except IOError:
109 109 pass
110 110
111 111 self.tagscache['tip'] = self.changelog.tip()
112 112
113 113 return self.tagscache
114 114
115 115 def tagslist(self):
116 116 '''return a list of tags ordered by revision'''
117 117 l = []
118 118 for t, n in self.tags().items():
119 119 try:
120 120 r = self.changelog.rev(n)
121 121 except:
122 122 r = -2 # sort to the beginning of the list if unknown
123 123 l.append((r,t,n))
124 124 l.sort()
125 125 return [(t,n) for r,t,n in l]
126 126
127 127 def nodetags(self, node):
128 128 '''return the tags associated with a node'''
129 129 if not self.nodetagscache:
130 130 self.nodetagscache = {}
131 131 for t,n in self.tags().items():
132 132 self.nodetagscache.setdefault(n,[]).append(t)
133 133 return self.nodetagscache.get(node, [])
134 134
135 135 def lookup(self, key):
136 136 try:
137 137 return self.tags()[key]
138 138 except KeyError:
139 139 try:
140 140 return self.changelog.lookup(key)
141 141 except:
142 142 raise repo.RepoError(_("unknown revision '%s'") % key)
143 143
144 144 def dev(self):
145 145 return os.stat(self.path).st_dev
146 146
147 147 def local(self):
148 148 return True
149 149
150 150 def join(self, f):
151 151 return os.path.join(self.path, f)
152 152
153 153 def wjoin(self, f):
154 154 return os.path.join(self.root, f)
155 155
156 156 def file(self, f):
157 157 if f[0] == '/': f = f[1:]
158 158 return filelog.filelog(self.opener, f)
159 159
160 160 def getcwd(self):
161 161 return self.dirstate.getcwd()
162 162
163 163 def wfile(self, f, mode='r'):
164 164 return self.wopener(f, mode)
165 165
166 166 def wread(self, filename):
167 167 if self.encodepats == None:
168 168 l = []
169 169 for pat, cmd in self.ui.configitems("encode"):
170 170 mf = util.matcher("", "/", [pat], [], [])[1]
171 171 l.append((mf, cmd))
172 172 self.encodepats = l
173 173
174 174 data = self.wopener(filename, 'r').read()
175 175
176 176 for mf, cmd in self.encodepats:
177 177 if mf(filename):
178 178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 179 data = util.filter(data, cmd)
180 180 break
181 181
182 182 return data
183 183
184 184 def wwrite(self, filename, data, fd=None):
185 185 if self.decodepats == None:
186 186 l = []
187 187 for pat, cmd in self.ui.configitems("decode"):
188 188 mf = util.matcher("", "/", [pat], [], [])[1]
189 189 l.append((mf, cmd))
190 190 self.decodepats = l
191 191
192 192 for mf, cmd in self.decodepats:
193 193 if mf(filename):
194 194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 195 data = util.filter(data, cmd)
196 196 break
197 197
198 198 if fd:
199 199 return fd.write(data)
200 200 return self.wopener(filename, 'w').write(data)
201 201
202 202 def transaction(self):
203 203 # save dirstate for undo
204 204 try:
205 205 ds = self.opener("dirstate").read()
206 206 except IOError:
207 207 ds = ""
208 208 self.opener("journal.dirstate", "w").write(ds)
209 209
210 210 def after():
211 211 util.rename(self.join("journal"), self.join("undo"))
212 212 util.rename(self.join("journal.dirstate"),
213 213 self.join("undo.dirstate"))
214 214
215 215 return transaction.transaction(self.ui.warn, self.opener,
216 216 self.join("journal"), after)
217 217
218 218 def recover(self):
219 219 lock = self.lock()
220 220 if os.path.exists(self.join("journal")):
221 221 self.ui.status(_("rolling back interrupted transaction\n"))
222 222 return transaction.rollback(self.opener, self.join("journal"))
223 223 else:
224 224 self.ui.warn(_("no interrupted transaction available\n"))
225 225
226 226 def undo(self):
227 227 lock = self.lock()
228 228 if os.path.exists(self.join("undo")):
229 229 self.ui.status(_("rolling back last transaction\n"))
230 230 transaction.rollback(self.opener, self.join("undo"))
231 231 self.dirstate = None
232 232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
233 233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
234 234 else:
235 235 self.ui.warn(_("no undo information available\n"))
236 236
237 237 def lock(self, wait=1):
238 238 try:
239 239 return lock.lock(self.join("lock"), 0)
240 240 except lock.LockHeld, inst:
241 241 if wait:
242 242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
243 243 return lock.lock(self.join("lock"), wait)
244 244 raise inst
245 245
246 246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
247 247 orig_parent = self.dirstate.parents()[0] or nullid
248 248 p1 = p1 or self.dirstate.parents()[0] or nullid
249 249 p2 = p2 or self.dirstate.parents()[1] or nullid
250 250 c1 = self.changelog.read(p1)
251 251 c2 = self.changelog.read(p2)
252 252 m1 = self.manifest.read(c1[0])
253 253 mf1 = self.manifest.readflags(c1[0])
254 254 m2 = self.manifest.read(c2[0])
255 255 changed = []
256 256
257 257 if orig_parent == p1:
258 258 update_dirstate = 1
259 259 else:
260 260 update_dirstate = 0
261 261
262 262 tr = self.transaction()
263 263 mm = m1.copy()
264 264 mfm = mf1.copy()
265 265 linkrev = self.changelog.count()
266 266 for f in files:
267 267 try:
268 268 t = self.wread(f)
269 269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
270 270 r = self.file(f)
271 271 mfm[f] = tm
272 272
273 273 fp1 = m1.get(f, nullid)
274 274 fp2 = m2.get(f, nullid)
275 275
276 276 # is the same revision on two branches of a merge?
277 277 if fp2 == fp1:
278 278 fp2 = nullid
279 279
280 280 if fp2 != nullid:
281 281 # is one parent an ancestor of the other?
282 282 fpa = r.ancestor(fp1, fp2)
283 283 if fpa == fp1:
284 284 fp1, fp2 = fp2, nullid
285 285 elif fpa == fp2:
286 286 fp2 = nullid
287 287
288 288 # is the file unmodified from the parent?
289 289 if t == r.read(fp1):
290 290 # record the proper existing parent in manifest
291 291 # no need to add a revision
292 292 mm[f] = fp1
293 293 continue
294 294
295 295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
296 296 changed.append(f)
297 297 if update_dirstate:
298 298 self.dirstate.update([f], "n")
299 299 except IOError:
300 300 try:
301 301 del mm[f]
302 302 del mfm[f]
303 303 if update_dirstate:
304 304 self.dirstate.forget([f])
305 305 except:
306 306 # deleted from p2?
307 307 pass
308 308
309 309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
310 310 user = user or self.ui.username()
311 311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
312 312 tr.close()
313 313 if update_dirstate:
314 314 self.dirstate.setparents(n, nullid)
315 315
316 316 def commit(self, files = None, text = "", user = None, date = None,
317 317 match = util.always, force=False):
318 318 commit = []
319 319 remove = []
320 320 changed = []
321 321
322 322 if files:
323 323 for f in files:
324 324 s = self.dirstate.state(f)
325 325 if s in 'nmai':
326 326 commit.append(f)
327 327 elif s == 'r':
328 328 remove.append(f)
329 329 else:
330 330 self.ui.warn(_("%s not tracked!\n") % f)
331 331 else:
332 332 (c, a, d, u) = self.changes(match=match)
333 333 commit = c + a
334 334 remove = d
335 335
336 336 p1, p2 = self.dirstate.parents()
337 337 c1 = self.changelog.read(p1)
338 338 c2 = self.changelog.read(p2)
339 339 m1 = self.manifest.read(c1[0])
340 340 mf1 = self.manifest.readflags(c1[0])
341 341 m2 = self.manifest.read(c2[0])
342 342
343 343 if not commit and not remove and not force and p2 == nullid:
344 344 self.ui.status(_("nothing changed\n"))
345 345 return None
346 346
347 347 if not self.hook("precommit"):
348 348 return None
349 349
350 350 lock = self.lock()
351 351 tr = self.transaction()
352 352
353 353 # check in files
354 354 new = {}
355 355 linkrev = self.changelog.count()
356 356 commit.sort()
357 357 for f in commit:
358 358 self.ui.note(f + "\n")
359 359 try:
360 360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
361 361 t = self.wread(f)
362 362 except IOError:
363 363 self.ui.warn(_("trouble committing %s!\n") % f)
364 364 raise
365 365
366 366 r = self.file(f)
367 367
368 368 meta = {}
369 369 cp = self.dirstate.copied(f)
370 370 if cp:
371 371 meta["copy"] = cp
372 372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
373 373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
374 374 fp1, fp2 = nullid, nullid
375 375 else:
376 376 fp1 = m1.get(f, nullid)
377 377 fp2 = m2.get(f, nullid)
378 378
379 379 # is the same revision on two branches of a merge?
380 380 if fp2 == fp1:
381 381 fp2 = nullid
382 382
383 383 if fp2 != nullid:
384 384 # is one parent an ancestor of the other?
385 385 fpa = r.ancestor(fp1, fp2)
386 386 if fpa == fp1:
387 387 fp1, fp2 = fp2, nullid
388 388 elif fpa == fp2:
389 389 fp2 = nullid
390 390
391 391 # is the file unmodified from the parent?
392 392 if not meta and t == r.read(fp1):
393 393 # record the proper existing parent in manifest
394 394 # no need to add a revision
395 395 new[f] = fp1
396 396 continue
397 397
398 398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
399 399 # remember what we've added so that we can later calculate
400 400 # the files to pull from a set of changesets
401 401 changed.append(f)
402 402
403 403 # update manifest
404 404 m1.update(new)
405 405 for f in remove:
406 406 if f in m1:
407 407 del m1[f]
408 408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
409 409 (new, remove))
410 410
411 411 # add changeset
412 412 new = new.keys()
413 413 new.sort()
414 414
415 415 if not text:
416 416 edittext = ""
417 417 if p2 != nullid:
418 418 edittext += "HG: branch merge\n"
419 419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
420 420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
421 421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
422 422 if not changed and not remove:
423 423 edittext += "HG: no files changed\n"
424 424 edittext = self.ui.edit(edittext)
425 425 if not edittext.rstrip():
426 426 return None
427 427 text = edittext
428 428
429 429 user = user or self.ui.username()
430 430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
431 431 tr.close()
432 432
433 433 self.dirstate.setparents(n)
434 434 self.dirstate.update(new, "n")
435 435 self.dirstate.forget(remove)
436 436
437 437 if not self.hook("commit", node=hex(n)):
438 438 return None
439 439 return n
440 440
441 441 def walk(self, node=None, files=[], match=util.always):
442 442 if node:
443 443 for fn in self.manifest.read(self.changelog.read(node)[0]):
444 444 if match(fn): yield 'm', fn
445 445 else:
446 446 for src, fn in self.dirstate.walk(files, match):
447 447 yield src, fn
448 448
449 449 def changes(self, node1 = None, node2 = None, files = [],
450 450 match = util.always):
451 451 mf2, u = None, []
452 452
453 453 def fcmp(fn, mf):
454 454 t1 = self.wread(fn)
455 455 t2 = self.file(fn).read(mf.get(fn, nullid))
456 456 return cmp(t1, t2)
457 457
458 458 def mfmatches(node):
459 459 mf = dict(self.manifest.read(node))
460 460 for fn in mf.keys():
461 461 if not match(fn):
462 462 del mf[fn]
463 463 return mf
464 464
465 465 # are we comparing the working directory?
466 466 if not node2:
467 467 l, c, a, d, u = self.dirstate.changes(files, match)
468 468
469 469 # are we comparing working dir against its parent?
470 470 if not node1:
471 471 if l:
472 472 # do a full compare of any files that might have changed
473 473 change = self.changelog.read(self.dirstate.parents()[0])
474 474 mf2 = mfmatches(change[0])
475 475 for f in l:
476 476 if fcmp(f, mf2):
477 477 c.append(f)
478 478
479 479 for l in c, a, d, u:
480 480 l.sort()
481 481
482 482 return (c, a, d, u)
483 483
484 484 # are we comparing working dir against non-tip?
485 485 # generate a pseudo-manifest for the working dir
486 486 if not node2:
487 487 if not mf2:
488 488 change = self.changelog.read(self.dirstate.parents()[0])
489 489 mf2 = mfmatches(change[0])
490 490 for f in a + c + l:
491 491 mf2[f] = ""
492 492 for f in d:
493 493 if f in mf2: del mf2[f]
494 494 else:
495 495 change = self.changelog.read(node2)
496 496 mf2 = mfmatches(change[0])
497 497
498 498 # flush lists from dirstate before comparing manifests
499 499 c, a = [], []
500 500
501 501 change = self.changelog.read(node1)
502 502 mf1 = mfmatches(change[0])
503 503
504 504 for fn in mf2:
505 505 if mf1.has_key(fn):
506 506 if mf1[fn] != mf2[fn]:
507 507 if mf2[fn] != "" or fcmp(fn, mf1):
508 508 c.append(fn)
509 509 del mf1[fn]
510 510 else:
511 511 a.append(fn)
512 512
513 513 d = mf1.keys()
514 514
515 515 for l in c, a, d, u:
516 516 l.sort()
517 517
518 518 return (c, a, d, u)
519 519
520 520 def add(self, list):
521 521 for f in list:
522 522 p = self.wjoin(f)
523 523 if not os.path.exists(p):
524 524 self.ui.warn(_("%s does not exist!\n") % f)
525 525 elif not os.path.isfile(p):
526 526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
527 527 elif self.dirstate.state(f) in 'an':
528 528 self.ui.warn(_("%s already tracked!\n") % f)
529 529 else:
530 530 self.dirstate.update([f], "a")
531 531
532 532 def forget(self, list):
533 533 for f in list:
534 534 if self.dirstate.state(f) not in 'ai':
535 535 self.ui.warn(_("%s not added!\n") % f)
536 536 else:
537 537 self.dirstate.forget([f])
538 538
539 539 def remove(self, list, unlink=False):
540 540 if unlink:
541 541 for f in list:
542 542 try:
543 543 util.unlink(self.wjoin(f))
544 544 except OSError, inst:
545 545 if inst.errno != errno.ENOENT: raise
546 546 for f in list:
547 547 p = self.wjoin(f)
548 548 if os.path.exists(p):
549 549 self.ui.warn(_("%s still exists!\n") % f)
550 550 elif self.dirstate.state(f) == 'a':
551 551 self.ui.warn(_("%s never committed!\n") % f)
552 552 self.dirstate.forget([f])
553 553 elif f not in self.dirstate:
554 554 self.ui.warn(_("%s not tracked!\n") % f)
555 555 else:
556 556 self.dirstate.update([f], "r")
557 557
558 558 def undelete(self, list):
559 pl = self.dirstate.parents()
560 if pl[1] != nullid:
561 self.ui.warn("aborting: outstanding uncommitted merges\n")
562 return 1
563 p = pl[0]
559 p = self.dirstate.parents()[0]
564 560 mn = self.changelog.read(p)[0]
565 561 mf = self.manifest.readflags(mn)
566 562 m = self.manifest.read(mn)
567 563 for f in list:
568 564 if self.dirstate.state(f) not in "r":
569 565 self.ui.warn("%s not removed!\n" % f)
570 566 else:
571 567 t = self.file(f).read(m[f])
572 568 try:
573 569 self.wwrite(f, t)
574 570 except IOError, e:
575 571 if e.errno != errno.ENOENT:
576 572 raise
577 573 os.makedirs(os.path.dirname(self.wjoin(f)))
578 574 self.wwrite(f, t)
579 575 util.set_exec(self.wjoin(f), mf[f])
580 576 self.dirstate.update([f], "n")
581 577
582 578 def copy(self, source, dest):
583 579 p = self.wjoin(dest)
584 580 if not os.path.exists(p):
585 581 self.ui.warn(_("%s does not exist!\n") % dest)
586 582 elif not os.path.isfile(p):
587 583 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
588 584 else:
589 585 if self.dirstate.state(dest) == '?':
590 586 self.dirstate.update([dest], "a")
591 587 self.dirstate.copy(source, dest)
592 588
593 589 def heads(self):
594 590 return self.changelog.heads()
595 591
596 592 # branchlookup returns a dict giving a list of branches for
597 593 # each head. A branch is defined as the tag of a node or
598 594 # the branch of the node's parents. If a node has multiple
599 595 # branch tags, tags are eliminated if they are visible from other
600 596 # branch tags.
601 597 #
602 598 # So, for this graph: a->b->c->d->e
603 599 # \ /
604 600 # aa -----/
605 601 # a has tag 2.6.12
606 602 # d has tag 2.6.13
607 603 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
608 604 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
609 605 # from the list.
610 606 #
611 607 # It is possible that more than one head will have the same branch tag.
612 608 # callers need to check the result for multiple heads under the same
613 609 # branch tag if that is a problem for them (ie checkout of a specific
614 610 # branch).
615 611 #
616 612 # passing in a specific branch will limit the depth of the search
617 613 # through the parents. It won't limit the branches returned in the
618 614 # result though.
619 615 def branchlookup(self, heads=None, branch=None):
620 616 if not heads:
621 617 heads = self.heads()
622 618 headt = [ h for h in heads ]
623 619 chlog = self.changelog
624 620 branches = {}
625 621 merges = []
626 622 seenmerge = {}
627 623
628 624 # traverse the tree once for each head, recording in the branches
629 625 # dict which tags are visible from this head. The branches
630 626 # dict also records which tags are visible from each tag
631 627 # while we traverse.
632 628 while headt or merges:
633 629 if merges:
634 630 n, found = merges.pop()
635 631 visit = [n]
636 632 else:
637 633 h = headt.pop()
638 634 visit = [h]
639 635 found = [h]
640 636 seen = {}
641 637 while visit:
642 638 n = visit.pop()
643 639 if n in seen:
644 640 continue
645 641 pp = chlog.parents(n)
646 642 tags = self.nodetags(n)
647 643 if tags:
648 644 for x in tags:
649 645 if x == 'tip':
650 646 continue
651 647 for f in found:
652 648 branches.setdefault(f, {})[n] = 1
653 649 branches.setdefault(n, {})[n] = 1
654 650 break
655 651 if n not in found:
656 652 found.append(n)
657 653 if branch in tags:
658 654 continue
659 655 seen[n] = 1
660 656 if pp[1] != nullid and n not in seenmerge:
661 657 merges.append((pp[1], [x for x in found]))
662 658 seenmerge[n] = 1
663 659 if pp[0] != nullid:
664 660 visit.append(pp[0])
665 661 # traverse the branches dict, eliminating branch tags from each
666 662 # head that are visible from another branch tag for that head.
667 663 out = {}
668 664 viscache = {}
669 665 for h in heads:
670 666 def visible(node):
671 667 if node in viscache:
672 668 return viscache[node]
673 669 ret = {}
674 670 visit = [node]
675 671 while visit:
676 672 x = visit.pop()
677 673 if x in viscache:
678 674 ret.update(viscache[x])
679 675 elif x not in ret:
680 676 ret[x] = 1
681 677 if x in branches:
682 678 visit[len(visit):] = branches[x].keys()
683 679 viscache[node] = ret
684 680 return ret
685 681 if h not in branches:
686 682 continue
687 683 # O(n^2), but somewhat limited. This only searches the
688 684 # tags visible from a specific head, not all the tags in the
689 685 # whole repo.
690 686 for b in branches[h]:
691 687 vis = False
692 688 for bb in branches[h].keys():
693 689 if b != bb:
694 690 if b in visible(bb):
695 691 vis = True
696 692 break
697 693 if not vis:
698 694 l = out.setdefault(h, [])
699 695 l[len(l):] = self.nodetags(b)
700 696 return out
701 697
702 698 def branches(self, nodes):
703 699 if not nodes: nodes = [self.changelog.tip()]
704 700 b = []
705 701 for n in nodes:
706 702 t = n
707 703 while n:
708 704 p = self.changelog.parents(n)
709 705 if p[1] != nullid or p[0] == nullid:
710 706 b.append((t, n, p[0], p[1]))
711 707 break
712 708 n = p[0]
713 709 return b
714 710
715 711 def between(self, pairs):
716 712 r = []
717 713
718 714 for top, bottom in pairs:
719 715 n, l, i = top, [], 0
720 716 f = 1
721 717
722 718 while n != bottom:
723 719 p = self.changelog.parents(n)[0]
724 720 if i == f:
725 721 l.append(n)
726 722 f = f * 2
727 723 n = p
728 724 i += 1
729 725
730 726 r.append(l)
731 727
732 728 return r
733 729
734 730 def newer(self, nodes):
735 731 m = {}
736 732 nl = []
737 733 pm = {}
738 734 cl = self.changelog
739 735 t = l = cl.count()
740 736
741 737 # find the lowest numbered node
742 738 for n in nodes:
743 739 l = min(l, cl.rev(n))
744 740 m[n] = 1
745 741
746 742 for i in xrange(l, t):
747 743 n = cl.node(i)
748 744 if n in m: # explicitly listed
749 745 pm[n] = 1
750 746 nl.append(n)
751 747 continue
752 748 for p in cl.parents(n):
753 749 if p in pm: # parent listed
754 750 pm[n] = 1
755 751 nl.append(n)
756 752 break
757 753
758 754 return nl
759 755
760 756 def findincoming(self, remote, base=None, heads=None):
761 757 m = self.changelog.nodemap
762 758 search = []
763 759 fetch = {}
764 760 seen = {}
765 761 seenbranch = {}
766 762 if base == None:
767 763 base = {}
768 764
769 765 # assume we're closer to the tip than the root
770 766 # and start by examining the heads
771 767 self.ui.status(_("searching for changes\n"))
772 768
773 769 if not heads:
774 770 heads = remote.heads()
775 771
776 772 unknown = []
777 773 for h in heads:
778 774 if h not in m:
779 775 unknown.append(h)
780 776 else:
781 777 base[h] = 1
782 778
783 779 if not unknown:
784 780 return None
785 781
786 782 rep = {}
787 783 reqcnt = 0
788 784
789 785 # search through remote branches
790 786 # a 'branch' here is a linear segment of history, with four parts:
791 787 # head, root, first parent, second parent
792 788 # (a branch always has two parents (or none) by definition)
793 789 unknown = remote.branches(unknown)
794 790 while unknown:
795 791 r = []
796 792 while unknown:
797 793 n = unknown.pop(0)
798 794 if n[0] in seen:
799 795 continue
800 796
801 797 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
802 798 if n[0] == nullid:
803 799 break
804 800 if n in seenbranch:
805 801 self.ui.debug(_("branch already found\n"))
806 802 continue
807 803 if n[1] and n[1] in m: # do we know the base?
808 804 self.ui.debug(_("found incomplete branch %s:%s\n")
809 805 % (short(n[0]), short(n[1])))
810 806 search.append(n) # schedule branch range for scanning
811 807 seenbranch[n] = 1
812 808 else:
813 809 if n[1] not in seen and n[1] not in fetch:
814 810 if n[2] in m and n[3] in m:
815 811 self.ui.debug(_("found new changeset %s\n") %
816 812 short(n[1]))
817 813 fetch[n[1]] = 1 # earliest unknown
818 814 base[n[2]] = 1 # latest known
819 815 continue
820 816
821 817 for a in n[2:4]:
822 818 if a not in rep:
823 819 r.append(a)
824 820 rep[a] = 1
825 821
826 822 seen[n[0]] = 1
827 823
828 824 if r:
829 825 reqcnt += 1
830 826 self.ui.debug(_("request %d: %s\n") %
831 827 (reqcnt, " ".join(map(short, r))))
832 828 for p in range(0, len(r), 10):
833 829 for b in remote.branches(r[p:p+10]):
834 830 self.ui.debug(_("received %s:%s\n") %
835 831 (short(b[0]), short(b[1])))
836 832 if b[0] in m:
837 833 self.ui.debug(_("found base node %s\n") % short(b[0]))
838 834 base[b[0]] = 1
839 835 elif b[0] not in seen:
840 836 unknown.append(b)
841 837
842 838 # do binary search on the branches we found
843 839 while search:
844 840 n = search.pop(0)
845 841 reqcnt += 1
846 842 l = remote.between([(n[0], n[1])])[0]
847 843 l.append(n[1])
848 844 p = n[0]
849 845 f = 1
850 846 for i in l:
851 847 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
852 848 if i in m:
853 849 if f <= 2:
854 850 self.ui.debug(_("found new branch changeset %s\n") %
855 851 short(p))
856 852 fetch[p] = 1
857 853 base[i] = 1
858 854 else:
859 855 self.ui.debug(_("narrowed branch search to %s:%s\n")
860 856 % (short(p), short(i)))
861 857 search.append((p, i))
862 858 break
863 859 p, f = i, f * 2
864 860
865 861 # sanity check our fetch list
866 862 for f in fetch.keys():
867 863 if f in m:
868 864 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
869 865
870 866 if base.keys() == [nullid]:
871 867 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
872 868
873 869 self.ui.note(_("found new changesets starting at ") +
874 870 " ".join([short(f) for f in fetch]) + "\n")
875 871
876 872 self.ui.debug(_("%d total queries\n") % reqcnt)
877 873
878 874 return fetch.keys()
879 875
880 876 def findoutgoing(self, remote, base=None, heads=None):
881 877 if base == None:
882 878 base = {}
883 879 self.findincoming(remote, base, heads)
884 880
885 881 self.ui.debug(_("common changesets up to ")
886 882 + " ".join(map(short, base.keys())) + "\n")
887 883
888 884 remain = dict.fromkeys(self.changelog.nodemap)
889 885
890 886 # prune everything remote has from the tree
891 887 del remain[nullid]
892 888 remove = base.keys()
893 889 while remove:
894 890 n = remove.pop(0)
895 891 if n in remain:
896 892 del remain[n]
897 893 for p in self.changelog.parents(n):
898 894 remove.append(p)
899 895
900 896 # find every node whose parents have been pruned
901 897 subset = []
902 898 for n in remain:
903 899 p1, p2 = self.changelog.parents(n)
904 900 if p1 not in remain and p2 not in remain:
905 901 subset.append(n)
906 902
907 903 # this is the set of all roots we have to push
908 904 return subset
909 905
910 906 def pull(self, remote):
911 907 lock = self.lock()
912 908
913 909 # if we have an empty repo, fetch everything
914 910 if self.changelog.tip() == nullid:
915 911 self.ui.status(_("requesting all changes\n"))
916 912 fetch = [nullid]
917 913 else:
918 914 fetch = self.findincoming(remote)
919 915
920 916 if not fetch:
921 917 self.ui.status(_("no changes found\n"))
922 918 return 1
923 919
924 920 cg = remote.changegroup(fetch)
925 921 return self.addchangegroup(cg)
926 922
927 923 def push(self, remote, force=False):
928 924 lock = remote.lock()
929 925
930 926 base = {}
931 927 heads = remote.heads()
932 928 inc = self.findincoming(remote, base, heads)
933 929 if not force and inc:
934 930 self.ui.warn(_("abort: unsynced remote changes!\n"))
935 931 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
936 932 return 1
937 933
938 934 update = self.findoutgoing(remote, base)
939 935 if not update:
940 936 self.ui.status(_("no changes found\n"))
941 937 return 1
942 938 elif not force:
943 939 if len(heads) < len(self.changelog.heads()):
944 940 self.ui.warn(_("abort: push creates new remote branches!\n"))
945 941 self.ui.status(_("(did you forget to merge?"
946 942 " use push -f to force)\n"))
947 943 return 1
948 944
949 945 cg = self.changegroup(update)
950 946 return remote.addchangegroup(cg)
951 947
952 948 def changegroup(self, basenodes):
953 949 genread = util.chunkbuffer
954 950
955 951 def gengroup():
956 952 nodes = self.newer(basenodes)
957 953
958 954 # construct the link map
959 955 linkmap = {}
960 956 for n in nodes:
961 957 linkmap[self.changelog.rev(n)] = n
962 958
963 959 # construct a list of all changed files
964 960 changed = {}
965 961 for n in nodes:
966 962 c = self.changelog.read(n)
967 963 for f in c[3]:
968 964 changed[f] = 1
969 965 changed = changed.keys()
970 966 changed.sort()
971 967
972 968 # the changegroup is changesets + manifests + all file revs
973 969 revs = [ self.changelog.rev(n) for n in nodes ]
974 970
975 971 for y in self.changelog.group(linkmap): yield y
976 972 for y in self.manifest.group(linkmap): yield y
977 973 for f in changed:
978 974 yield struct.pack(">l", len(f) + 4) + f
979 975 g = self.file(f).group(linkmap)
980 976 for y in g:
981 977 yield y
982 978
983 979 yield struct.pack(">l", 0)
984 980
985 981 return genread(gengroup())
986 982
987 983 def addchangegroup(self, source):
988 984
989 985 def getchunk():
990 986 d = source.read(4)
991 987 if not d: return ""
992 988 l = struct.unpack(">l", d)[0]
993 989 if l <= 4: return ""
994 990 d = source.read(l - 4)
995 991 if len(d) < l - 4:
996 992 raise repo.RepoError(_("premature EOF reading chunk"
997 993 " (got %d bytes, expected %d)")
998 994 % (len(d), l - 4))
999 995 return d
1000 996
1001 997 def getgroup():
1002 998 while 1:
1003 999 c = getchunk()
1004 1000 if not c: break
1005 1001 yield c
1006 1002
1007 1003 def csmap(x):
1008 1004 self.ui.debug(_("add changeset %s\n") % short(x))
1009 1005 return self.changelog.count()
1010 1006
1011 1007 def revmap(x):
1012 1008 return self.changelog.rev(x)
1013 1009
1014 1010 if not source: return
1015 1011 changesets = files = revisions = 0
1016 1012
1017 1013 tr = self.transaction()
1018 1014
1019 1015 oldheads = len(self.changelog.heads())
1020 1016
1021 1017 # pull off the changeset group
1022 1018 self.ui.status(_("adding changesets\n"))
1023 1019 co = self.changelog.tip()
1024 1020 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1025 1021 cnr, cor = map(self.changelog.rev, (cn, co))
1026 1022 if cn == nullid:
1027 1023 cnr = cor
1028 1024 changesets = cnr - cor
1029 1025
1030 1026 # pull off the manifest group
1031 1027 self.ui.status(_("adding manifests\n"))
1032 1028 mm = self.manifest.tip()
1033 1029 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1034 1030
1035 1031 # process the files
1036 1032 self.ui.status(_("adding file changes\n"))
1037 1033 while 1:
1038 1034 f = getchunk()
1039 1035 if not f: break
1040 1036 self.ui.debug(_("adding %s revisions\n") % f)
1041 1037 fl = self.file(f)
1042 1038 o = fl.count()
1043 1039 n = fl.addgroup(getgroup(), revmap, tr)
1044 1040 revisions += fl.count() - o
1045 1041 files += 1
1046 1042
1047 1043 newheads = len(self.changelog.heads())
1048 1044 heads = ""
1049 1045 if oldheads and newheads > oldheads:
1050 1046 heads = _(" (+%d heads)") % (newheads - oldheads)
1051 1047
1052 1048 self.ui.status(_("added %d changesets"
1053 1049 " with %d changes to %d files%s\n")
1054 1050 % (changesets, revisions, files, heads))
1055 1051
1056 1052 tr.close()
1057 1053
1058 1054 if changesets > 0:
1059 1055 if not self.hook("changegroup",
1060 1056 node=hex(self.changelog.node(cor+1))):
1061 1057 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1062 1058 return 1
1063 1059
1064 1060 for i in range(cor + 1, cnr + 1):
1065 1061 self.hook("commit", node=hex(self.changelog.node(i)))
1066 1062
1067 1063 return
1068 1064
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        """update the working directory to changeset `node`

        allow: permit a branch merge ("update -m" per the message below)
        force: clobber local changes ("update -C")
        choose: optional predicate; when given, only files for which
                choose(f) is true are touched
        moddirstate: when False, compute/apply file changes but leave
                     the dirstate untouched

        Returns 1 on refusal (uncommitted merge outstanding, or a
        branch-spanning update without allow/force), None otherwise.
        """
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
            return 1

        # p1: current working dir parent, p2: update target,
        # pa: their common ancestor changeset
        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        # manifest nodes and contents/exec-flags for both sides + ancestor
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        # working dir status: c=changed, a=added, d=deleted, u=unknown files
        (c, a, d, u) = self.changes()

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note(_("resolving manifests\n"))
        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
                      (short(man), short(m1n), short(m2n)))

        # merge: files needing a 3-way merge -> (my node, other node, mode)
        # get: files to fetch from the target manifest -> node
        # remove: files to delete from the working dir
        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(_(" remote %s is newer, get\n") % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(_(" updating permissions for %s\n") % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        # merge the exec bit the same way as content above
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(_(" updating permissions for %s\n") % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = _("d")
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (_(" local changed %s which remote deleted\n") % f) +
                            _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                    if r == _("d"):
                        remove.append(f)
                else:
                    self.ui.debug(_("other deleted %s\n") % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug(_("remote deleted %s\n") % f)
                        remove.append(f)
                    else:
                        self.ui.debug(_("local modified %s, keeping\n") % f)
                else:
                    self.ui.debug(_("working dir created %s, keeping\n") % f)

        # files remaining in m2 exist only on the target side
        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = _("k")
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        (_("remote changed %s which local deleted\n") % f) +
                        _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                if r == _("k"): get[f] = n
            elif f not in ma:
                self.ui.debug(_("remote created %s\n") % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug(_("local deleted %s, recreating\n") % f)
                    get[f] = n
                else:
                    self.ui.debug(_("local deleted %s\n") % f)

        del mw, m1, m2, ma

        # forced update: fetch the "other" side of every would-be merge
        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status(_("this update spans a branch"
                                 " affecting the following files:\n"))
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = _(" (resolve)")
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn(_("aborting update spanning branches!\n"))
                self.ui.status(_("(use update -m to merge across branches"
                                 " or -C to lose changes)\n"))
                return 1
            branch_merge = True

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note(_("getting %s\n") % f)
            t = self.file(f).read(get[f])
            try:
                self.wwrite(f, t)
            except IOError, e:
                if e.errno != errno.ENOENT:
                    raise
                # parent directory missing: create it and retry once
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status(_("merging %s\n") % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note(_("removing %s\n") % f)
            try:
                util.unlink(self.wjoin(f))
            except OSError, inst:
                # a file already gone is fine; anything else is reported
                if inst.errno != errno.ENOENT:
                    self.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)
1307 1303
1308 1304 def merge3(self, fn, my, other):
1309 1305 """perform a 3-way merge in the working directory"""
1310 1306
1311 1307 def temp(prefix, node):
1312 1308 pre = "%s~%s." % (os.path.basename(fn), prefix)
1313 1309 (fd, name) = tempfile.mkstemp("", pre)
1314 1310 f = os.fdopen(fd, "wb")
1315 1311 self.wwrite(fn, fl.read(node), f)
1316 1312 f.close()
1317 1313 return name
1318 1314
1319 1315 fl = self.file(fn)
1320 1316 base = fl.ancestor(my, other)
1321 1317 a = self.wjoin(fn)
1322 1318 b = temp("base", base)
1323 1319 c = temp("other", other)
1324 1320
1325 1321 self.ui.note(_("resolving %s\n") % fn)
1326 1322 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1327 1323 (fn, short(my), short(other), short(base)))
1328 1324
1329 1325 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1330 1326 or "hgmerge")
1331 1327 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1332 1328 if r:
1333 1329 self.ui.warn(_("merging %s failed!\n") % fn)
1334 1330
1335 1331 os.unlink(b)
1336 1332 os.unlink(c)
1337 1333
1338 1334 def verify(self):
1339 1335 filelinkrevs = {}
1340 1336 filenodes = {}
1341 1337 changesets = revisions = files = 0
1342 1338 errors = [0]
1343 1339 neededmanifests = {}
1344 1340
1345 1341 def err(msg):
1346 1342 self.ui.warn(msg + "\n")
1347 1343 errors[0] += 1
1348 1344
1349 1345 seen = {}
1350 1346 self.ui.status(_("checking changesets\n"))
1351 1347 for i in range(self.changelog.count()):
1352 1348 changesets += 1
1353 1349 n = self.changelog.node(i)
1354 1350 l = self.changelog.linkrev(n)
1355 1351 if l != i:
1356 1352 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1357 1353 if n in seen:
1358 1354 err(_("duplicate changeset at revision %d") % i)
1359 1355 seen[n] = 1
1360 1356
1361 1357 for p in self.changelog.parents(n):
1362 1358 if p not in self.changelog.nodemap:
1363 1359 err(_("changeset %s has unknown parent %s") %
1364 1360 (short(n), short(p)))
1365 1361 try:
1366 1362 changes = self.changelog.read(n)
1367 1363 except Exception, inst:
1368 1364 err(_("unpacking changeset %s: %s") % (short(n), inst))
1369 1365
1370 1366 neededmanifests[changes[0]] = n
1371 1367
1372 1368 for f in changes[3]:
1373 1369 filelinkrevs.setdefault(f, []).append(i)
1374 1370
1375 1371 seen = {}
1376 1372 self.ui.status(_("checking manifests\n"))
1377 1373 for i in range(self.manifest.count()):
1378 1374 n = self.manifest.node(i)
1379 1375 l = self.manifest.linkrev(n)
1380 1376
1381 1377 if l < 0 or l >= self.changelog.count():
1382 1378 err(_("bad manifest link (%d) at revision %d") % (l, i))
1383 1379
1384 1380 if n in neededmanifests:
1385 1381 del neededmanifests[n]
1386 1382
1387 1383 if n in seen:
1388 1384 err(_("duplicate manifest at revision %d") % i)
1389 1385
1390 1386 seen[n] = 1
1391 1387
1392 1388 for p in self.manifest.parents(n):
1393 1389 if p not in self.manifest.nodemap:
1394 1390 err(_("manifest %s has unknown parent %s") %
1395 1391 (short(n), short(p)))
1396 1392
1397 1393 try:
1398 1394 delta = mdiff.patchtext(self.manifest.delta(n))
1399 1395 except KeyboardInterrupt:
1400 1396 self.ui.warn(_("interrupted"))
1401 1397 raise
1402 1398 except Exception, inst:
1403 1399 err(_("unpacking manifest %s: %s") % (short(n), inst))
1404 1400
1405 1401 ff = [ l.split('\0') for l in delta.splitlines() ]
1406 1402 for f, fn in ff:
1407 1403 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1408 1404
1409 1405 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1410 1406
1411 1407 for m,c in neededmanifests.items():
1412 1408 err(_("Changeset %s refers to unknown manifest %s") %
1413 1409 (short(m), short(c)))
1414 1410 del neededmanifests
1415 1411
1416 1412 for f in filenodes:
1417 1413 if f not in filelinkrevs:
1418 1414 err(_("file %s in manifest but not in changesets") % f)
1419 1415
1420 1416 for f in filelinkrevs:
1421 1417 if f not in filenodes:
1422 1418 err(_("file %s in changeset but not in manifest") % f)
1423 1419
1424 1420 self.ui.status(_("checking files\n"))
1425 1421 ff = filenodes.keys()
1426 1422 ff.sort()
1427 1423 for f in ff:
1428 1424 if f == "/dev/null": continue
1429 1425 files += 1
1430 1426 fl = self.file(f)
1431 1427 nodes = { nullid: 1 }
1432 1428 seen = {}
1433 1429 for i in range(fl.count()):
1434 1430 revisions += 1
1435 1431 n = fl.node(i)
1436 1432
1437 1433 if n in seen:
1438 1434 err(_("%s: duplicate revision %d") % (f, i))
1439 1435 if n not in filenodes[f]:
1440 1436 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1441 1437 else:
1442 1438 del filenodes[f][n]
1443 1439
1444 1440 flr = fl.linkrev(n)
1445 1441 if flr not in filelinkrevs[f]:
1446 1442 err(_("%s:%s points to unexpected changeset %d")
1447 1443 % (f, short(n), flr))
1448 1444 else:
1449 1445 filelinkrevs[f].remove(flr)
1450 1446
1451 1447 # verify contents
1452 1448 try:
1453 1449 t = fl.read(n)
1454 1450 except Exception, inst:
1455 1451 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1456 1452
1457 1453 # verify parents
1458 1454 (p1, p2) = fl.parents(n)
1459 1455 if p1 not in nodes:
1460 1456 err(_("file %s:%s unknown parent 1 %s") %
1461 1457 (f, short(n), short(p1)))
1462 1458 if p2 not in nodes:
1463 1459 err(_("file %s:%s unknown parent 2 %s") %
1464 1460 (f, short(n), short(p1)))
1465 1461 nodes[n] = 1
1466 1462
1467 1463 # cross-check
1468 1464 for node in filenodes[f]:
1469 1465 err(_("node %s in manifests not in %s") % (hex(node), f))
1470 1466
1471 1467 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1472 1468 (files, changesets, revisions))
1473 1469
1474 1470 if errors[0]:
1475 1471 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1476 1472 return 1
General Comments 0
You need to be logged in to leave comments. Login now