##// END OF EJS Templates
fix bad assumption about uniqueness of file versions...
mpm@selenic.com -
r224:ccbcc4d7 default
parent child Browse files
Show More
@@ -1,542 +1,538 b''
1 1 #!/usr/bin/env python
2 2 #
3 3 # mercurial - a minimal scalable distributed SCM
4 4 # v0.5b "katje"
5 5 #
6 6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 7 #
8 8 # This software may be used and distributed according to the terms
9 9 # of the GNU General Public License, incorporated herein by reference.
10 10
11 11 # the psyco compiler makes commits a bit faster
12 12 # and makes changegroup merge about 20 times slower!
13 13 # try:
14 14 # import psyco
15 15 # psyco.full()
16 16 # except:
17 17 # pass
18 18
19 19 import sys, os, time
20 20 from mercurial import hg, mdiff, fancyopts, ui, commands
21 21
def help():
    """Print the command summary.

    Uses the global `ui`, which by call time has been rebound from the
    mercurial.ui module to a ui.ui() instance (see the setup code below).
    """
    ui.status("""\
commands:

 add [files...] add the given files in the next commit
 addremove add all new files, delete all missing files
 annotate [files...] show changeset number per file line
 branch <path> create a branch of <path> in this directory
 checkout [changeset] checkout the latest or given changeset
 commit commit all changes to the repository
 diff [files...] diff working directory (or selected files)
 dump <file> [rev] dump the latest or given revision of a file
 dumpmanifest [rev] dump the latest or given revision of the manifest
 export <rev> dump the changeset header and diffs for a revision
 history show changeset history
 init create a new repository in this directory
 log <file> show revision history of a single file
 merge <path> merge changes from <path> into local repository
 recover rollback an interrupted transaction
 remove [files...] remove the given files in the next commit
 serve export the repository via HTTP
 status show new, missing, and changed files in working dir
 tags show current changeset tags
 undo undo the last transaction
""")
47 47
def filterfiles(names, files):
    """Return the entries of `names` selected by `files`.

    A name is selected if it exactly matches an entry of `files`, or if
    it lies underneath an entry treated as a directory prefix.  Exact
    matches come first, then prefix matches per filter entry, preserving
    the order of `names` within each group (as before, a name may appear
    more than once).

    Fixes: the first parameter used to shadow the builtin `list`, and an
    empty string in `files` raised IndexError on f[-1].
    """
    # exact matches first
    l = [x for x in names if x in files]

    # then directory-prefix matches: "dir" selects "dir<sep>anything"
    for f in files:
        if not f:
            continue  # an empty filter entry selects nothing
        if f[-1] != os.sep:
            f += os.sep
        l += [x for x in names if x.startswith(f)]
    return l
55 55
def diff(files = None, node1 = None, node2 = None):
    """Write unified diffs to stdout.

    With node2, diff changeset node1 against node2; otherwise diff node1
    (default: the checked-out changeset) against the working directory.
    `files` restricts output via filterfiles.  Relies on the global
    `repo` created by the setup code below.
    """
    def date(c):
        # a changeset's date field is "<unixtime> <tzoffset>"
        return time.asctime(time.gmtime(float(c[2].split(' ')[0])))

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        (c, a, d) = repo.diffrevs(node1, node2)
        # "new" side contents come from the node2 manifest
        def read(f): return repo.file(f).read(mmap2[f])
        date2 = date(change)
    else:
        date2 = time.asctime()
        if not node1:
            node1 = repo.current
        (c, a, d, u) = repo.diffdir(repo.root, node1)
        a = [] # ignore unknown files in repo, by popular request
        # "new" side contents come from the working directory
        def read(f): return file(os.path.join(repo.root, f)).read()

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = date(change)

    if files:
        c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))

    # changed, added, deleted: "to" is the old contents, "tn" the new
    for f in c:
        to = repo.file(f).read(mmap[f])
        tn = read(f)
        sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
    for f in a:
        to = ""
        tn = read(f)
        sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
    for f in d:
        to = repo.file(f).read(mmap[f])
        tn = ""
        sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
93 93
94 94
# first give the new-style command table a chance to handle the command
try:
    sys.exit(commands.dispatch(sys.argv[1:]))
except commands.UnknownCommand:
    # fall through
    pass

# global options, shared by all the legacy commands below
options = {}
opts = [('v', 'verbose', None, 'verbose'),
        ('d', 'debug', None, 'debug'),
        ('q', 'quiet', None, 'quiet'),
        ('y', 'noninteractive', None, 'run non-interactively'),
        ]

args = fancyopts.fancyopts(sys.argv[1:], opts, options,
                           'hg [options] <command> [command options] [files]')

try:
    cmd = args[0]
    args = args[1:]
except:  # NOTE(review): bare except; an IndexError catch would be clearer
    cmd = "help"

# rebind the module name `ui` to a configured ui instance
ui = ui.ui(options["verbose"], options["debug"], options["quiet"],
           not options["noninteractive"])

try:
    repo = hg.repository(ui=ui)
except IOError:
    ui.warn("Unable to open repository\n")
    sys.exit(0)

# path of the cwd relative to the repository root, used to rebase
# file arguments for diff/log
relpath = None
if os.getcwd() != repo.root:
    relpath = os.getcwd()[len(repo.root) + 1: ]
129 129
# command dispatch.
# NOTE(review): this elif chain hangs off the "if os.getcwd() != repo.root"
# statement above, so when hg is run from a subdirectory of the repository
# no branch here executes and the script silently exits -- this probably
# should start a fresh "if"; confirm against later revisions.
elif cmd == "add":
    repo.add(args)

elif cmd == "forget":
    repo.forget(args)

elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
    repo.remove(args)

elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
    if 1:  # NOTE(review): leftover always-true guard
        if len(args) > 0:
            repo.commit(repo.current, args)
        else:
            repo.commit(repo.current)
elif cmd == "rawcommit":
    "raw commit interface"
    rc = {}
    opts = [('p', 'parent', [], 'parent'),
            ('d', 'date', "", 'data'),
            ('u', 'user', "", 'user'),
            ('F', 'files', "", 'file list'),
            ('t', 'text', "", 'commit text'),
            ('l', 'logfile', "", 'commit text file')
            ]
    args = fancyopts.fancyopts(args, opts, rc,
                               "hg rawcommit [options] files")
    # commit text comes from -t, else from the -l logfile
    text = rc['text']
    if not text and rc['logfile']:
        try: text = open(rc['logfile']).read()
        except IOError: pass
    if not text and not rc['logfile']:
        print "missing commit text"
        sys.exit(0)
    if rc['files']:
        files = open(rc['files']).read().splitlines()
    else:
        files = args

    repo.rawcommit(files, text, rc['user'], rc['date'], *rc['parent'])


elif cmd == "import" or cmd == "patch":
    # applying patches is slow; psyco helps here
    try:
        import psyco
        psyco.full()
    except:
        pass

    ioptions = {}
    opts = [('p', 'strip', 1, 'path strip'),
            ('b', 'base', "", 'base path'),
            ('q', 'quiet', "", 'silence diff')
            ]

    args = fancyopts.fancyopts(args, opts, ioptions,
                               'hg import [options] <patch names>')
    d = ioptions["base"]
    strip = ioptions["strip"]
    quiet = ioptions["quiet"] and "> /dev/null" or ""

    for patch in args:
        ui.status("applying %s\n" % patch)
        pf = os.path.join(d, patch)

        # the commit text is everything before the first diff hunk
        text = ""
        for l in file(pf):
            if l[:4] == "--- ": break
            text += l

        # ask lsdiff(1) which files the patch touches
        f = os.popen("lsdiff --strip %d %s" % (strip, pf))
        files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
        f.close()

        if files:
            if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
                raise "patch failed!"
        repo.commit(repo.current, files, text)

elif cmd == "diff":
    revs = []

    if args:
        doptions = {}
        opts = [('r', 'revision', [], 'revision')]
        args = fancyopts.fancyopts(args, opts, doptions,
                                   'hg diff [options] [files]')
        revs = map(lambda x: repo.lookup(x), doptions['revision'])

    if len(revs) > 2:
        # NOTE(review): there is no `self` at module level; this line
        # raises NameError instead of warning -- should be ui.warn
        self.ui.warn("too many revisions to diff\n")
        sys.exit(1)

    # rebase file arguments onto the repository root
    if relpath:
        if not args: args = [ relpath ]
        else: args = [ os.path.join(relpath, x) for x in args ]

    diff(args, *revs)

elif cmd == "export":
    # print a changeset as a patch: header, description, then diffs
    node = repo.lookup(args[0])
    prev, other = repo.changelog.parents(node)
    change = repo.changelog.read(node)
    print "# HG changeset patch"
    print "# User %s" % change[1]
    print "# Node ID %s" % hg.hex(node)
    print "# Parent %s" % hg.hex(prev)
    print
    if other != hg.nullid:
        print "# Parent %s" % hg.hex(other)
    print change[4]

    diff(None, prev, node)

elif cmd == "debugchangegroup":
    newer = repo.newer(map(repo.lookup, args))
    for chunk in repo.changegroup(newer):
        sys.stdout.write(chunk)

elif cmd == "debugaddchangegroup":
    data = sys.stdin.read()
    repo.addchangegroup(data)

elif cmd == "addremove":
    # schedule new files for addition and missing files for removal
    (c, a, d, u) = repo.diffdir(repo.root, repo.current)
    repo.add(a)
    repo.remove(d)

elif cmd == "history":
    # walk the whole changelog, oldest first
    for i in range(repo.changelog.count()):
        n = repo.changelog.node(i)
        changes = repo.changelog.read(n)
        (p1, p2) = repo.changelog.parents(n)
        (h, h1, h2) = map(hg.hex, (n, p1, p2))
        (i1, i2) = map(repo.changelog.rev, (p1, p2))
        print "rev: %4d:%s" % (i, h)
        print "parents: %4d:%s" % (i1, h1)
        if i2: print " %4d:%s" % (i2, h2)
        print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
                                    hg.hex(changes[0]))
        print "user:", changes[1]
        print "date:", time.asctime(
            time.localtime(float(changes[2].split(' ')[0])))
        if ui.verbose: print "files:", " ".join(changes[3])
        print "description:"
        print changes[4]

elif cmd == "tip":
    n = repo.changelog.tip()
    t = repo.changelog.rev(n)
    ui.status("%d:%s\n" % (t, hg.hex(n)))

elif cmd == "log":

    if len(args) == 1:
        if relpath:
            args[0] = os.path.join(relpath, args[0])

        # per-file history: walk the file's own revlog
        r = repo.file(args[0])
        for i in range(r.count()):
            n = r.node(i)
            (p1, p2) = r.parents(n)
            (h, h1, h2) = map(hg.hex, (n, p1, p2))
            (i1, i2) = map(r.rev, (p1, p2))
            cr = r.linkrev(n)
            cn = hg.hex(repo.changelog.node(cr))
            print "rev: %4d:%s" % (i, h)
            print "changeset: %4d:%s" % (cr, cn)
            print "parents: %4d:%s" % (i1, h1)
            if i2: print " %4d:%s" % (i2, h2)
            changes = repo.changelog.read(repo.changelog.node(cr))
            print "user: %s" % changes[1]
            print "date: %s" % time.asctime(
                time.localtime(float(changes[2].split(' ')[0])))
            print "description:"
            print changes[4]
            print
    elif len(args) > 1:
        print "too many args"
    else:
        print "missing filename"

elif cmd == "dump":
    # dump the latest (or requested) revision of one file
    if args:
        r = repo.file(args[0])
        n = r.tip()
        if len(args) > 1: n = r.lookup(args[1])
        sys.stdout.write(r.read(n))
    else:
        print "missing filename"

elif cmd == "dumpmanifest":
    n = repo.manifest.tip()
    if len(args) > 0:
        n = repo.manifest.lookup(args[0])
    m = repo.manifest.read(n)
    files = m.keys()
    files.sort()

    for f in files:
        print hg.hex(m[f]), f

elif cmd == "debugindex":
    # map a working file name to its revlog index file
    if ".hg" not in args[0]:
        args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"

    r = hg.revlog(open, args[0], "")
    print " rev offset length base linkrev"+\
          " p1 p2 nodeid"
    for i in range(r.count()):
        e = r.index[i]
        print "% 6d % 9d % 7d % 6d % 7d %s.. %s.. %s.." % (
            i, e[0], e[1], e[2], e[3],
            hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))

elif cmd == "debugindexdot":
    # emit the revision graph as graphviz input
    if ".hg" not in args[0]:
        args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"

    r = hg.revlog(open, args[0], "")
    print "digraph G {"
    for i in range(r.count()):
        e = r.index[i]
        print "\t%d -> %d" % (r.rev(e[4]), i)
        if e[5] != hg.nullid:
            print "\t%d -> %d" % (r.rev(e[5]), i)
    print "}"

elif cmd == "merge":
    # pull a changegroup from another repository
    (c, a, d, u) = repo.diffdir(repo.root, repo.current)
    if c or a or d:
        ui.warn("aborting (outstanding changes in working directory)\n")
        sys.exit(1)

    if args:
        # expand symbolic repository names from ~/.hgpaths
        paths = {}
        try:
            pf = os.path.join(os.environ["HOME"], ".hgpaths")
            for l in file(pf):
                name, path = l.split()
                paths[name] = path
        except:
            pass

        if args[0] in paths: args[0] = paths[args[0]]

        other = hg.repository(ui, args[0])
        cg = repo.getchangegroup(other)
        repo.addchangegroup(cg)
    else:
        print "missing source repository"

elif cmd == "tags":
    repo.lookup(0) # prime the cache
    i = repo.tags.items()
    i.sort()
    for k, n in i:
        try:
            r = repo.changelog.rev(n)
        except KeyError:
            r = "?"
        # NOTE(review): `r` is computed but the print re-calls rev(n),
        # so the KeyError fallback above never takes effect
        print "%-30s %5d:%s" % (k, repo.changelog.rev(n), hg.hex(n))

elif cmd == "recover":
    repo.recover()

elif cmd == "verify":
    # integrity check: cross-verify changelog, manifests and filelogs
    filelinkrevs = {}          # file -> changelog revs that touch it
    filenodes = {}             # file -> {file node: 1} from manifests
    manifestchangeset = {}     # manifest node -> changeset node
    changesets = revisions = files = 0
    errors = 0

    ui.status("checking changesets\n")
    for i in range(repo.changelog.count()):
        changesets += 1
        n = repo.changelog.node(i)
        for p in repo.changelog.parents(n):
            if p not in repo.changelog.nodemap:
                ui.warn("changeset %s has unknown parent %s\n" %
                        (hg.short(n), hg.short(p)))
                errors += 1
        try:
            changes = repo.changelog.read(n)
        except Exception, inst:
            # NOTE(review): `short` is not defined at module scope
            # (hg.short?), and a failed read leaves `changes` stale for
            # the code below -- confirm
            ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
            errors += 1

        manifestchangeset[changes[0]] = n
        for f in changes[3]:
            filelinkrevs.setdefault(f, []).append(i)

    ui.status("checking manifests\n")
    for i in range(repo.manifest.count()):
        n = repo.manifest.node(i)
        for p in repo.manifest.parents(n):
            if p not in repo.manifest.nodemap:
                ui.warn("manifest %s has unknown parent %s\n" %
                        (hg.short(n), hg.short(p)))
                errors += 1
        # each manifest must be the one its linked changeset recorded
        ca = repo.changelog.node(repo.manifest.linkrev(n))
        cc = manifestchangeset[n]
        if ca != cc:
            ui.warn("manifest %s points to %s, not %s\n" %
                    (hg.hex(n), hg.hex(ca), hg.hex(cc)))
            errors += 1

        try:
            delta = mdiff.patchtext(repo.manifest.delta(n))
        except KeyboardInterrupt:
            print "aborted"
            sys.exit(0)
        except Exception, inst:
            ui.warn("unpacking manifest %s: %s\n" % (hg.short(n), inst))
            errors += 1

        # collect every file node mentioned by any manifest
        ff = [ l.split('\0') for l in delta.splitlines() ]
        for f, fn in ff:
            filenodes.setdefault(f, {})[hg.bin(fn)] = 1

    ui.status("crosschecking files in changesets and manifests\n")
    for f in filenodes:
        if f not in filelinkrevs:
            ui.warn("file %s in manifest but not in changesets\n" % f)
            errors += 1

    for f in filelinkrevs:
        if f not in filenodes:
            ui.warn("file %s in changeset but not in manifest\n" % f)
            errors += 1

    ui.status("checking files\n")
    ff = filenodes.keys()
    ff.sort()
    for f in ff:
        if f == "/dev/null": continue
        files += 1
        fl = repo.file(f)
        nodes = { hg.nullid: 1 }
        for i in range(fl.count()):
            revisions += 1
            n = fl.node(i)

            if n not in filenodes[f]:
                ui.warn("%s: %d:%s not in manifests\n" % (f, i, hg.short(n)))
                print len(filenodes[f].keys()), fl.count(), f
                errors += 1
            else:
                del filenodes[f][n]

            flr = fl.linkrev(n)
            if flr not in filelinkrevs[f]:
                ui.warn("%s:%s points to unexpected changeset rev %d\n"
                        % (f, hg.short(n), fl.linkrev(n)))
                errors += 1
            else:
                filelinkrevs[f].remove(flr)

            # verify contents
            try:
                t = fl.read(n)
            except Exception, inst:
                ui.warn("unpacking file %s %s: %s\n" % (f, hg.short(n), inst))
                errors += 1

            # verify parents
            (p1, p2) = fl.parents(n)
            if p1 not in nodes:
                ui.warn("file %s:%s unknown parent 1 %s" %
                        (f, hg.short(n), hg.short(p1)))
                errors += 1
            if p2 not in nodes:
                # NOTE(review): prints p1 here; presumably should be p2
                ui.warn("file %s:%s unknown parent 2 %s" %
                        (f, hg.short(n), hg.short(p1)))
                errors += 1
            nodes[n] = 1

        # cross-check
        for node in filenodes[f]:
            # NOTE(review): prints hg.hex(n) (the last revision seen)
            # instead of the leftover `node` -- confirm
            ui.warn("node %s in manifests not in %s\n" % (hg.hex(n), f))
            errors += 1

    ui.status("%d files, %d changesets, %d total revisions\n" %
              (files, changesets, revisions))

    if errors:
        ui.warn("%d integrity errors encountered!\n" % errors)
        sys.exit(1)

elif cmd == "serve":
    from mercurial import hgweb

    soptions = {}
    opts = [('p', 'port', 8000, 'listen port'),
            ('a', 'address', '', 'interface address'),
            ('n', 'name', os.getcwd(), 'repository name'),
            ('t', 'templates', "", 'template map')
            ]

    args = fancyopts.fancyopts(args, opts, soptions,
                               'hg serve [options]')

    hgweb.server(repo.root, soptions["name"], soptions["templates"],
                 soptions["address"], soptions["port"])

else:
    if cmd: ui.warn("unknown command\n\n")
    help()
    sys.exit(1)
@@ -1,1125 +1,1125 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys, struct, sha, socket, os, time, re, urllib2, tempfile
9 9 import urllib
10 10 from mercurial import byterange, lock
11 11 from mercurial.transaction import *
12 12 from mercurial.revlog import *
13 13 from difflib import SequenceMatcher
14 14
class filelog(revlog):
    """A revlog holding the revisions of a single tracked file."""
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", path + ".i"),
                        os.path.join("data", path + ".d"))

    def read(self, node):
        """Return the file contents at `node`."""
        return self.revision(node)

    def add(self, text, transaction, link, p1=None, p2=None):
        """Store a new revision; `link` is the changelog rev it belongs to."""
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):
        """Annotate the file as of `node`.

        Returns a list of (changeset rev, line) pairs covering the whole
        text of the file at `node`.
        """

        def decorate(text, rev):
            # tag every line of `text` with the revision that produced it
            return [(rev, l) for l in text.splitlines(1)]

        def strip(annotation):
            # recover the bare lines from an annotated list
            return [e[1] for e in annotation]

        def pair(parent, child):
            # propagate annotations: lines both versions share keep the
            # parent's (older) tag, changed lines keep the child's
            new = []
            sm = SequenceMatcher(None, strip(parent), strip(child))
            for o, m, n, s, t in sm.get_opcodes():
                if o == 'equal':
                    new += parent[m:n]
                else:
                    new += child[s:t]
            return new

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = needed.keys()
        visit = [ (self.rev(n), n) for n in visit ]
        visit.sort()
        visit = [ p[1] for p in visit ]
        hist = {}

        for n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        # after the topological walk, n is the requested node itself
        return hist[n]
76 76
class manifest(revlog):
    """Revlog of manifests: each revision maps file names to file nodes."""
    def __init__(self, opener):
        # mapcache: (node, {file: node}) of the most recent read()/add()
        self.mapcache = None
        # listcache: (text, line list) backing the diff() fast path
        self.listcache = None
        # addlist: lines being assembled by an in-progress add()
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        """Return the {file: node} map stored at manifest `node`."""
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1].copy()
        text = self.revision(node)
        map = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            # each line is "<file>\0<40-char hex node>\n"
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
        self.mapcache = (node, map)
        # NOTE(review): on a cache miss the cached dict itself is
        # returned (only hits are copied), so a caller that mutates the
        # result mutates the cache -- confirm this is intended
        return map

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        # (the cached line lists from read()/add() stand in for them)
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, transaction, link, p1=None, p2=None):
        """Store `map` as a new manifest revision; returns its node."""
        files = map.keys()
        files.sort()

        self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
        text = "".join(self.addlist)

        n = self.addrevision(text, transaction, link, p1, p2)
        self.mapcache = (n, map)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n
120 120
class changelog(revlog):
    """Revlog of changesets: manifest id, committer, date, file list
    and description, stored as a simple text record."""

    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        """Decode a raw changeset record.

        Returns (manifest node, user, date, files, description).  An
        empty text decodes to the null changeset.
        """
        if not text:
            return (nullid, "", "0", [], "")
        # the header is separated from the description by a blank line
        sep = text.index("\n\n")
        desc = text[sep + 2:]
        header = text[:sep].splitlines()
        manifest = bin(header[0])
        user = header[1]
        date = header[2]
        files = header[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        """Read and decode the changeset stored at `node`."""
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        """Record a new changeset and return its node id.

        `user` and `date` default from the environment and the clock.
        Note: sorts the caller's file `list` in place, as before.
        """
        if not user:
            user = (os.environ.get("HGUSER") or
                    os.environ.get("EMAIL") or
                    os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
        if not date:
            date = "%d %d" % (time.time(), time.timezone)
        list.sort()
        parts = [hex(manifest), user, date] + list + ["", desc]
        return self.addrevision("\n".join(parts), transaction,
                                self.count(), p1, p2)
151 151
class dirstate:
    """Tracks the working-directory state of each managed file.

    Maps filename -> (state, mode, size, mtime), where state is one of:
    'n' normal, 'i' invalid, 'r' marked for removal, 'a' marked for
    addition.  The map is loaded lazily from the "dirstate" file via
    `opener` and written back on destruction when dirty.

    Fix: read() used a bare `except:` that silently swallowed every
    error; it now only treats a missing/unreadable dirstate as empty.
    """
    def __init__(self, opener, ui):
        self.opener = opener
        self.dirty = 0          # 1 when self.map has unwritten changes
        self.ui = ui
        self.map = None         # lazily populated by read()

    def __del__(self):
        # persist pending changes when the object is reclaimed
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            # self.map is still None -- load it and retry
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def state(self, key):
        """Return the one-letter state of `key`, or "?" if untracked."""
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        """Load the dirstate file into self.map (no-op if already loaded)."""
        if self.map is not None: return self.map

        self.map = {}
        try:
            st = self.opener("dirstate").read()
        except (IOError, OSError):
            # no dirstate file yet: leave the map empty
            return

        # each record is a ">cllll" header (state, mode, size, mtime,
        # name length) followed by the filename itself
        pos = 0
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            self.map[f] = e[:4]
            pos += l

    def update(self, files, state):
        '''Set `state` for each of `files`.  current states:
        n  normal
        i  invalid
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                try:
                    s = os.stat(f)
                    self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
                except OSError:
                    # a vanished file is acceptable only when invalidating
                    if state != "i": raise
                    self.map[f] = ('r', 0, 0, 0)

    def forget(self, files):
        """Drop `files` from the map entirely."""
        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)

    def clear(self):
        self.map = {}
        self.dirty = 1

    def write(self):
        """Serialize self.map back to the dirstate file."""
        st = self.opener("dirstate", "w")
        for f, e in self.map.items():
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def copy(self):
        """Return a copy of the (loaded) state map."""
        self.read()
        return self.map.copy()
243 243
244 244 # used to avoid circular references so destructors work
def opener(base):
    """Return a path-aware open() rooted at `base`.

    `base` may be a local directory or an http:// URL (read-only, via
    httprangereader).  Returning a closure rather than a bound method
    keeps the repository free of reference cycles.
    """
    root = base

    def o(path, mode="r"):
        # remote repositories are accessed with HTTP range requests
        if root[:7] == "http://":
            return httprangereader(os.path.join(root, urllib.quote(path)))

        full = os.path.join(root, path)

        if mode != "r":
            # writing: make sure the directory exists, and replace a
            # multiply-linked file with a private copy before touching it
            try:
                st = os.stat(full)
            except OSError:
                parent = os.path.dirname(full)
                if not os.path.isdir(parent):
                    os.makedirs(parent)
            else:
                if st.st_nlink > 1:
                    file(full + ".tmp", "w").write(file(full).read())
                    os.rename(full + ".tmp", full)

        return file(full, mode)

    return o
269 269
270 270 class localrepository:
    def __init__(self, ui, path=None, create=0):
        """Open (or, with create=1, initialize) a repository.

        `path` may be an http:// URL for a read-only remote repository,
        or a local directory.  With no path, walk upward from the
        current directory looking for a .hg subdirectory.
        """
        self.remote = 0
        if path and path[:7] == "http://":
            self.remote = 1
            self.path = path
        else:
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    p = os.path.dirname(p)
                    # NOTE(review): a py2 string exception, and the "/"
                    # test never fires on non-POSIX roots -- confirm
                    if p == "/": raise "No repo found"
                path = p
            self.path = os.path.join(path, ".hg")

        self.root = path
        self.ui = ui

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.opener = opener(self.path)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.ignorelist = None
        self.tags = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui)
            try:
                # "current" holds the hex node of the checked-out changeset
                self.current = bin(self.opener("current").read())
            except IOError:
                self.current = None
304 304
305 305 def setcurrent(self, node):
306 306 self.current = node
307 307 self.opener("current", "w").write(hex(node))
308 308
309 309 def ignore(self, f):
310 310 if self.ignorelist is None:
311 311 self.ignorelist = []
312 312 try:
313 313 l = open(os.path.join(self.root, ".hgignore"))
314 314 for pat in l:
315 315 if pat != "\n":
316 316 self.ignorelist.append(re.compile(pat[:-1]))
317 317 except IOError: pass
318 318 for pat in self.ignorelist:
319 319 if pat.search(f): return True
320 320 return False
321 321
    def lookup(self, key):
        """Resolve `key` (a tag name, or a changelog id/rev) to a node.

        The tag table is built lazily from the tip of .hgtags; keys that
        are not tags fall through to changelog.lookup.
        """
        if self.tags is None:
            self.tags = {}
            try:
                fl = self.file(".hgtags")
                for l in fl.revision(fl.tip()).splitlines():
                    if l:
                        # each line: "<hex node> <tag name>"
                        n, k = l.split(" ")
                        self.tags[k] = bin(n)
            except KeyError: pass  # no .hgtags revision available
        try:
            return self.tags[key]
        except KeyError:
            return self.changelog.lookup(key)
336 336
337 337 def join(self, f):
338 338 return os.path.join(self.path, f)
339 339
340 340 def file(self, f):
341 341 if f[0] == '/': f = f[1:]
342 342 return filelog(self.opener, f)
343 343
344 344 def transaction(self):
345 345 return transaction(self.opener, self.join("journal"),
346 346 self.join("undo"))
347 347
    def recover(self):
        """Roll back an interrupted transaction, if one was left behind."""
        # NOTE(review): the lock object is not kept, so the lock may be
        # released as soon as this call returns -- confirm
        self.lock()
        if os.path.exists(self.join("recover")):
            self.ui.status("attempting to rollback interrupted transaction\n")
            return rollback(self.opener, self.join("recover"))
        else:
            self.ui.warn("no interrupted transaction available\n")
355 355
    def undo(self):
        """Undo the last transaction: roll the revlogs back, then resync
        the dirstate with the new tip."""
        self.lock()
        if os.path.exists(self.join("undo")):
            # read the tip's file list *before* rolling back
            f = self.changelog.read(self.changelog.tip())[3]
            self.ui.status("attempting to rollback last transaction\n")
            rollback(self.opener, self.join("undo"))
            # recreate the revlogs to drop any stale cached state
            self.manifest = manifest(self.opener)
            self.changelog = changelog(self.opener)

            self.ui.status("discarding dirstate\n")
            node = self.changelog.tip()
            f.sort()

            self.setcurrent(node)
            # mark the files touched by the undone changeset 'i' (invalid)
            self.dirstate.update(f, 'i')

        else:
            self.ui.warn("no undo information available\n")
374 374
    def lock(self, wait = 1):
        """Acquire the repository lock; with wait, block on a holder."""
        try:
            # first try a non-blocking acquire
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst
383 383
    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        """Low-level commit: record `files` with explicit metadata,
        bypassing the working-directory scan (backs "hg rawcommit")."""
        p1 = p1 or self.current or nullid
        pchange = self.changelog.read(p1)
        pmmap = self.manifest.read(pchange[0])
        tr = self.transaction()
        mmap = {}
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = file(f).read()
            except IOError:
                self.ui.warn("Read file %s error, skipped\n" % f)
                continue
            r = self.file(f)
            # link each new file revision to its revision in p1, if any
            prev = pmmap.get(f, nullid)
            mmap[f] = r.add(t, tr, linkrev, prev)

        mnode = self.manifest.add(mmap, tr, linkrev, pchange[0])
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user ,date, )
        tr.close()
        self.setcurrent(n)
        self.dirstate.clear()
        self.dirstate.update(mmap.keys(), "n")
407 407
408 408 def commit(self, parent, files = None, text = ""):
409 409 self.lock()
410 410
411 411 commit = []
412 412 remove = []
413 413 if files:
414 414 for f in files:
415 415 s = self.dirstate.state(f)
416 416 if s in 'cai':
417 417 commit.append(f)
418 418 elif s == 'r':
419 419 remove.append(f)
420 420 else:
421 421 self.warn("%s not tracked!\n")
422 422 else:
423 423 (c, a, d, u) = self.diffdir(self.root, parent)
424 424 commit = c + a
425 425 remove = d
426 426
427 427 if not commit and not remove:
428 428 self.ui.status("nothing changed\n")
429 429 return
430 430
431 431 tr = self.transaction()
432 432
433 433 # check in files
434 434 new = {}
435 435 linkrev = self.changelog.count()
436 436 commit.sort()
437 437 for f in commit:
438 438 self.ui.note(f + "\n")
439 439 try:
440 440 t = file(f).read()
441 441 except IOError:
442 442 self.warn("trouble committing %s!\n" % f)
443 443 raise
444 444
445 445 r = self.file(f)
446 446 new[f] = r.add(t, tr, linkrev)
447 447
448 448 # update manifest
449 449 mmap = self.manifest.read(self.manifest.tip())
450 450 mmap.update(new)
451 451 for f in remove:
452 452 del mmap[f]
453 453 mnode = self.manifest.add(mmap, tr, linkrev)
454 454
455 455 # add changeset
456 456 new = new.keys()
457 457 new.sort()
458 458
459 459 edittext = text + "\n" + "HG: manifest hash %s\n" % hex(mnode)
460 460 edittext += "".join(["HG: changed %s\n" % f for f in new])
461 461 edittext += "".join(["HG: removed %s\n" % f for f in remove])
462 462 edittext = self.ui.edit(edittext)
463 463
464 464 n = self.changelog.add(mnode, new, edittext, tr)
465 465 tr.close()
466 466
467 467 self.setcurrent(n)
468 468 self.dirstate.update(new, "n")
469 469 self.dirstate.forget(remove)
470 470
    def checkout(self, node):
        """Overwrite the working directory with the contents of `node`."""
        # checkout is really dumb at the moment
        # it ought to basically merge
        change = self.changelog.read(node)
        l = self.manifest.read(change[0]).items()
        l.sort()

        for f,n in l:
            if f[0] == "/": continue
            self.ui.note(f, "\n")
            t = self.file(f).revision(n)
            try:
                file(f, "w").write(t)
            except IOError:
                # assume a missing parent directory: create it and retry
                os.makedirs(os.path.dirname(f))
                file(f, "w").write(t)

        self.setcurrent(node)
        self.dirstate.clear()
        self.dirstate.update([f for f,n in l], "n")
491 491
# (review) diffdir: compare the working directory against *changeset*;
# returns (changed, added, deleted, unknown) file-name lists.
# The two leading numbers on each line are the diff rendering's gutter.
492 492 def diffdir(self, path, changeset):
493 493 changed = []
494 494 added = []
495 495 unknown = []
496 496 mf = {}
497 497
498 498 if changeset:
499 499 change = self.changelog.read(changeset)
500 500 mf = self.manifest.read(change[0])
501 501
# diffing against the checked-out changeset: use the dirstate;
# otherwise synthesize a state map keyed by the manifest (values None)
502 502 if changeset == self.current:
503 503 dc = self.dirstate.copy()
504 504 else:
505 505 dc = dict.fromkeys(mf)
506 506
# content comparison between working file and manifest revision
507 507 def fcmp(fn):
508 508 t1 = file(os.path.join(self.root, fn)).read()
509 509 t2 = self.file(fn).revision(mf[fn])
510 510 return cmp(t1, t2)
511 511
512 512 for dir, subdirs, files in os.walk(self.root):
513 513 d = dir[len(self.root)+1:]
514 514 if ".hg" in subdirs: subdirs.remove(".hg")
515 515
516 516 for f in files:
517 517 fn = os.path.join(d, f)
518 518 try: s = os.stat(os.path.join(self.root, fn))
519 519 except: continue
520 520 if fn in dc:
521 521 c = dc[fn]
522 522 del dc[fn]
# NOTE(review): when dc came from dict.fromkeys(mf), c is None here and
# the c[0] test below would raise TypeError — presumably this branch is
# only exercised when changeset == self.current; confirm before reuse.
523 523 if not c:
524 524 if fcmp(fn):
525 525 changed.append(fn)
# dirstate flags: 'i' tracked, 'a' scheduled add, 'r' scheduled remove;
# c[1..3] are mode, size, mtime recorded at last update
526 526 if c[0] == 'i':
527 527 if fn not in mf:
528 528 added.append(fn)
529 529 elif fcmp(fn):
530 530 changed.append(fn)
531 531 elif c[0] == 'a':
532 532 added.append(fn)
533 533 elif c[0] == 'r':
534 534 unknown.append(fn)
535 535 elif c[2] != s.st_size:
536 536 changed.append(fn)
537 537 elif c[1] != s.st_mode or c[3] != s.st_mtime:
538 538 if fcmp(fn):
539 539 changed.append(fn)
540 540 else:
541 541 if self.ignore(fn): continue
542 542 unknown.append(fn)
543 543
# whatever remains in dc was tracked but not found on disk
544 544 deleted = dc.keys()
545 545 deleted.sort()
546 546
547 547 return (changed, added, deleted, unknown)
548 548
def diffrevs(self, node1, node2):
    """Compare the manifests of two changesets.

    Returns (changed, added, deleted) file-name lists, from the
    perspective of going from node1 to node2.
    """
    changed, added = [], []

    mf1 = self.manifest.read(self.changelog.read(node1)[0])
    mf2 = self.manifest.read(self.changelog.read(node2)[0])

    for fn in mf2:
        if fn in mf1:
            if mf1[fn] != mf2[fn]:
                changed.append(fn)
            # drop common files so the leftovers are the deletions
            del mf1[fn]
        else:
            added.append(fn)

    deleted = mf1.keys()
    deleted.sort()

    return (changed, added, deleted)
569 569
def add(self, list):
    """Schedule the given files for addition at the next commit."""
    for f in list:
        fullpath = os.path.join(self.root, f)
        if not os.path.isfile(fullpath):
            self.ui.warn("%s does not exist!\n" % f)
        elif self.dirstate.state(f) == 'n':
            # already in the normal (tracked) state
            self.ui.warn("%s already tracked!\n" % f)
        else:
            self.dirstate.update([f], "a")
579 579
def forget(self, list):
    """Undo a pending add for each of the given files."""
    for f in list:
        if self.dirstate.state(f) in 'ai':
            self.dirstate.forget([f])
        else:
            self.ui.warn("%s not added!\n" % f)
586 586
def remove(self, list):
    """Schedule the given (already deleted) files for removal."""
    for f in list:
        fullpath = os.path.join(self.root, f)
        if os.path.isfile(fullpath):
            # refuse to mark a file removed while it is still on disk
            self.ui.warn("%s still exists!\n" % f)
        elif f not in self.dirstate:
            self.ui.warn("%s not tracked!\n" % f)
        else:
            self.dirstate.update([f], "r")
596 596
def heads(self):
    """Return the changelog's head nodes."""
    return self.changelog.heads()
599 599
def branches(self, nodes):
    """Describe the linear branch each node sits on.

    For every node, walk first-parents back until a merge or the root
    and return a (tip, base, p1, p2) 4-tuple.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    out = []
    for start in nodes:
        n = start
        while n:
            p = self.changelog.parents(n)
            # stop at a merge (two real parents) or at the root
            if p[1] != nullid or p[0] == nullid:
                out.append((start, n, p[0], p[1]))
                break
            n = p[0]
    return out
612 612
def between(self, pairs):
    """Sample the first-parent chain between each (top, bottom) pair.

    Nodes are recorded at exponentially growing distances from top
    (1, 2, 4, ...), giving a skeleton for binary-search style narrowing.
    """
    result = []
    for top, bottom in pairs:
        samples = []
        n = top
        step = 1
        dist = 0
        while n != bottom:
            p = self.changelog.parents(n)[0]
            if dist == step:
                samples.append(n)
                step = step * 2
            n = p
            dist += 1
        result.append(samples)
    return result
631 631
def newer(self, nodes):
    """Return every changelog node that descends from *nodes* (inclusive).

    Relies on revlog ordering: a revision's parents always have lower
    revision numbers, so one forward sweep suffices.
    """
    wanted = {}
    out = []
    marked = {}
    cl = self.changelog
    t = l = cl.count()

    # find the lowest numbered node to start the sweep from
    for n in nodes:
        l = min(l, cl.rev(n))
        wanted[n] = 1

    for i in xrange(l, t):
        n = cl.node(i)
        if n in wanted:  # explicitly listed
            marked[n] = 1
            out.append(n)
            continue
        for p in cl.parents(n):
            if p in marked:  # a parent is already a known descendant
                marked[n] = 1
                out.append(n)
                break

    return out
657 657
658 658 def getchangegroup(self, remote):
659 659 m = self.changelog.nodemap
660 660 search = []
661 661 fetch = []
662 662 seen = {}
663 663 seenbranch = {}
664 664
665 665 # if we have an empty repo, fetch everything
666 666 if self.changelog.tip() == nullid:
667 667 self.ui.status("requesting all changes\n")
668 668 return remote.changegroup([nullid])
669 669
670 670 # otherwise, assume we're closer to the tip than the root
671 671 self.ui.status("searching for changes\n")
672 672 heads = remote.heads()
673 673 unknown = []
674 674 for h in heads:
675 675 if h not in m:
676 676 unknown.append(h)
677 677
678 678 if not unknown:
679 679 self.ui.status("nothing to do!\n")
680 680 return None
681 681
682 682 unknown = remote.branches(unknown)
683 683 while unknown:
684 684 n = unknown.pop(0)
685 685 seen[n[0]] = 1
686 686
687 687 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
688 688 if n == nullid: break
689 689 if n in seenbranch:
690 690 self.ui.debug("branch already found\n")
691 691 continue
692 692 if n[1] and n[1] in m: # do we know the base?
693 693 self.ui.debug("found incomplete branch %s:%s\n"
694 694 % (short(n[0]), short(n[1])))
695 695 search.append(n) # schedule branch range for scanning
696 696 seenbranch[n] = 1
697 697 else:
698 698 if n[2] in m and n[3] in m:
699 699 if n[1] not in fetch:
700 700 self.ui.debug("found new changeset %s\n" %
701 701 short(n[1]))
702 702 fetch.append(n[1]) # earliest unknown
703 703 continue
704 704
705 705 r = []
706 706 for a in n[2:4]:
707 707 if a not in seen: r.append(a)
708 708
709 709 if r:
710 710 self.ui.debug("requesting %s\n" %
711 711 " ".join(map(short, r)))
712 712 for b in remote.branches(r):
713 713 self.ui.debug("received %s:%s\n" %
714 714 (short(b[0]), short(b[1])))
715 715 if b[0] not in m and b[0] not in seen:
716 716 unknown.append(b)
717 717
718 718 while search:
719 719 n = search.pop(0)
720 720 l = remote.between([(n[0], n[1])])[0]
721 721 p = n[0]
722 722 f = 1
723 723 for i in l + [n[1]]:
724 724 if i in m:
725 725 if f <= 2:
726 726 self.ui.debug("found new branch changeset %s\n" %
727 727 short(p))
728 728 fetch.append(p)
729 729 else:
730 730 self.ui.debug("narrowed branch search to %s:%s\n"
731 731 % (short(p), short(i)))
732 732 search.append((p, i))
733 733 break
734 734 p, f = i, f * 2
735 735
736 736 for f in fetch:
737 737 if f in m:
738 738 raise "already have", short(f[:4])
739 739
740 740 self.ui.note("adding new changesets starting at " +
741 741 " ".join([short(f) for f in fetch]) + "\n")
742 742
743 743 return remote.changegroup(fetch)
744 744
def changegroup(self, basenodes):
    """Yield a changegroup stream for every changeset newer than
    *basenodes*: changelog group, manifest group, then one group per
    changed file (each file group prefixed by a length-framed name).

    FIX: removed the unused local ``revs`` — it was computed from the
    changelog but never referenced afterwards.
    """
    nodes = self.newer(basenodes)

    # construct the link map (changelog rev -> node)
    linkmap = {}
    for n in nodes:
        linkmap[self.changelog.rev(n)] = n

    # construct a sorted list of all changed files
    changed = {}
    for n in nodes:
        c = self.changelog.read(n)
        for f in c[3]:
            changed[f] = 1
    changed = changed.keys()
    changed.sort()

    # the changegroup is changesets + manifests + all file revs
    for y in self.changelog.group(linkmap): yield y
    for y in self.manifest.group(linkmap): yield y
    for f in changed:
        # frame: 4-byte length (including itself) + file name
        yield struct.pack(">l", len(f) + 4) + f
        g = self.file(f).group(linkmap)
        for y in g:
            yield y
772 772
def addchangegroup(self, generator):
    """Apply a changegroup stream (as produced by changegroup()) to the
    local repository inside a single transaction."""

    class genread:
        # adapt the chunk generator to a file-like read(n) interface
        def __init__(self, generator):
            self.g = generator
            self.buf = ""
        def read(self, l):
            while l > len(self.buf):
                try:
                    self.buf += self.g.next()
                except StopIteration:
                    break
            d, self.buf = self.buf[:l], self.buf[l:]
            return d

    def getchunk():
        # a chunk is a 4-byte big-endian length (counting itself) + payload;
        # a length <= 4 marks the end of a group
        d = source.read(4)
        if not d: return ""
        l = struct.unpack(">l", d)[0]
        if l <= 4: return ""
        return source.read(l - 4)

    def getgroup():
        while 1:
            c = getchunk()
            if not c: break
            yield c

    def csmap(x):
        self.ui.debug("add changeset %s\n" % short(x))
        return self.changelog.count()

    def revmap(x):
        return self.changelog.rev(x)

    if not generator: return
    changesets = files = revisions = 0
    self.lock()
    source = genread(generator)
    tr = self.transaction()

    # pull off the changeset group
    self.ui.status("adding changesets\n")
    co = self.changelog.tip()
    cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
    changesets = self.changelog.rev(cn) - self.changelog.rev(co)

    # pull off the manifest group
    self.ui.status("adding manifests\n")
    mm = self.manifest.tip()
    mo = self.manifest.addgroup(getgroup(), revmap, tr)

    # process the files
    self.ui.status("adding file revisions\n")
    while 1:
        f = getchunk()
        if not f: break
        self.ui.debug("adding %s revisions\n" % f)
        fl = self.file(f)
        o = fl.tip()
        n = fl.addgroup(getgroup(), revmap, tr)
        revisions += fl.rev(n) - fl.rev(o)
        files += 1

    self.ui.status(("modified %d files, added %d changesets" +
                    " and %d new revisions\n")
                   % (files, changesets, revisions))

    tr.close()
    return
843 843
def merge(self, generator):
    """Pull a changegroup and, when it diverges from the local tip,
    resolve files/manifest and commit a merge changeset."""
    changesets = files = revisions = 0

    self.lock()

    class genread:
        # adapt the chunk generator to a file-like read(n) interface
        def __init__(self, generator):
            self.g = generator
            self.buf = ""
        def read(self, l):
            while l > len(self.buf):
                try:
                    self.buf += self.g.next()
                except StopIteration:
                    break
            d, self.buf = self.buf[:l], self.buf[l:]
            return d

    if not generator: return
    source = genread(generator)

    def getchunk():
        # 4-byte big-endian length (counting itself) + payload
        d = source.read(4)
        if not d: return ""
        l = struct.unpack(">l", d)[0]
        if l <= 4: return ""
        return source.read(l - 4)

    def getgroup():
        while 1:
            c = getchunk()
            if not c: break
            yield c

    tr = self.transaction()
    simple = True
    need = {}   # files whose tips must be merged -> our version

    self.ui.status("adding changesets\n")
    # pull off the changeset group
    def report(x):
        self.ui.debug("add changeset %s\n" % short(x))
        return self.changelog.count()

    co = self.changelog.tip()
    cn = self.changelog.addgroup(getgroup(), report, tr)

    changesets = self.changelog.rev(cn) - self.changelog.rev(co)

    self.ui.status("adding manifests\n")
    # pull off the manifest group
    mm = self.manifest.tip()
    mo = self.manifest.addgroup(getgroup(),
                                lambda x: self.changelog.rev(x), tr)

    # do we need a resolve?
    if self.changelog.ancestor(co, cn) != co:
        simple = False
        resolverev = self.changelog.count()

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.status("resolving manifests\n")
        ma = self.manifest.ancestor(mm, mo)
        omap = self.manifest.read(mo) # other
        amap = self.manifest.read(ma) # ancestor
        mmap = self.manifest.read(mm) # mine
        nmap = {}                     # the merged manifest

        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(ma), short(mm), short(mo)))

        for f, mid in mmap.iteritems():
            if f in omap:
                if mid != omap[f]:
                    self.ui.debug(" %s versions differ, do resolve\n" % f)
                    need[f] = mid # use merged version or local version
                else:
                    nmap[f] = mid # keep ours
                del omap[f]
            elif f in amap:
                if mid != amap[f]:
                    r = self.ui.prompt(
                        (" local changed %s which remote deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "k": nmap[f] = mid
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    pass # other deleted it
            else:
                self.ui.debug("local created %s\n" %f)
                nmap[f] = mid # we created it

        del mmap

        for f, oid in omap.iteritems():
            if f in amap:
                if oid != amap[f]:
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "k": nmap[f] = oid
                else:
                    pass # probably safe
            else:
                self.ui.debug("remote created %s, do resolve\n" % f)
                need[f] = oid

        del omap
        del amap

        new = need.keys()
        new.sort()

    # process the files
    self.ui.status("adding files\n")
    while 1:
        f = getchunk()
        if not f: break
        self.ui.debug("adding %s revisions\n" % f)
        fl = self.file(f)
        o = fl.tip()
        n = fl.addgroup(getgroup(), lambda x: self.changelog.rev(x), tr)
        revisions += fl.rev(n) - fl.rev(o)
        files += 1
        if f in need:
            del need[f]
            # manifest resolve determined we need to merge the tips
            nmap[f] = self.merge3(fl, f, o, n, tr, resolverev)

    if need:
        # we need to do trivial merges on local files
        for f in new:
            if f not in need: continue
            fl = self.file(f)
            nmap[f] = self.merge3(fl, f, need[f], fl.tip(), tr, resolverev)
            revisions += 1

    # For simple merges, we don't need to resolve manifests or changesets
    if simple:
        self.ui.debug("simple merge, skipping resolve\n")
        self.ui.status(("modified %d files, added %d changesets" +
                        " and %d new revisions\n")
                       % (files, changesets, revisions))
        tr.close()
        return

    node = self.manifest.add(nmap, tr, resolverev, mm, mo)
    revisions += 1

    # Now all files and manifests are merged, we add the changed files
    # and manifest id to the changelog
    self.ui.status("committing merge changeset\n")
    if co == cn: cn = -1

    edittext = "\nHG: merge resolve\n" + \
               "HG: manifest hash %s\n" % hex(node) + \
               "".join(["HG: changed %s\n" % f for f in new])
    edittext = self.ui.edit(edittext)
    n = self.changelog.add(node, new, edittext, tr, co, cn)
    revisions += 1

    self.ui.status("added %d changesets, %d files, and %d new revisions\n"
                   % (changesets, files, revisions))

    tr.close()
def merge3(self, fl, fn, my, other, transaction, link):
    """Perform a 3-way merge of file *fn* and append the result to
    filelog *fl*; returns the new node.

    FIX: the three temporary files are now removed in a ``finally``
    block — previously a failing $HGMERGE invocation raised before the
    ``os.unlink`` calls and leaked all three temp files.
    """

    def temp(prefix, node):
        # dump a file revision into a temp file for the merge tool
        pre = "%s~%s." % (os.path.basename(fn), prefix)
        (fd, name) = tempfile.mkstemp("", pre)
        f = os.fdopen(fd, "w")
        f.write(fl.revision(node))
        f.close()
        return name

    base = fl.ancestor(my, other)
    self.ui.note("resolving %s\n" % fn)
    self.ui.debug("local %s remote %s ancestor %s\n" %
                  (short(my), short(other), short(base)))

    if my == base:
        # local side unchanged: take the other version outright
        text = fl.revision(other)
    else:
        a = temp("local", my)
        b = temp("remote", other)
        c = temp("parent", base)

        try:
            cmd = os.environ["HGMERGE"]
            self.ui.debug("invoking merge with %s\n" % cmd)
            r = os.system("%s %s %s %s %s" % (cmd, a, b, c, fn))
            if r:
                raise "Merge failed!"

            text = open(a).read()
        finally:
            # always clean up the temp files, even on failure
            os.unlink(a)
            os.unlink(b)
            os.unlink(c)

    return fl.add(text, transaction, link, my, other)
1045 1045
class remoterepository:
    """Read-only repository proxy that issues commands to a remote hg
    server over HTTP.

    FIX: corrected the user-visible typo "transfered" -> "transferred"
    in the changegroup status message.
    """
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

    def do_cmd(self, cmd, **args):
        """Issue *cmd* as a query-string GET; return the open response."""
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = urllib.urlencode(q)
        cu = "%s?%s" % (self.url, qs)
        return urllib.urlopen(cu)

    def heads(self):
        """Return the remote's head nodes."""
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        """Return branch 4-tuples for the given nodes."""
        n = " ".join(map(hex, nodes))
        d = self.do_cmd("branches", nodes=n).read()
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        """Return sampled nodes between each (top, bottom) pair."""
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=n).read()
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def changegroup(self, nodes):
        """Stream and decompress a changegroup rooted at *nodes*."""
        n = " ".join(map(hex, nodes))
        zd = zlib.decompressobj()
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0
        while 1:
            d = f.read(4096)
            bytes += len(d)
            if not d:
                yield zd.flush()
                break
            yield zd.decompress(d)
        self.ui.note("%d bytes of data transferred\n" % bytes)
1100 1100
def repository(ui, path=None, create=0):
    """Factory: return the appropriate repository object for *path*.

    http:// and hg:// paths get a remote proxy; old-http:// and local
    paths get a localrepository.
    """
    if path and path.startswith("http://"):
        return remoterepository(ui, path)
    if path and path.startswith("hg://"):
        return remoterepository(ui, path.replace("hg://", "http://"))
    if path and path.startswith("old-http://"):
        return localrepository(ui, path.replace("old-http://", "http://"))
    return localrepository(ui, path, create)
1110 1110
class httprangereader:
    """File-like object reading a remote URL via HTTP Range requests."""

    def __init__(self, url):
        self.url = url
        self.pos = 0

    def seek(self, pos):
        self.pos = pos

    def read(self, bytes=None):
        # NOTE(review): self.pos is not advanced after a read; callers
        # appear to seek() before every read — confirm before relying
        # on sequential read() calls.
        opener = urllib2.build_opener(byterange.HTTPRangeHandler())
        urllib2.install_opener(opener)
        request = urllib2.Request(self.url)
        end = ''
        if bytes:
            end = self.pos + bytes
        request.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
        response = urllib2.urlopen(request)
        return response.read()
@@ -1,507 +1,510 b''
1 1 # revlog.py - storage back-end for mercurial
2 2 #
3 3 # This provides efficient delta storage with O(1) retrieve and append
4 4 # and O(changes) merge between branches
5 5 #
6 6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 7 #
8 8 # This software may be used and distributed according to the terms
9 9 # of the GNU General Public License, incorporated herein by reference.
10 10
11 11 import zlib, struct, sha, binascii, heapq
12 12 from mercurial import mdiff
13 13
def hex(node):
    """Binary node id -> hexadecimal string."""
    return binascii.hexlify(node)
def bin(node):
    """Hexadecimal string -> binary node id."""
    return binascii.unhexlify(node)
def short(node):
    """Abbreviated (8 hex digit) form of a node id."""
    return hex(node[:4])
17 17
def compress(text):
    """Compress *text* for storage, tagging the encoding in byte 0.

    '\\0' -> stored literal, 'u' -> untagged literal, otherwise a zlib
    stream (which always starts with 'x').
    """
    if not text:
        return text
    if len(text) < 44:
        # too small for zlib to pay off
        if text[0] == '\0':
            return text      # already unambiguous
        return 'u' + text    # tag as uncompressed
    # local renamed from `bin` to avoid shadowing the module-level bin()
    packed = zlib.compress(text)
    if len(packed) > len(text):
        # compression made it bigger: store the literal instead
        if text[0] == '\0':
            return text
        return 'u' + text
    return packed
28 28
def decompress(bin):
    """Inverse of compress(), dispatching on the tag byte."""
    if not bin:
        return bin
    t = bin[0]
    if t == '\0':
        return bin                  # stored literal
    if t == 'x':
        return zlib.decompress(bin) # zlib stream
    if t == 'u':
        return bin[1:]              # tagged uncompressed
    raise "unknown compression type %s" % t
36 36
def hash(text, p1, p2):
    """Node id of a revision: sha1 of the sorted parents plus the text."""
    a, b = min(p1, p2), max(p1, p2)
    return sha.sha(a + b + text).digest()
41 41
# node id of the null (nonexistent) revision
nullid = "\0" * 20
# index entry layout: offset, length, base, linkrev, p1, p2, nodeid
indexformat = ">4l20s20s20s"
44 44
class lazyparser:
    """Parse a raw revlog index on demand, 1000 entries at a time."""

    def __init__(self, data):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data) / self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}

    def load(self, pos):
        """Decode the whole 1000-entry block containing *pos*."""
        block = pos / 1000
        i = block * 1000
        end = min(self.l, i + 1000)
        while i < end:
            raw = self.data[i * self.s: (i + 1) * self.s]
            entry = struct.unpack(indexformat, raw)
            self.index[i] = entry
            self.map[entry[6]] = i   # entry[6] is the node id
            i += 1
class lazyindex:
    """List-like view over a lazyparser's index, faulting entries in."""

    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return len(self.p.index)

    def load(self, pos):
        self.p.load(pos)
        return self.p.index[pos]

    def __getitem__(self, pos):
        # parsed entries are non-empty tuples, so `or` only falls
        # through to load() for unparsed (None) slots
        return self.p.index[pos] or self.load(pos)

    def append(self, e):
        self.p.index.append(e)
class lazymap:
    """Dict-like node -> rev mapping backed by a lazyparser."""

    def __init__(self, parser):
        self.p = parser

    def load(self, key):
        # locate the raw node id in the index data, then parse its block
        off = self.p.data.find(key)
        if off < 0:
            raise KeyError("node " + hex(key))
        pos = off / self.p.s
        self.p.load(pos)

    def __contains__(self, key):
        try:
            self[key]
            return True
        except KeyError:
            return False

    def __iter__(self):
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                # slot not parsed yet: fault its block in and retry
                self.p.load(i)
                yield self.p.index[i][6]

    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            # not parsed yet: try to locate and load it
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))

    def __setitem__(self, key, val):
        self.p.map[key] = val
110 110 class revlog:
def __init__(self, opener, indexfile, datafile):
    """Open a revlog; small indexes are parsed eagerly, large ones
    (over 10000 bytes) lazily."""
    self.indexfile = indexfile
    self.datafile = datafile
    self.opener = opener
    self.cache = None   # (node, rev, text) of the last built revision

    try:
        data = self.opener(self.indexfile).read()
    except IOError:
        data = ""

    if len(data) > 10000:
        # big index, let's parse it on demand
        parser = lazyparser(data)
        self.index = lazyindex(parser)
        self.nodemap = lazymap(parser)
    else:
        esize = struct.calcsize(indexformat)
        count = len(data) / esize
        self.index = [None] * count
        pairs = [None] * count

        n = 0
        for off in xrange(0, len(data), esize):
            # offset, size, base, linkrev, p1, p2, nodeid
            e = struct.unpack(indexformat, data[off:off + esize])
            pairs[n] = (e[6], n)
            self.index[n] = e
            n += 1

        self.nodemap = dict(pairs)
        self.nodemap[nullid] = -1
144 144
def tip(self):
    """Node id of the most recently added revision."""
    return self.node(len(self.index) - 1)

def count(self):
    """Number of revisions in this log."""
    return len(self.index)

def node(self, rev):
    """Revision number -> node id (nullid for negative revs)."""
    return (rev < 0) and nullid or self.index[rev][6]

def rev(self, node):
    """Node id -> revision number."""
    return self.nodemap[node]

def linkrev(self, node):
    """Changelog revision this node is linked to (index field 3)."""
    return self.index[self.nodemap[node]][3]

def parents(self, node):
    """Parent node ids of *node*; (nullid, nullid) for the null id."""
    if node == nullid:
        return (nullid, nullid)
    return self.index[self.nodemap[node]][4:6]
153 153
def start(self, rev):
    """Byte offset of this revision's data in the datafile."""
    return self.index[rev][0]

def length(self, rev):
    """Stored (compressed) length of this revision's data."""
    return self.index[rev][1]

def end(self, rev):
    """Byte offset just past this revision's data."""
    return self.start(rev) + self.length(rev)

def base(self, rev):
    """Revision holding the full snapshot this rev's delta chain starts at."""
    return self.index[rev][2]
158 158
def heads(self):
    """Return the list of head nodes (revisions with no children).

    FIX: iterate all the way down to revision 0. The original loop used
    ``range(self.count() - 1, 0, -1)``, which excludes rev 0, so the
    root revision was never reported even when it had no children —
    e.g. a one-revision repository returned an empty list.
    """
    p = {}   # nodes known to be somebody's parent
    h = []
    for r in range(self.count() - 1, -1, -1):
        n = self.node(r)
        # children have higher revs and were processed first, so if no
        # child has marked us as a parent by now, we are a head
        if n not in p:
            h.append(n)
        for pn in self.parents(n):
            p[pn] = 1
    return h
169 169
def lookup(self, id):
    """Resolve *id* — an integer revision or a hex fragment — to a node.

    Raises KeyError when a fragment matches zero or several nodes.
    """
    try:
        return self.node(int(id))
    except ValueError:
        # fragment match: the id may appear anywhere in the hex string
        matches = [n for n in self.nodemap if id in hex(n)]
        if len(matches) > 1: raise KeyError("Ambiguous identifier")
        if len(matches) < 1: raise KeyError("No match found")
        return matches[0]

    return None   # not reached; kept from the original
184 184
def diff(self, a, b):
    """Return a binary delta transforming text a into text b."""
    return mdiff.textdiff(a, b)
187 187
def patches(self, t, pl):
    """Apply the list of deltas *pl* to text *t*."""
    return mdiff.patches(t, pl)
190 190
def delta(self, node):
    """Return the delta for *node* against the previous revision."""
    r = self.rev(node)
    b = self.base(r)
    if r == b:
        # stored as a full snapshot: synthesize the delta
        return self.diff(self.revision(self.node(r - 1)),
                         self.revision(node))
    # stored as a delta: read it straight off disk
    f = self.opener(self.datafile)
    f.seek(self.start(r))
    data = f.read(self.length(r))
    return decompress(data)
202 202
def revision(self, node):
    """Reconstruct and return the full text of *node*.

    Reads the delta chain from its base snapshot (reusing the one-entry
    cache when it lies on the same chain) and verifies integrity
    against the node hash.
    """
    if node == nullid: return ""
    if self.cache and self.cache[0] == node: return self.cache[2]

    text = None
    rev = self.rev(node)
    start, length, base, link, p1, p2, node = self.index[rev]
    end = start + length
    if base != rev: start = self.start(base)

    # reuse the cached text if it sits on this chain before rev
    if self.cache and self.cache[1] >= base and self.cache[1] < rev:
        base = self.cache[1]
        start = self.start(base + 1)
        text = self.cache[2]
        last = 0

    f = self.opener(self.datafile)
    f.seek(start)
    data = f.read(end - start)

    if not text:
        # first chunk is the base's full snapshot
        last = self.length(base)
        text = decompress(data[:last])

    deltas = []
    for r in xrange(base + 1, rev + 1):
        size = self.length(r)
        deltas.append(decompress(data[last:last + size]))
        last = last + size

    text = mdiff.patches(text, deltas)

    if node != hash(text, p1, p2):
        raise IOError("integrity check failed on %s:%d"
                      % (self.datafile, rev))

    self.cache = (node, rev, text)
    return text
241 241
def addrevision(self, text, transaction, link, p1=None, p2=None):
    """Append a revision with the given text and parents; returns its node.

    Stores a delta against the tip unless the accumulated chain would
    grow past twice the uncompressed size, in which case a full
    snapshot is written.
    """
    if text is None: text = ""
    if p1 is None: p1 = self.tip()
    if p2 is None: p2 = nullid

    node = hash(text, p1, p2)

    n = self.count()
    t = n - 1

    if n:
        base = self.base(t)
        start = self.start(base)
        end = self.end(t)
        prev = self.revision(self.tip())
        d = self.diff(prev, text)
        data = compress(d)
        dist = end - start + len(data)

    # full versions are inserted when the needed deltas
    # become comparable to the uncompressed text
    if not n or dist > len(text) * 2:
        data = compress(text)
        base = n
    else:
        base = self.base(t)

    offset = 0
    if t >= 0:
        offset = self.end(t)

    e = (offset, len(data), base, link, p1, p2, node)

    self.index.append(e)
    self.nodemap[node] = n
    entry = struct.pack(indexformat, *e)

    transaction.add(self.datafile, e[0])
    self.opener(self.datafile, "a").write(data)
    transaction.add(self.indexfile, n * len(entry))
    self.opener(self.indexfile, "a").write(entry)

    self.cache = (node, n, text)
    return node
286 286
def ancestor(self, a, b):
    """Return the common ancestor node of *a* and *b*.

    FIX: removed two dead locals from the inner generator —
    ``r = self.rev(n)`` and ``earliest = self.count()`` were computed
    but never used.
    """
    # calculate the distance of every node from root
    dist = {nullid: 0}
    for i in xrange(self.count()):
        n = self.node(i)
        p1, p2 = self.parents(n)
        dist[n] = max(dist[p1], dist[p2]) + 1

    # traverse ancestors in order of decreasing distance from root
    def ancestors(node):
        # we store negative distances because heap returns smallest member
        h = [(-dist[node], node)]
        seen = {}
        while h:
            d, n = heapq.heappop(h)
            if n not in seen:
                seen[n] = 1
                yield (-d, n)
                for p in self.parents(n):
                    heapq.heappush(h, (-dist[p], p))

    x = ancestors(a)
    y = ancestors(b)
    lx = x.next()
    ly = y.next()

    # increment each ancestor list until it is closer to root than
    # the other, or they match
    while 1:
        if lx == ly:
            return lx[1]
        elif lx < ly:
            ly = y.next()
        elif lx > ly:
            lx = x.next()
324 324
def group(self, linkmap):
    """Yield a delta group for the revisions whose linkrev is in *linkmap*.

    Given a list of changeset revs, yields a set of deltas and metadata
    corresponding to nodes: the first delta is parent(nodes[0]) ->
    nodes[0]; the receiver is guaranteed to have that parent since it
    has all history before these changesets. The group is terminated
    by a zero-length chunk.

    FIX: removed the unused local ``deltas = []`` — the deltas are
    yielded directly and the list was never appended to or read.
    """
    revs = []
    needed = {}

    # find file nodes/revs that match changeset revs
    for i in xrange(0, self.count()):
        if self.index[i][3] in linkmap:
            revs.append(i)
            needed[i] = 1

    # if we don't have any revisions touched by these changesets, bail
    if not revs:
        yield struct.pack(">l", 0)
        return

    # add the parent of the first rev
    p = self.parents(self.node(revs[0]))[0]
    revs.insert(0, self.rev(p))

    # for each delta that isn't contiguous in the log, we need to
    # reconstruct the base, reconstruct the result, and then
    # calculate the delta. We also need to do this where we've
    # stored a full version and not a delta
    for i in xrange(0, len(revs) - 1):
        a, b = revs[i], revs[i + 1]
        if a + 1 != b or self.base(b) == b:
            for j in xrange(self.base(a), a + 1):
                needed[j] = 1
            for j in xrange(self.base(b), b + 1):
                needed[j] = 1

    # calculate spans to retrieve from datafile
    needed = needed.keys()
    needed.sort()
    spans = []
    oo = -1   # offset of the current span
    ol = 0    # length of the current span
    for n in needed:
        if n < 0: continue
        o = self.start(n)
        l = self.length(n)
        if oo + ol == o: # can we merge with the previous?
            nl = spans[-1][2]
            nl.append((n, l))
            ol += l
            spans[-1] = (oo, ol, nl)
        else:
            oo = o
            ol = l
            spans.append((oo, ol, [(n, l)]))

    # read spans in, divide up chunks
    chunks = {}
    for span in spans:
        # we reopen the file for each span to make http happy for now
        f = self.opener(self.datafile)
        f.seek(span[0])
        data = f.read(span[1])

        # divide up the span
        pos = 0
        for r, l in span[2]:
            chunks[r] = decompress(data[pos: pos + l])
            pos += l

    # helper to reconstruct intermediate versions
    def construct(text, base, rev):
        bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
        return mdiff.patches(text, bins)

    # build deltas
    for d in xrange(0, len(revs) - 1):
        a, b = revs[d], revs[d + 1]
        n = self.node(b)

        # do we need to construct a new delta?
        if a + 1 != b or self.base(b) == b:
            if a >= 0:
                base = self.base(a)
                ta = chunks[self.base(a)]
                ta = construct(ta, base, a)
            else:
                ta = ""

            base = self.base(b)
            if a > base:
                base = a
                tb = ta
            else:
                tb = chunks[self.base(b)]
            tb = construct(tb, base, b)
            d = self.diff(ta, tb)
        else:
            d = chunks[b]

        p = self.parents(n)
        meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
        l = struct.pack(">l", len(meta) + len(d) + 4)
        yield l
        yield meta
        yield d

    yield struct.pack(">l", 0)
435 435
def addgroup(self, revs, linkmapper, transaction, unique=0):
    """Add a group of deltas (a changegroup chunk stream) to this revlog.

    revs        - iterable of chunks; each chunk is 80 bytes of header
                  (node, p1, p2, changeset node, 20 bytes each) followed
                  by the delta payload
    linkmapper  - callable mapping a changeset node to the linkrev value
                  to store for each added revision
    transaction - journal to register datafile/indexfile truncation points
                  with, so an interrupted add can be rolled back
    unique      - if true, a node we already have is an error; if false
                  (default) it is silently skipped, since two branches can
                  legitimately make the same change and produce the same
                  node

    The first delta is against its parent, which should already be in
    our log; each following delta is against the previous chunk's node.
    Returns the node of the last revision processed (nullid if revs is
    empty and the log is empty).
    """

    # track the base of the current delta log
    r = self.count()
    t = r - 1
    node = nullid

    base = prev = -1
    start = end = 0
    if r:
        # seed positions/sizes from the existing tip's delta chain
        start = self.start(self.base(t))
        end = self.end(t)
        measure = self.length(self.base(t))
        base = self.base(t)
        prev = self.tip()

    # register current file lengths so the transaction can truncate
    # back to them on abort
    transaction.add(self.datafile, end)
    transaction.add(self.indexfile, r * struct.calcsize(indexformat))
    dfh = self.opener(self.datafile, "a")
    ifh = self.opener(self.indexfile, "a")

    # loop through our set of deltas
    chain = None
    for chunk in revs:
        node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
        link = linkmapper(cs)
        if node in self.nodemap:
            # this can happen if two branches make the same change
            if unique:
                raise "already have %s" % hex(node[:4])
            continue
        delta = chunk[80:]

        if not chain:
            # retrieve the parent revision of the delta chain
            chain = p1
            if not chain in self.nodemap:
                raise "unknown base %s" % short(chain[:4])

        # full versions are inserted when the needed deltas become
        # comparable to the uncompressed text or when the previous
        # version is not the one we have a delta against. We use
        # the size of the previous full rev as a proxy for the
        # current size.

        if chain == prev:
            cdelta = compress(delta)

        if chain != prev or (end - start + len(cdelta)) > measure * 2:
            # flush our writes here so we can read it in revision
            dfh.flush()
            ifh.flush()
            text = self.revision(chain)
            text = self.patches(text, [delta])
            chk = self.addrevision(text, transaction, link, p1, p2)
            if chk != node:
                raise "consistency error adding group"
            measure = len(text)
        else:
            # append the delta as-is; it extends the current chain
            e = (end, len(cdelta), self.base(t), link, p1, p2, node)
            self.index.append(e)
            self.nodemap[node] = r
            dfh.write(cdelta)
            ifh.write(struct.pack(indexformat, *e))

        t, r, chain, prev = r, r + 1, node, node
        start = self.start(self.base(t))
        end = self.end(t)

    dfh.close()
    ifh.close()
    return node
General Comments 0
You need to be logged in to leave comments. Login now