##// END OF EJS Templates
Refactor merge code...
mpm@selenic.com -
r94:7daef883 default
parent child Browse files
Show More
@@ -1,967 +1,825 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
9 import urllib
9 import urllib
10 from mercurial import byterange
10 from mercurial import byterange
11 from mercurial.transaction import *
11 from mercurial.transaction import *
12 from mercurial.revlog import *
12 from mercurial.revlog import *
13 from difflib import SequenceMatcher
13 from difflib import SequenceMatcher
14
14
class filelog(revlog):
    """A revlog storing the history of a single tracked file.

    Revision data lives under .hg/data/<encoded path>; the path is
    hashed so arbitrary file names map to safe store file names.
    """
    def __init__(self, opener, path):
        s = self.encodepath(path)
        revlog.__init__(self, opener, os.path.join("data", s + "i"),
                        os.path.join("data", s))

    def encodepath(self, path):
        """Hash a repository path into a filesystem-safe store name."""
        s = sha.sha(path).digest()
        s = base64.encodestring(s)[:-3]
        # base64 may emit '+' and '/', which are awkward in file names.
        # FIX: use plain string replacement instead of re.sub with a
        # non-raw, escaped pattern ("\+" is an invalid escape sequence).
        s = s.replace("+", "%")
        s = s.replace("/", "_")
        return s

    def read(self, node):
        # a file revision's content is simply the revlog text
        return self.revision(node)

    def add(self, text, transaction, link, p1=None, p2=None):
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):
        """Return [(changeset-rev, line)] for each line of ``node``.

        Walks the first-parent chain from the root to ``node`` and uses
        SequenceMatcher to carry line attributions forward revision by
        revision.
        """
        # collect the first-parent chain, oldest first
        revs = []
        while node != nullid:
            revs.append(node)
            node = self.parents(node)[0]
        revs.reverse()
        prev = []
        annotate = []
        for node in revs:
            curr = self.read(node).splitlines(1)
            linkrev = self.linkrev(node)
            sm = SequenceMatcher(None, prev, curr)
            offset = 0
            for o, m, n, s, t in sm.get_opcodes():
                if o in ('insert', 'replace'):
                    # new/changed lines are attributed to this revision
                    annotate[m+offset:n+offset] = \
                         [(linkrev, l) for l in curr[s:t]]
                if o == 'insert':
                    offset += m-n
                elif o == 'delete':
                    del annotate[m+offset:n+offset]
                    offset -= m-n
            assert len(annotate) == len(curr)
            prev = curr
        return annotate
94
58
class manifest(revlog):
    """Revlog of flat manifests: one "<file>\\0<hex node>\\n" line per file.

    Caches both the parsed dict (mapcache) and the raw line list
    (listcache) of the most recently read or written revision so that
    repeated reads and delta generation stay cheap.
    """
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        """Return {file: node} for the manifest revision ``node``."""
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1].copy()
        text = self.revision(node)
        self.listcache = (text, text.splitlines(1))
        mapping = {}
        for line in self.listcache[1]:
            f, n = line.split('\0')
            mapping[f] = bin(n[:40])
        self.mapcache = (node, mapping)
        return mapping

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b: when the
        # cached line list matches `a`, diff the cached lines against the
        # pending addlist instead -- much cheaper for big manifests
        if self.listcache and len(self.listcache[0]) == len(a):
            return mdiff.diff(self.listcache[1], self.addlist, 1)
        return mdiff.textdiff(a, b)

    def add(self, map, transaction, link, p1=None, p2=None):
        """Serialize ``map`` and store it as a new manifest revision."""
        files = map.keys()
        files.sort()

        self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
        text = "".join(self.addlist)

        n = self.addrevision(text, transaction, link, p1, p2)
        # prime both caches with what was just written
        self.mapcache = (n, map)
        self.listcache = (text, self.addlist)

        return n
133
97
class changelog(revlog):
    """Revlog of changeset descriptions (00changelog.{i,d})."""
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        """Parse changeset text into (manifest, user, date, files, desc)."""
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        header = text[:last].splitlines()
        manifest = bin(header[0])
        user = header[1]
        date = header[2]
        files = header[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None):
        """Commit a new changeset tying ``manifest`` to changed ``list``."""
        # figure out who to attribute this change to
        user = (os.environ.get("HGUSER") or
                os.environ.get("EMAIL") or
                os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
        date = "%d %d" % (time.time(), time.timezone)
        list.sort()
        lines = [hex(manifest), user, date] + list + ["", desc]
        return self.addrevision("\n".join(lines), transaction,
                                self.count(), p1, p2)
class dircache:
    """Cache of (st_mode, st_size, st_mtime) per working-directory file.

    Backed by the .hg/dircache file; loaded lazily and written back on
    destruction when dirty.
    """
    def __init__(self, opener, ui):
        self.opener = opener
        self.dirty = 0
        self.ui = ui
        self.map = None

    def __del__(self):
        if self.dirty: self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            # map is still None: load it, then retry
            self.read()
            return self[key]

    def read(self):
        """Load the cache from disk (no-op if already loaded)."""
        if self.map is not None: return self.map

        self.map = {}
        try:
            st = self.opener("dircache").read()
        except (IOError, OSError):
            # no cache file yet: start empty (was a bare except)
            return

        pos = 0
        while pos < len(st):
            # each entry: >llll header (mode, size, mtime, namelen) + name
            e = struct.unpack(">llll", st[pos:pos+16])
            l = e[3]
            pos += 16
            f = st[pos:pos + l]
            self.map[f] = e[:3]
            pos += l

    def update(self, files):
        """Refresh cached stat data for ``files`` from the filesystem."""
        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            try:
                s = os.stat(f)
                self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
            except OSError:
                # FIX: os.stat raises OSError, not IOError; and remove()
                # expects a list -- passing the bare string made it
                # iterate character by character.
                self.remove([f])

    def taint(self, files):
        """Mark ``files`` as changed with impossible stat data (-1 size)."""
        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            self.map[f] = (0, -1, 0)

    def remove(self, files):
        """Drop ``files`` from the cache, warning about unknown names."""
        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("Not in dircache: %s\n" % f)

    def clear(self):
        self.map = {}
        self.dirty = 1

    def write(self):
        st = self.opener("dircache", "w")
        for f, e in self.map.items():
            e = struct.pack(">llll", e[0], e[1], e[2], len(f))
            st.write(e + f)
        self.dirty = 0

    def copy(self):
        self.read()
        return self.map.copy()
241
202
242 # used to avoid circular references so destructors work
203 # used to avoid circular references so destructors work
243 def opener(base):
204 def opener(base):
244 p = base
205 p = base
245 def o(path, mode="r"):
206 def o(path, mode="r"):
246 if p[:7] == "http://":
207 if p[:7] == "http://":
247 f = os.path.join(p, urllib.quote(path))
208 f = os.path.join(p, urllib.quote(path))
248 return httprangereader(f)
209 return httprangereader(f)
249
210
250 f = os.path.join(p, path)
211 f = os.path.join(p, path)
251
212
252 if mode != "r" and os.path.isfile(f):
213 if mode != "r" and os.path.isfile(f):
253 s = os.stat(f)
214 s = os.stat(f)
254 if s.st_nlink > 1:
215 if s.st_nlink > 1:
255 file(f + ".tmp", "w").write(file(f).read())
216 file(f + ".tmp", "w").write(file(f).read())
256 os.rename(f+".tmp", f)
217 os.rename(f+".tmp", f)
257
218
258 return file(f, mode)
219 return file(f, mode)
259
220
260 return o
221 return o
261
222
262 class localrepository:
223 class localrepository:
263 def __init__(self, ui, path=None, create=0):
224 def __init__(self, ui, path=None, create=0):
264 self.remote = 0
225 self.remote = 0
265 if path and path[:7] == "http://":
226 if path and path[:7] == "http://":
266 self.remote = 1
227 self.remote = 1
267 self.path = path
228 self.path = path
268 else:
229 else:
269 if not path:
230 if not path:
270 p = os.getcwd()
231 p = os.getcwd()
271 while not os.path.isdir(os.path.join(p, ".hg")):
232 while not os.path.isdir(os.path.join(p, ".hg")):
272 p = os.path.dirname(p)
233 p = os.path.dirname(p)
273 if p == "/": raise "No repo found"
234 if p == "/": raise "No repo found"
274 path = p
235 path = p
275 self.path = os.path.join(path, ".hg")
236 self.path = os.path.join(path, ".hg")
276
237
277 self.root = path
238 self.root = path
278 self.ui = ui
239 self.ui = ui
279
240
280 if create:
241 if create:
281 os.mkdir(self.path)
242 os.mkdir(self.path)
282 os.mkdir(self.join("data"))
243 os.mkdir(self.join("data"))
283
244
284 self.opener = opener(self.path)
245 self.opener = opener(self.path)
285 self.manifest = manifest(self.opener)
246 self.manifest = manifest(self.opener)
286 self.changelog = changelog(self.opener)
247 self.changelog = changelog(self.opener)
287 self.ignorelist = None
248 self.ignorelist = None
288 self.tags = None
249 self.tags = None
289
250
290 if not self.remote:
251 if not self.remote:
291 self.dircache = dircache(self.opener, ui)
252 self.dircache = dircache(self.opener, ui)
292 try:
253 try:
293 self.current = bin(self.opener("current").read())
254 self.current = bin(self.opener("current").read())
294 except IOError:
255 except IOError:
295 self.current = None
256 self.current = None
296
257
297 def setcurrent(self, node):
258 def setcurrent(self, node):
298 self.current = node
259 self.current = node
299 self.opener("current", "w").write(hex(node))
260 self.opener("current", "w").write(hex(node))
300
261
301 def ignore(self, f):
262 def ignore(self, f):
302 if self.ignorelist is None:
263 if self.ignorelist is None:
303 self.ignorelist = []
264 self.ignorelist = []
304 try:
265 try:
305 l = open(os.path.join(self.root, ".hgignore"))
266 l = open(os.path.join(self.root, ".hgignore"))
306 for pat in l:
267 for pat in l:
307 if pat != "\n":
268 if pat != "\n":
308 self.ignorelist.append(re.compile(pat[:-1]))
269 self.ignorelist.append(re.compile(pat[:-1]))
309 except IOError: pass
270 except IOError: pass
310 for pat in self.ignorelist:
271 for pat in self.ignorelist:
311 if pat.search(f): return True
272 if pat.search(f): return True
312 return False
273 return False
313
274
314 def lookup(self, key):
275 def lookup(self, key):
315 if self.tags is None:
276 if self.tags is None:
316 self.tags = {}
277 self.tags = {}
317 try:
278 try:
318 fl = self.file(".hgtags")
279 fl = self.file(".hgtags")
319 for l in fl.revision(fl.tip()).splitlines():
280 for l in fl.revision(fl.tip()).splitlines():
320 if l:
281 if l:
321 n, k = l.split(" ")
282 n, k = l.split(" ")
322 self.tags[k] = bin(n)
283 self.tags[k] = bin(n)
323 except KeyError: pass
284 except KeyError: pass
324 try:
285 try:
325 return self.tags[key]
286 return self.tags[key]
326 except KeyError:
287 except KeyError:
327 return self.changelog.lookup(key)
288 return self.changelog.lookup(key)
328
289
329 def join(self, f):
290 def join(self, f):
330 return os.path.join(self.path, f)
291 return os.path.join(self.path, f)
331
292
332 def file(self, f):
293 def file(self, f):
333 return filelog(self.opener, f)
294 return filelog(self.opener, f)
334
295
335 def transaction(self):
296 def transaction(self):
336 return transaction(self.opener, self.join("journal"))
297 return transaction(self.opener, self.join("journal"))
337
298
    def merge(self, other):
        """Pull all new changesets from repository ``other`` and merge them.

        Imports the remote changelog, merges every file changed by the
        new changesets, resolves the manifests, and commits a merge
        changeset when the histories have actually diverged.
        """
        tr = self.transaction()
        changed = {}
        new = {}
        seqrev = self.changelog.count()
        # some magic to allow fiddling in nested scope
        nextrev = [seqrev]

        # helpers for back-linking file revisions to local changeset
        # revisions so we can immediately get to changeset from annotate
        def accumulate(text):
            # track which files are added in which changeset and the
            # corresponding _local_ changeset revision
            files = self.changelog.extract(text)[3]
            for f in files:
                changed.setdefault(f, []).append(nextrev[0])
            nextrev[0] += 1

        def seq(start):
            # endless stream of revision numbers starting at ``start``
            while 1:
                yield start
                start += 1

        def lseq(l):
            # generator view of a fixed list of revision numbers
            for r in l:
                yield r

        # begin the import/merge of changesets
        self.ui.status("merging new changesets\n")
        (co, cn) = self.changelog.mergedag(other.changelog, tr,
                                           seq(seqrev), accumulate)
        resolverev = self.changelog.count()

        # is there anything to do?
        if co == cn:
            tr.close()
            return

        # do we need to resolve?
        simple = (co == self.changelog.ancestor(co, cn))

        # merge all files changed by the changesets,
        # keeping track of the new tips
        changelist = changed.keys()
        changelist.sort()
        for f in changelist:
            sys.stdout.write(".")
            sys.stdout.flush()
            r = self.file(f)
            node = r.merge(other.file(f), tr, lseq(changed[f]), resolverev)
            if node:
                new[f] = node
        sys.stdout.write("\n")

        # begin the merge of the manifest
        self.ui.status("merging manifests\n")
        (mm, mo) = self.manifest.mergedag(other.manifest, tr, seq(seqrev))

        # For simple merges, we don't need to resolve manifests or changesets
        if simple:
            tr.close()
            return

        ma = self.manifest.ancestor(mm, mo)

        # resolve the manifest to point to all the merged files
        self.ui.status("resolving manifests\n")
        omap = self.manifest.read(mo) # other
        amap = self.manifest.read(ma) # ancestor
        mmap = self.manifest.read(mm) # mine
        nmap = {}

        for f, mid in mmap.iteritems():
            if f in omap:
                if mid != omap[f]:
                    nmap[f] = new.get(f, mid) # use merged version
                else:
                    nmap[f] = new.get(f, mid) # they're the same
                del omap[f]
            elif f in amap:
                if mid != amap[f]:
                    pass # we should prompt here
                else:
                    pass # other deleted it
            else:
                nmap[f] = new.get(f, mid) # we created it

        del mmap

        for f, oid in omap.iteritems():
            if f in amap:
                if oid != amap[f]:
                    pass # this is the nasty case, we should prompt
                else:
                    pass # probably safe
            else:
                nmap[f] = new.get(f, oid) # remote created it

        del omap
        del amap

        node = self.manifest.add(nmap, tr, resolverev, mm, mo)

        # Now all files and manifests are merged, we add the changed files
        # and manifest id to the changelog
        self.ui.status("committing merge changeset\n")
        new = new.keys()
        new.sort()
        if co == cn: cn = -1

        edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
        edittext = self.ui.edit(edittext)
        n = self.changelog.add(node, new, edittext, tr, co, cn)

        tr.close()
453
454 def commit(self, parent, update = None, text = ""):
299 def commit(self, parent, update = None, text = ""):
455 tr = self.transaction()
300 tr = self.transaction()
456
301
457 try:
302 try:
458 remove = [ l[:-1] for l in self.opener("to-remove") ]
303 remove = [ l[:-1] for l in self.opener("to-remove") ]
459 os.unlink(self.join("to-remove"))
304 os.unlink(self.join("to-remove"))
460
305
461 except IOError:
306 except IOError:
462 remove = []
307 remove = []
463
308
464 if update == None:
309 if update == None:
465 update = self.diffdir(self.root, parent)[0]
310 update = self.diffdir(self.root, parent)[0]
466
311
467 # check in files
312 # check in files
468 new = {}
313 new = {}
469 linkrev = self.changelog.count()
314 linkrev = self.changelog.count()
470 for f in update:
315 for f in update:
471 self.ui.note(f + "\n")
316 self.ui.note(f + "\n")
472 try:
317 try:
473 t = file(f).read()
318 t = file(f).read()
474 except IOError:
319 except IOError:
475 remove.append(f)
320 remove.append(f)
476 continue
321 continue
477 r = self.file(f)
322 r = self.file(f)
478 new[f] = r.add(t, tr, linkrev)
323 new[f] = r.add(t, tr, linkrev)
479
324
480 # update manifest
325 # update manifest
481 mmap = self.manifest.read(self.manifest.tip())
326 mmap = self.manifest.read(self.manifest.tip())
482 mmap.update(new)
327 mmap.update(new)
483 for f in remove:
328 for f in remove:
484 del mmap[f]
329 del mmap[f]
485 mnode = self.manifest.add(mmap, tr, linkrev)
330 mnode = self.manifest.add(mmap, tr, linkrev)
486
331
487 # add changeset
332 # add changeset
488 new = new.keys()
333 new = new.keys()
489 new.sort()
334 new.sort()
490
335
491 edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
336 edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
492 edittext += "".join(["HG: removed %s\n" % f for f in remove])
337 edittext += "".join(["HG: removed %s\n" % f for f in remove])
493 edittext = self.ui.edit(edittext)
338 edittext = self.ui.edit(edittext)
494
339
495 n = self.changelog.add(mnode, new, edittext, tr)
340 n = self.changelog.add(mnode, new, edittext, tr)
496 tr.close()
341 tr.close()
497
342
498 self.setcurrent(n)
343 self.setcurrent(n)
499 self.dircache.update(new)
344 self.dircache.update(new)
500 self.dircache.remove(remove)
345 self.dircache.remove(remove)
501
346
502 def checkdir(self, path):
347 def checkdir(self, path):
503 d = os.path.dirname(path)
348 d = os.path.dirname(path)
504 if not d: return
349 if not d: return
505 if not os.path.isdir(d):
350 if not os.path.isdir(d):
506 self.checkdir(d)
351 self.checkdir(d)
507 os.mkdir(d)
352 os.mkdir(d)
508
353
509 def checkout(self, node):
354 def checkout(self, node):
510 # checkout is really dumb at the moment
355 # checkout is really dumb at the moment
511 # it ought to basically merge
356 # it ought to basically merge
512 change = self.changelog.read(node)
357 change = self.changelog.read(node)
513 mmap = self.manifest.read(change[0])
358 mmap = self.manifest.read(change[0])
514
359
515 l = mmap.keys()
360 l = mmap.keys()
516 l.sort()
361 l.sort()
517 stats = []
362 stats = []
518 for f in l:
363 for f in l:
519 self.ui.note(f + "\n")
364 self.ui.note(f + "\n")
520 r = self.file(f)
365 r = self.file(f)
521 t = r.revision(mmap[f])
366 t = r.revision(mmap[f])
522 try:
367 try:
523 file(f, "w").write(t)
368 file(f, "w").write(t)
524 except:
369 except:
525 self.checkdir(f)
370 self.checkdir(f)
526 file(f, "w").write(t)
371 file(f, "w").write(t)
527
372
528 self.setcurrent(node)
373 self.setcurrent(node)
529 self.dircache.clear()
374 self.dircache.clear()
530 self.dircache.update(l)
375 self.dircache.update(l)
531
376
532 def diffdir(self, path, changeset):
377 def diffdir(self, path, changeset):
533 changed = []
378 changed = []
534 mf = {}
379 mf = {}
535 added = []
380 added = []
536
381
537 if changeset:
382 if changeset:
538 change = self.changelog.read(changeset)
383 change = self.changelog.read(changeset)
539 mf = self.manifest.read(change[0])
384 mf = self.manifest.read(change[0])
540
385
541 if changeset == self.current:
386 if changeset == self.current:
542 dc = self.dircache.copy()
387 dc = self.dircache.copy()
543 else:
388 else:
544 dc = dict.fromkeys(mf)
389 dc = dict.fromkeys(mf)
545
390
546 def fcmp(fn):
391 def fcmp(fn):
547 t1 = file(os.path.join(self.root, fn)).read()
392 t1 = file(os.path.join(self.root, fn)).read()
548 t2 = self.file(fn).revision(mf[fn])
393 t2 = self.file(fn).revision(mf[fn])
549 return cmp(t1, t2)
394 return cmp(t1, t2)
550
395
551 for dir, subdirs, files in os.walk(self.root):
396 for dir, subdirs, files in os.walk(self.root):
552 d = dir[len(self.root)+1:]
397 d = dir[len(self.root)+1:]
553 if ".hg" in subdirs: subdirs.remove(".hg")
398 if ".hg" in subdirs: subdirs.remove(".hg")
554
399
555 for f in files:
400 for f in files:
556 fn = os.path.join(d, f)
401 fn = os.path.join(d, f)
557 try: s = os.stat(os.path.join(self.root, fn))
402 try: s = os.stat(os.path.join(self.root, fn))
558 except: continue
403 except: continue
559 if fn in dc:
404 if fn in dc:
560 c = dc[fn]
405 c = dc[fn]
561 del dc[fn]
406 del dc[fn]
562 if not c:
407 if not c:
563 if fcmp(fn):
408 if fcmp(fn):
564 changed.append(fn)
409 changed.append(fn)
565 elif c[1] != s.st_size:
410 elif c[1] != s.st_size:
566 changed.append(fn)
411 changed.append(fn)
567 elif c[0] != s.st_mode or c[2] != s.st_mtime:
412 elif c[0] != s.st_mode or c[2] != s.st_mtime:
568 if fcmp(fn):
413 if fcmp(fn):
569 changed.append(fn)
414 changed.append(fn)
570 else:
415 else:
571 if self.ignore(fn): continue
416 if self.ignore(fn): continue
572 added.append(fn)
417 added.append(fn)
573
418
574 deleted = dc.keys()
419 deleted = dc.keys()
575 deleted.sort()
420 deleted.sort()
576
421
577 return (changed, added, deleted)
422 return (changed, added, deleted)
578
423
579 def diffrevs(self, node1, node2):
424 def diffrevs(self, node1, node2):
580 changed, added = [], []
425 changed, added = [], []
581
426
582 change = self.changelog.read(node1)
427 change = self.changelog.read(node1)
583 mf1 = self.manifest.read(change[0])
428 mf1 = self.manifest.read(change[0])
584 change = self.changelog.read(node2)
429 change = self.changelog.read(node2)
585 mf2 = self.manifest.read(change[0])
430 mf2 = self.manifest.read(change[0])
586
431
587 for fn in mf2:
432 for fn in mf2:
588 if mf1.has_key(fn):
433 if mf1.has_key(fn):
589 if mf1[fn] != mf2[fn]:
434 if mf1[fn] != mf2[fn]:
590 changed.append(fn)
435 changed.append(fn)
591 del mf1[fn]
436 del mf1[fn]
592 else:
437 else:
593 added.append(fn)
438 added.append(fn)
594
439
595 deleted = mf1.keys()
440 deleted = mf1.keys()
596 deleted.sort()
441 deleted.sort()
597
442
598 return (changed, added, deleted)
443 return (changed, added, deleted)
599
444
600 def add(self, list):
445 def add(self, list):
601 self.dircache.taint(list)
446 self.dircache.taint(list)
602
447
603 def remove(self, list):
448 def remove(self, list):
604 dl = self.opener("to-remove", "a")
449 dl = self.opener("to-remove", "a")
605 for f in list:
450 for f in list:
606 dl.write(f + "\n")
451 dl.write(f + "\n")
607
452
608 def branches(self, nodes):
453 def branches(self, nodes):
609 if not nodes: nodes = [self.changelog.tip()]
454 if not nodes: nodes = [self.changelog.tip()]
610 b = []
455 b = []
611 for n in nodes:
456 for n in nodes:
612 t = n
457 t = n
613 while n:
458 while n:
614 p = self.changelog.parents(n)
459 p = self.changelog.parents(n)
615 if p[1] != nullid or p[0] == nullid:
460 if p[1] != nullid or p[0] == nullid:
616 b.append((t, n, p[0], p[1]))
461 b.append((t, n, p[0], p[1]))
617 break
462 break
618 n = p[0]
463 n = p[0]
619 return b
464 return b
620
465
621 def between(self, pairs):
466 def between(self, pairs):
622 r = []
467 r = []
623
468
624 for top, bottom in pairs:
469 for top, bottom in pairs:
625 n, l, i = top, [], 0
470 n, l, i = top, [], 0
626 f = 1
471 f = 1
627
472
628 while n != bottom:
473 while n != bottom:
629 p = self.changelog.parents(n)[0]
474 p = self.changelog.parents(n)[0]
630 if i == f:
475 if i == f:
631 l.append(n)
476 l.append(n)
632 f = f * 2
477 f = f * 2
633 n = p
478 n = p
634 i += 1
479 i += 1
635
480
636 r.append(l)
481 r.append(l)
637
482
638 return r
483 return r
639
484
    def newer(self, nodes):
        """Return ``nodes`` plus all changelog nodes descended from them,
        in revision order."""
        m = {}    # the explicitly requested nodes
        nl = []   # accumulated result
        pm = {}   # nodes already known to belong to the result set
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        # scan forward from the lowest revision; a node is included when
        # it was requested or when one of its parents is included
        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl
658
510
659 def getchangegroup(self, remote):
511 def getchangegroup(self, remote):
660 tip = remote.branches([])[0]
512 tip = remote.branches([])[0]
661 self.ui.debug("remote tip branch is %s:%s\n" %
513 self.ui.debug("remote tip branch is %s:%s\n" %
662 (short(tip[0]), short(tip[1])))
514 (short(tip[0]), short(tip[1])))
663 m = self.changelog.nodemap
515 m = self.changelog.nodemap
664 unknown = [tip]
516 unknown = [tip]
665 search = []
517 search = []
666 fetch = []
518 fetch = []
667
519
668 if tip[0] in m:
520 if tip[0] in m:
669 self.ui.note("nothing to do!\n")
521 self.ui.note("nothing to do!\n")
670 return None
522 return None
671
523
672 while unknown:
524 while unknown:
673 n = unknown.pop(0)
525 n = unknown.pop(0)
674 if n == nullid: break
526 if n == nullid: break
675 if n[1] and n[1] in m: # do we know the base?
527 if n[1] and n[1] in m: # do we know the base?
676 self.ui.debug("found incomplete branch %s\n" % short(n[1]))
528 self.ui.debug("found incomplete branch %s\n" % short(n[1]))
677 search.append(n) # schedule branch range for scanning
529 search.append(n) # schedule branch range for scanning
678 else:
530 else:
531 if n[2] in m and n[3] in m:
532 if n[1] not in fetch:
533 self.ui.debug("found new changeset %s\n" %
534 short(n[1]))
535 fetch.append(n[1]) # earliest unknown
536 continue
679 for b in remote.branches([n[2], n[3]]):
537 for b in remote.branches([n[2], n[3]]):
680 if b[0] in m:
538 if b[0] not in m:
681 if n[1] not in fetch:
682 self.ui.debug("found new changeset %s\n" %
683 short(n[1]))
684 fetch.append(n[1]) # earliest unknown
685 else:
686 unknown.append(b)
539 unknown.append(b)
687
540
688 while search:
541 while search:
689 n = search.pop(0)
542 n = search.pop(0)
690 l = remote.between([(n[0], n[1])])[0]
543 l = remote.between([(n[0], n[1])])[0]
691 p = n[0]
544 p = n[0]
692 f = 1
545 f = 1
693 for i in l + [n[1]]:
546 for i in l + [n[1]]:
694 if i in m:
547 if i in m:
695 if f <= 2:
548 if f <= 2:
696 self.ui.debug("found new branch changeset %s\n" %
549 self.ui.debug("found new branch changeset %s\n" %
697 short(p))
550 short(p))
698 fetch.append(p)
551 fetch.append(p)
699 else:
552 else:
700 self.ui.debug("narrowed branch search to %s:%s\n"
553 self.ui.debug("narrowed branch search to %s:%s\n"
701 % (short(p), short(i)))
554 % (short(p), short(i)))
702 search.append((p, i))
555 search.append((p, i))
703 break
556 break
704 p, f = i, f * 2
557 p, f = i, f * 2
705
558
706 for f in fetch:
559 for f in fetch:
707 if f in m:
560 if f in m:
708 raise "already have", short(f[:4])
561 raise "already have", short(f[:4])
709
562
710 self.ui.note("merging new changesets starting at " +
563 self.ui.note("adding new changesets starting at " +
711 " ".join([short(f) for f in fetch]) + "\n")
564 " ".join([short(f) for f in fetch]) + "\n")
712
565
713 return remote.changegroup(fetch)
566 return remote.changegroup(fetch)
714
567
715 def changegroup(self, basenodes):
568 def changegroup(self, basenodes):
716 nodes = self.newer(basenodes)
569 nodes = self.newer(basenodes)
717
570
718 # construct the link map
571 # construct the link map
719 linkmap = {}
572 linkmap = {}
720 for n in nodes:
573 for n in nodes:
721 linkmap[self.changelog.rev(n)] = n
574 linkmap[self.changelog.rev(n)] = n
722
575
723 # construct a list of all changed files
576 # construct a list of all changed files
724 changed = {}
577 changed = {}
725 for n in nodes:
578 for n in nodes:
726 c = self.changelog.read(n)
579 c = self.changelog.read(n)
727 for f in c[3]:
580 for f in c[3]:
728 changed[f] = 1
581 changed[f] = 1
729 changed = changed.keys()
582 changed = changed.keys()
730 changed.sort()
583 changed.sort()
731
584
732 # the changegroup is changesets + manifests + all file revs
585 # the changegroup is changesets + manifests + all file revs
733 revs = [ self.changelog.rev(n) for n in nodes ]
586 revs = [ self.changelog.rev(n) for n in nodes ]
734
587
735 yield self.changelog.group(linkmap)
588 yield self.changelog.group(linkmap)
736 yield self.manifest.group(linkmap)
589 yield self.manifest.group(linkmap)
737
590
738 for f in changed:
591 for f in changed:
739 g = self.file(f).group(linkmap)
592 g = self.file(f).group(linkmap)
740 if not g: raise "couldn't find change to %s" % f
593 if not g: raise "couldn't find change to %s" % f
741 l = struct.pack(">l", len(f))
594 l = struct.pack(">l", len(f))
742 yield "".join([l, f, g])
595 yield "".join([l, f, g])
743
596
744 def addchangegroup(self, generator):
597 def addchangegroup(self, generator):
745 class genread:
598 class genread:
746 def __init__(self, generator):
599 def __init__(self, generator):
747 self.g = generator
600 self.g = generator
748 self.buf = ""
601 self.buf = ""
749 def read(self, l):
602 def read(self, l):
750 while l > len(self.buf):
603 while l > len(self.buf):
751 try:
604 try:
752 self.buf += self.g.next()
605 self.buf += self.g.next()
753 except StopIteration:
606 except StopIteration:
754 break
607 break
755 d, self.buf = self.buf[:l], self.buf[l:]
608 d, self.buf = self.buf[:l], self.buf[l:]
756 return d
609 return d
757
610
758 if not generator: return
611 if not generator: return
759 source = genread(generator)
612 source = genread(generator)
760
613
761 def getchunk(add = 0):
614 def getchunk(add = 0):
762 d = source.read(4)
615 d = source.read(4)
763 if not d: return ""
616 if not d: return ""
764 l = struct.unpack(">l", d)[0]
617 l = struct.unpack(">l", d)[0]
765 return source.read(l - 4 + add)
618 return source.read(l - 4 + add)
766
619
767 tr = self.transaction()
620 tr = self.transaction()
768 simple = True
621 simple = True
769
622
770 self.ui.status("merging changesets\n")
623 self.ui.status("adding changesets\n")
771 # pull off the changeset group
624 # pull off the changeset group
625 def report(x):
626 self.ui.debug("add changeset %s\n" % short(x))
627 return self.changelog.count()
628
772 csg = getchunk()
629 csg = getchunk()
773 co = self.changelog.tip()
630 co = self.changelog.tip()
774 cn = self.changelog.addgroup(csg, lambda x: self.changelog.count(), tr)
631 cn = self.changelog.addgroup(csg, report, tr)
775
632
776 self.ui.status("merging manifests\n")
633 self.ui.status("adding manifests\n")
777 # pull off the manifest group
634 # pull off the manifest group
778 mfg = getchunk()
635 mfg = getchunk()
779 mm = self.manifest.tip()
636 mm = self.manifest.tip()
780 mo = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)
637 mo = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)
781
638
782 # do we need a resolve?
639 # do we need a resolve?
783 if self.changelog.ancestor(co, cn) != co:
640 if self.changelog.ancestor(co, cn) != co:
784 simple = False
641 simple = False
785 resolverev = self.changelog.count()
642 resolverev = self.changelog.count()
786
643
787 # process the files
644 # process the files
788 self.ui.status("merging files\n")
645 self.ui.status("adding files\n")
789 new = {}
646 new = {}
790 while 1:
647 while 1:
791 f = getchunk(4)
648 f = getchunk(4)
792 if not f: break
649 if not f: break
793 fg = getchunk()
650 fg = getchunk()
794
651 self.ui.debug("adding %s revisions\n" % f)
795 fl = self.file(f)
652 fl = self.file(f)
796 o = fl.tip()
653 o = fl.tip()
797 n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
654 n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
798 if not simple:
655 if not simple:
799 nn = fl.resolvedag(o, n, tr, resolverev)
656 if o == n: continue
800 if nn:
657 # this file has changed between branches, so it must be
801 self.ui.note("merged %s\n", f)
658 # represented in the merge changeset
802 new[f] = nn
659 new[f] = self.merge3(fl, f, o, n, tr, resolverev)
803
660
804 # For simple merges, we don't need to resolve manifests or changesets
661 # For simple merges, we don't need to resolve manifests or changesets
805 if simple:
662 if simple:
806 self.ui.debug("simple merge, skipping resolve\n")
663 self.ui.debug("simple merge, skipping resolve\n")
807 tr.close()
664 tr.close()
808 return
665 return
809
666
810 # resolve the manifest to point to all the merged files
667 # resolve the manifest to point to all the merged files
811 self.ui.status("resolving manifests\n")
668 self.ui.status("resolving manifests\n")
812 ma = self.manifest.ancestor(mm, mo)
669 ma = self.manifest.ancestor(mm, mo)
813 omap = self.manifest.read(mo) # other
670 omap = self.manifest.read(mo) # other
814 amap = self.manifest.read(ma) # ancestor
671 amap = self.manifest.read(ma) # ancestor
815 mmap = self.manifest.read(mm) # mine
672 mmap = self.manifest.read(mm) # mine
816 self.ui.debug("ancestor %s local %s other %s\n" %
673 self.ui.debug("ancestor %s local %s other %s\n" %
817 (short(ma), short(mm), short(mo)))
674 (short(ma), short(mm), short(mo)))
818 nmap = {}
675 nmap = {}
819
676
820 for f, mid in mmap.iteritems():
677 for f, mid in mmap.iteritems():
821 if f in omap:
678 if f in omap:
822 if mid != omap[f]:
679 if mid != omap[f]:
823 self.ui.debug("%s versions differ\n" % f)
680 self.ui.debug("%s versions differ\n" % f)
824 if f in new: self.ui.note("%s updated in resolve\n" % f)
681 if f in new: self.ui.debug("%s updated in resolve\n" % f)
825 nmap[f] = new.get(f, mid) # use merged version
682 # use merged version or local version
683 nmap[f] = new.get(f, mid)
826 else:
684 else:
827 nmap[f] = mid # keep ours
685 nmap[f] = mid # keep ours
828 del omap[f]
686 del omap[f]
829 elif f in amap:
687 elif f in amap:
830 if mid != amap[f]:
688 if mid != amap[f]:
831 self.ui.debug("local changed %s which other deleted\n" % f)
689 self.ui.debug("local changed %s which other deleted\n" % f)
832 pass # we should prompt here
690 pass # we should prompt here
833 else:
691 else:
834 self.ui.debug("other deleted %s\n" % f)
692 self.ui.debug("other deleted %s\n" % f)
835 pass # other deleted it
693 pass # other deleted it
836 else:
694 else:
837 self.ui.debug("local created %s\n" %f)
695 self.ui.debug("local created %s\n" %f)
838 nmap[f] = mid # we created it
696 nmap[f] = mid # we created it
839
697
840 del mmap
698 del mmap
841
699
842 for f, oid in omap.iteritems():
700 for f, oid in omap.iteritems():
843 if f in amap:
701 if f in amap:
844 if oid != amap[f]:
702 if oid != amap[f]:
845 self.ui.debug("other changed %s which we deleted\n" % f)
703 self.ui.debug("other changed %s which we deleted\n" % f)
846 pass # this is the nasty case, we should prompt
704 pass # this is the nasty case, we should prompt
847 else:
705 else:
848 pass # probably safe
706 pass # probably safe
849 else:
707 else:
850 self.ui.debug("remote created %s\n" % f)
708 self.ui.debug("remote created %s\n" % f)
851 nmap[f] = new.get(f, oid) # remote created it
709 nmap[f] = new.get(f, oid) # remote created it
852
710
853 del omap
711 del omap
854 del amap
712 del amap
855
713
856 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
714 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
857
715
858 # Now all files and manifests are merged, we add the changed files
716 # Now all files and manifests are merged, we add the changed files
859 # and manifest id to the changelog
717 # and manifest id to the changelog
860 self.ui.status("committing merge changeset\n")
718 self.ui.status("committing merge changeset\n")
861 new = new.keys()
719 new = new.keys()
862 new.sort()
720 new.sort()
863 if co == cn: cn = -1
721 if co == cn: cn = -1
864
722
865 edittext = "\nHG: merge resolve\n" + \
723 edittext = "\nHG: merge resolve\n" + \
866 "".join(["HG: changed %s\n" % f for f in new])
724 "".join(["HG: changed %s\n" % f for f in new])
867 edittext = self.ui.edit(edittext)
725 edittext = self.ui.edit(edittext)
868 n = self.changelog.add(node, new, edittext, tr, co, cn)
726 n = self.changelog.add(node, new, edittext, tr, co, cn)
869
727
870 tr.close()
728 tr.close()
871
729
872 class remoterepository:
730 class remoterepository:
873 def __init__(self, ui, path):
731 def __init__(self, ui, path):
874 self.url = path.replace("hg://", "http://", 1)
732 self.url = path.replace("hg://", "http://", 1)
875 self.ui = ui
733 self.ui = ui
876
734
877 def do_cmd(self, cmd, **args):
735 def do_cmd(self, cmd, **args):
878 self.ui.debug("sending %s command\n" % cmd)
736 self.ui.debug("sending %s command\n" % cmd)
879 q = {"cmd": cmd}
737 q = {"cmd": cmd}
880 q.update(args)
738 q.update(args)
881 qs = urllib.urlencode(q)
739 qs = urllib.urlencode(q)
882 cu = "%s?%s" % (self.url, qs)
740 cu = "%s?%s" % (self.url, qs)
883 return urllib.urlopen(cu)
741 return urllib.urlopen(cu)
884
742
885 def branches(self, nodes):
743 def branches(self, nodes):
886 n = " ".join(map(hex, nodes))
744 n = " ".join(map(hex, nodes))
887 d = self.do_cmd("branches", nodes=n).read()
745 d = self.do_cmd("branches", nodes=n).read()
888 br = [ map(bin, b.split(" ")) for b in d.splitlines() ]
746 br = [ map(bin, b.split(" ")) for b in d.splitlines() ]
889 return br
747 return br
890
748
891 def between(self, pairs):
749 def between(self, pairs):
892 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
750 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
893 d = self.do_cmd("between", pairs=n).read()
751 d = self.do_cmd("between", pairs=n).read()
894 p = [ map(bin, l.split(" ")) for l in d.splitlines() ]
752 p = [ map(bin, l.split(" ")) for l in d.splitlines() ]
895 return p
753 return p
896
754
897 def changegroup(self, nodes):
755 def changegroup(self, nodes):
898 n = " ".join(map(hex, nodes))
756 n = " ".join(map(hex, nodes))
899 zd = zlib.decompressobj()
757 zd = zlib.decompressobj()
900 f = self.do_cmd("changegroup", roots=n)
758 f = self.do_cmd("changegroup", roots=n)
901 while 1:
759 while 1:
902 d = f.read(4096)
760 d = f.read(4096)
903 if not d:
761 if not d:
904 yield zd.flush()
762 yield zd.flush()
905 break
763 break
906 yield zd.decompress(d)
764 yield zd.decompress(d)
907
765
908 def repository(ui, path=None, create=0):
766 def repository(ui, path=None, create=0):
909 if path and path[:5] == "hg://":
767 if path and path[:5] == "hg://":
910 return remoterepository(ui, path)
768 return remoterepository(ui, path)
911 else:
769 else:
912 return localrepository(ui, path, create)
770 return localrepository(ui, path, create)
913
771
914 class ui:
772 class ui:
915 def __init__(self, verbose=False, debug=False, quiet=False):
773 def __init__(self, verbose=False, debug=False, quiet=False):
916 self.quiet = quiet and not verbose and not debug
774 self.quiet = quiet and not verbose and not debug
917 self.verbose = verbose or debug
775 self.verbose = verbose or debug
918 self.debugflag = debug
776 self.debugflag = debug
919 def write(self, *args):
777 def write(self, *args):
920 for a in args:
778 for a in args:
921 sys.stdout.write(str(a))
779 sys.stdout.write(str(a))
922 def prompt(self, msg, pat):
780 def prompt(self, msg, pat):
923 while 1:
781 while 1:
924 sys.stdout.write(msg)
782 sys.stdout.write(msg)
925 r = sys.stdin.readline()[:-1]
783 r = sys.stdin.readline()[:-1]
926 if re.match(pat, r):
784 if re.match(pat, r):
927 return r
785 return r
928 def status(self, *msg):
786 def status(self, *msg):
929 if not self.quiet: self.write(*msg)
787 if not self.quiet: self.write(*msg)
930 def warn(self, msg):
788 def warn(self, msg):
931 self.write(*msg)
789 self.write(*msg)
932 def note(self, msg):
790 def note(self, msg):
933 if self.verbose: self.write(*msg)
791 if self.verbose: self.write(*msg)
934 def debug(self, msg):
792 def debug(self, msg):
935 if self.debugflag: self.write(*msg)
793 if self.debugflag: self.write(*msg)
936 def edit(self, text):
794 def edit(self, text):
937 (fd, name) = tempfile.mkstemp("hg")
795 (fd, name) = tempfile.mkstemp("hg")
938 f = os.fdopen(fd, "w")
796 f = os.fdopen(fd, "w")
939 f.write(text)
797 f.write(text)
940 f.close()
798 f.close()
941
799
942 editor = os.environ.get("EDITOR", "vi")
800 editor = os.environ.get("EDITOR", "vi")
943 r = os.system("%s %s" % (editor, name))
801 r = os.system("%s %s" % (editor, name))
944 if r:
802 if r:
945 raise "Edit failed!"
803 raise "Edit failed!"
946
804
947 t = open(name).read()
805 t = open(name).read()
948 t = re.sub("(?m)^HG:.*\n", "", t)
806 t = re.sub("(?m)^HG:.*\n", "", t)
949
807
950 return t
808 return t
951
809
952
810
953 class httprangereader:
811 class httprangereader:
954 def __init__(self, url):
812 def __init__(self, url):
955 self.url = url
813 self.url = url
956 self.pos = 0
814 self.pos = 0
957 def seek(self, pos):
815 def seek(self, pos):
958 self.pos = pos
816 self.pos = pos
959 def read(self, bytes=None):
817 def read(self, bytes=None):
960 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
818 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
961 urllib2.install_opener(opener)
819 urllib2.install_opener(opener)
962 req = urllib2.Request(self.url)
820 req = urllib2.Request(self.url)
963 end = ''
821 end = ''
964 if bytes: end = self.pos + bytes
822 if bytes: end = self.pos + bytes
965 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
823 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
966 f = urllib2.urlopen(req)
824 f = urllib2.urlopen(req)
967 return f.read()
825 return f.read()
@@ -1,481 +1,450 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # This provides efficient delta storage with O(1) retrieve and append
3 # This provides efficient delta storage with O(1) retrieve and append
4 # and O(changes) merge between branches
4 # and O(changes) merge between branches
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 import zlib, struct, sha, os, tempfile, binascii
11 import zlib, struct, sha, os, tempfile, binascii
12 from mercurial import mdiff
12 from mercurial import mdiff
13
13
14 def hex(node): return binascii.hexlify(node)
14 def hex(node): return binascii.hexlify(node)
15 def bin(node): return binascii.unhexlify(node)
15 def bin(node): return binascii.unhexlify(node)
16 def short(node): return hex(node[:4])
16 def short(node): return hex(node[:4])
17
17
18 def compress(text):
18 def compress(text):
19 return zlib.compress(text)
19 return zlib.compress(text)
20
20
21 def decompress(bin):
21 def decompress(bin):
22 return zlib.decompress(bin)
22 return zlib.decompress(bin)
23
23
24 def hash(text, p1, p2):
24 def hash(text, p1, p2):
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 return sha.sha(l[0] + l[1] + text).digest()
27 return sha.sha(l[0] + l[1] + text).digest()
28
28
29 nullid = "\0" * 20
29 nullid = "\0" * 20
30 indexformat = ">4l20s20s20s"
30 indexformat = ">4l20s20s20s"
31
31
32 class lazyparser:
32 class lazyparser:
33 def __init__(self, data):
33 def __init__(self, data):
34 self.data = data
34 self.data = data
35 self.s = struct.calcsize(indexformat)
35 self.s = struct.calcsize(indexformat)
36 self.l = len(data)/self.s
36 self.l = len(data)/self.s
37 self.index = [None] * self.l
37 self.index = [None] * self.l
38 self.map = {nullid: -1}
38 self.map = {nullid: -1}
39
39
40 if 0:
40 if 0:
41 n = 0
41 n = 0
42 i = self.data
42 i = self.data
43 s = struct.calcsize(indexformat)
43 s = struct.calcsize(indexformat)
44 for f in xrange(0, len(i), s):
44 for f in xrange(0, len(i), s):
45 # offset, size, base, linkrev, p1, p2, nodeid
45 # offset, size, base, linkrev, p1, p2, nodeid
46 e = struct.unpack(indexformat, i[f:f + s])
46 e = struct.unpack(indexformat, i[f:f + s])
47 self.map[e[6]] = n
47 self.map[e[6]] = n
48 self.index.append(e)
48 self.index.append(e)
49 n += 1
49 n += 1
50
50
51 def load(self, pos):
51 def load(self, pos):
52 block = pos / 1000
52 block = pos / 1000
53 i = block * 1000
53 i = block * 1000
54 end = min(self.l, i + 1000)
54 end = min(self.l, i + 1000)
55 while i < end:
55 while i < end:
56 d = self.data[i * self.s: (i + 1) * self.s]
56 d = self.data[i * self.s: (i + 1) * self.s]
57 e = struct.unpack(indexformat, d)
57 e = struct.unpack(indexformat, d)
58 self.index[i] = e
58 self.index[i] = e
59 self.map[e[6]] = i
59 self.map[e[6]] = i
60 i += 1
60 i += 1
61
61
62 class lazyindex:
62 class lazyindex:
63 def __init__(self, parser):
63 def __init__(self, parser):
64 self.p = parser
64 self.p = parser
65 def __len__(self):
65 def __len__(self):
66 return len(self.p.index)
66 return len(self.p.index)
67 def __getitem__(self, pos):
67 def __getitem__(self, pos):
68 i = self.p.index[pos]
68 i = self.p.index[pos]
69 if not i:
69 if not i:
70 self.p.load(pos)
70 self.p.load(pos)
71 return self.p.index[pos]
71 return self.p.index[pos]
72 return i
72 return i
73 def append(self, e):
73 def append(self, e):
74 self.p.index.append(e)
74 self.p.index.append(e)
75
75
76 class lazymap:
76 class lazymap:
77 def __init__(self, parser):
77 def __init__(self, parser):
78 self.p = parser
78 self.p = parser
79 def load(self, key):
79 def load(self, key):
80 n = self.p.data.find(key)
80 n = self.p.data.find(key)
81 if n < 0: raise KeyError("node " + hex(key))
81 if n < 0: raise KeyError("node " + hex(key))
82 pos = n / self.p.s
82 pos = n / self.p.s
83 self.p.load(pos)
83 self.p.load(pos)
84 def __contains__(self, key):
84 def __contains__(self, key):
85 try:
85 try:
86 self[key]
86 self[key]
87 return True
87 return True
88 except KeyError:
88 except KeyError:
89 return False
89 return False
90 def __getitem__(self, key):
90 def __getitem__(self, key):
91 try:
91 try:
92 return self.p.map[key]
92 return self.p.map[key]
93 except KeyError:
93 except KeyError:
94 try:
94 try:
95 self.load(key)
95 self.load(key)
96 return self.p.map[key]
96 return self.p.map[key]
97 except KeyError:
97 except KeyError:
98 raise KeyError("node " + hex(key))
98 raise KeyError("node " + hex(key))
99 def __setitem__(self, key, val):
99 def __setitem__(self, key, val):
100 self.p.map[key] = val
100 self.p.map[key] = val
101
101
102 class revlog:
102 class revlog:
103 def __init__(self, opener, indexfile, datafile):
103 def __init__(self, opener, indexfile, datafile):
104 self.indexfile = indexfile
104 self.indexfile = indexfile
105 self.datafile = datafile
105 self.datafile = datafile
106 self.opener = opener
106 self.opener = opener
107 self.cache = None
107 self.cache = None
108 # read the whole index for now, handle on-demand later
108 # read the whole index for now, handle on-demand later
109 try:
109 try:
110 i = self.opener(self.indexfile).read()
110 i = self.opener(self.indexfile).read()
111 except IOError:
111 except IOError:
112 i = ""
112 i = ""
113 parser = lazyparser(i)
113 parser = lazyparser(i)
114 self.index = lazyindex(parser)
114 self.index = lazyindex(parser)
115 self.nodemap = lazymap(parser)
115 self.nodemap = lazymap(parser)
116
116
117 def tip(self): return self.node(len(self.index) - 1)
117 def tip(self): return self.node(len(self.index) - 1)
118 def count(self): return len(self.index)
118 def count(self): return len(self.index)
119 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
119 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
120 def rev(self, node): return self.nodemap[node]
120 def rev(self, node): return self.nodemap[node]
121 def linkrev(self, node): return self.index[self.nodemap[node]][3]
121 def linkrev(self, node): return self.index[self.nodemap[node]][3]
122 def parents(self, node):
122 def parents(self, node):
123 if node == nullid: return (nullid, nullid)
123 if node == nullid: return (nullid, nullid)
124 return self.index[self.nodemap[node]][4:6]
124 return self.index[self.nodemap[node]][4:6]
125
125
126 def start(self, rev): return self.index[rev][0]
126 def start(self, rev): return self.index[rev][0]
127 def length(self, rev): return self.index[rev][1]
127 def length(self, rev): return self.index[rev][1]
128 def end(self, rev): return self.start(rev) + self.length(rev)
128 def end(self, rev): return self.start(rev) + self.length(rev)
129 def base(self, rev): return self.index[rev][2]
129 def base(self, rev): return self.index[rev][2]
130
130
131 def lookup(self, id):
131 def lookup(self, id):
132 try:
132 try:
133 rev = int(id)
133 rev = int(id)
134 return self.node(rev)
134 return self.node(rev)
135 except ValueError:
135 except ValueError:
136 c = []
136 c = []
137 for n in self.nodemap:
137 for n in self.nodemap:
138 if id in hex(n):
138 if id in hex(n):
139 c.append(n)
139 c.append(n)
140 if len(c) > 1: raise KeyError("Ambiguous identifier")
140 if len(c) > 1: raise KeyError("Ambiguous identifier")
141 if len(c) < 1: raise KeyError("No match found")
141 if len(c) < 1: raise KeyError("No match found")
142 return c[0]
142 return c[0]
143
143
144 return None
144 return None
145
145
146 def revisions(self, list):
147 # this can be optimized to do spans, etc
148 # be stupid for now
149 for node in list:
150 yield self.revision(node)
151
152 def diff(self, a, b):
146 def diff(self, a, b):
153 return mdiff.textdiff(a, b)
147 return mdiff.textdiff(a, b)
154
148
155 def patches(self, t, pl):
149 def patches(self, t, pl):
156 return mdiff.patches(t, pl)
150 return mdiff.patches(t, pl)
157
151
158 def revision(self, node):
152 def revision(self, node):
159 if node == nullid: return ""
153 if node == nullid: return ""
160 if self.cache and self.cache[0] == node: return self.cache[2]
154 if self.cache and self.cache[0] == node: return self.cache[2]
161
155
162 text = None
156 text = None
163 rev = self.rev(node)
157 rev = self.rev(node)
164 base = self.base(rev)
158 base = self.base(rev)
165 start = self.start(base)
159 start = self.start(base)
166 end = self.end(rev)
160 end = self.end(rev)
167
161
168 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
162 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
169 base = self.cache[1]
163 base = self.cache[1]
170 start = self.start(base + 1)
164 start = self.start(base + 1)
171 text = self.cache[2]
165 text = self.cache[2]
172 last = 0
166 last = 0
173
167
174 f = self.opener(self.datafile)
168 f = self.opener(self.datafile)
175 f.seek(start)
169 f.seek(start)
176 data = f.read(end - start)
170 data = f.read(end - start)
177
171
178 if not text:
172 if not text:
179 last = self.length(base)
173 last = self.length(base)
180 text = decompress(data[:last])
174 text = decompress(data[:last])
181
175
182 bins = []
176 bins = []
183 for r in xrange(base + 1, rev + 1):
177 for r in xrange(base + 1, rev + 1):
184 s = self.length(r)
178 s = self.length(r)
185 bins.append(decompress(data[last:last + s]))
179 bins.append(decompress(data[last:last + s]))
186 last = last + s
180 last = last + s
187
181
188 text = mdiff.patches(text, bins)
182 text = mdiff.patches(text, bins)
189
183
190 (p1, p2) = self.parents(node)
184 (p1, p2) = self.parents(node)
191 if node != hash(text, p1, p2):
185 if node != hash(text, p1, p2):
192 raise "integrity check failed on %s:%d" % (self.datafile, rev)
186 raise "integrity check failed on %s:%d" % (self.datafile, rev)
193
187
194 self.cache = (node, rev, text)
188 self.cache = (node, rev, text)
195 return text
189 return text
196
190
197 def addrevision(self, text, transaction, link, p1=None, p2=None):
191 def addrevision(self, text, transaction, link, p1=None, p2=None):
198 if text is None: text = ""
192 if text is None: text = ""
199 if p1 is None: p1 = self.tip()
193 if p1 is None: p1 = self.tip()
200 if p2 is None: p2 = nullid
194 if p2 is None: p2 = nullid
201
195
202 node = hash(text, p1, p2)
196 node = hash(text, p1, p2)
203
197
204 n = self.count()
198 n = self.count()
205 t = n - 1
199 t = n - 1
206
200
207 if n:
201 if n:
208 base = self.base(t)
202 base = self.base(t)
209 start = self.start(base)
203 start = self.start(base)
210 end = self.end(t)
204 end = self.end(t)
211 prev = self.revision(self.tip())
205 prev = self.revision(self.tip())
212 data = compress(self.diff(prev, text))
206 data = compress(self.diff(prev, text))
213 dist = end - start + len(data)
207 dist = end - start + len(data)
214
208
215 # full versions are inserted when the needed deltas
209 # full versions are inserted when the needed deltas
216 # become comparable to the uncompressed text
210 # become comparable to the uncompressed text
217 if not n or dist > len(text) * 2:
211 if not n or dist > len(text) * 2:
218 data = compress(text)
212 data = compress(text)
219 base = n
213 base = n
220 else:
214 else:
221 base = self.base(t)
215 base = self.base(t)
222
216
223 offset = 0
217 offset = 0
224 if t >= 0:
218 if t >= 0:
225 offset = self.end(t)
219 offset = self.end(t)
226
220
227 e = (offset, len(data), base, link, p1, p2, node)
221 e = (offset, len(data), base, link, p1, p2, node)
228
222
229 self.index.append(e)
223 self.index.append(e)
230 self.nodemap[node] = n
224 self.nodemap[node] = n
231 entry = struct.pack(indexformat, *e)
225 entry = struct.pack(indexformat, *e)
232
226
233 transaction.add(self.datafile, e[0])
227 transaction.add(self.datafile, e[0])
234 self.opener(self.datafile, "a").write(data)
228 self.opener(self.datafile, "a").write(data)
235 transaction.add(self.indexfile, n * len(entry))
229 transaction.add(self.indexfile, n * len(entry))
236 self.opener(self.indexfile, "a").write(entry)
230 self.opener(self.indexfile, "a").write(entry)
237
231
238 self.cache = (node, n, text)
232 self.cache = (node, n, text)
239 return node
233 return node
240
234
241 def ancestor(self, a, b):
235 def ancestor(self, a, b):
242 def expand(list, map):
236 def expand(list, map):
243 a = []
237 a = []
244 while list:
238 while list:
245 n = list.pop(0)
239 n = list.pop(0)
246 map[n] = 1
240 map[n] = 1
247 yield n
241 yield n
248 for p in self.parents(n):
242 for p in self.parents(n):
249 if p != nullid and p not in map:
243 if p != nullid and p not in map:
250 list.append(p)
244 list.append(p)
251 yield nullid
245 yield nullid
252
246
253 amap = {}
247 amap = {}
254 bmap = {}
248 bmap = {}
255 ag = expand([a], amap)
249 ag = expand([a], amap)
256 bg = expand([b], bmap)
250 bg = expand([b], bmap)
257 adone = bdone = 0
251 adone = bdone = 0
258
252
259 while not adone or not bdone:
253 while not adone or not bdone:
260 if not adone:
254 if not adone:
261 an = ag.next()
255 an = ag.next()
262 if an == nullid:
256 if an == nullid:
263 adone = 1
257 adone = 1
264 elif an in bmap:
258 elif an in bmap:
265 return an
259 return an
266 if not bdone:
260 if not bdone:
267 bn = bg.next()
261 bn = bg.next()
268 if bn == nullid:
262 if bn == nullid:
269 bdone = 1
263 bdone = 1
270 elif bn in amap:
264 elif bn in amap:
271 return bn
265 return bn
272
266
273 return nullid
267 return nullid
274
268
275 def mergedag(self, other, transaction, linkseq, accumulate = None):
276 """combine the nodes from other's DAG into ours"""
277 old = self.tip()
278 i = self.count()
279 l = []
280
281 # merge the other revision log into our DAG
282 for r in range(other.count()):
283 id = other.node(r)
284 if id not in self.nodemap:
285 (xn, yn) = other.parents(id)
286 l.append((id, xn, yn))
287 self.nodemap[id] = i
288 i += 1
289
290 # merge node date for new nodes
291 r = other.revisions([e[0] for e in l])
292 for e in l:
293 t = r.next()
294 if accumulate: accumulate(t)
295 self.addrevision(t, transaction, linkseq.next(), e[1], e[2])
296
297 # return the unmerged heads for later resolving
298 return (old, self.tip())
299
300 def group(self, linkmap):
269 def group(self, linkmap):
301 # given a list of changeset revs, return a set of deltas and
270 # given a list of changeset revs, return a set of deltas and
302 # metadata corresponding to nodes the first delta is
271 # metadata corresponding to nodes. the first delta is
303 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
272 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
304 # have this parent as it has all history before these
273 # have this parent as it has all history before these
305 # changesets. parent is parent[0]
274 # changesets. parent is parent[0]
306
275
307 revs = []
276 revs = []
308 needed = {}
277 needed = {}
309
278
310 # find file nodes/revs that match changeset revs
279 # find file nodes/revs that match changeset revs
311 for i in xrange(0, self.count()):
280 for i in xrange(0, self.count()):
312 if self.index[i][3] in linkmap:
281 if self.index[i][3] in linkmap:
313 revs.append(i)
282 revs.append(i)
314 needed[i] = 1
283 needed[i] = 1
315
284
316 # if we don't have any revisions touched by these changesets, bail
285 # if we don't have any revisions touched by these changesets, bail
317 if not revs: return struct.pack(">l", 0)
286 if not revs: return struct.pack(">l", 0)
318
287
319 # add the parent of the first rev
288 # add the parent of the first rev
320 p = self.parents(self.node(revs[0]))[0]
289 p = self.parents(self.node(revs[0]))[0]
321 revs.insert(0, self.rev(p))
290 revs.insert(0, self.rev(p))
322
291
323 # for each delta that isn't contiguous in the log, we need to
292 # for each delta that isn't contiguous in the log, we need to
324 # reconstruct the base, reconstruct the result, and then
293 # reconstruct the base, reconstruct the result, and then
325 # calculate the delta. We also need to do this where we've
294 # calculate the delta. We also need to do this where we've
326 # stored a full version and not a delta
295 # stored a full version and not a delta
327 for i in xrange(0, len(revs) - 1):
296 for i in xrange(0, len(revs) - 1):
328 a, b = revs[i], revs[i + 1]
297 a, b = revs[i], revs[i + 1]
329 if a + 1 != b or self.base(b) == b:
298 if a + 1 != b or self.base(b) == b:
330 for j in xrange(self.base(a), a + 1):
299 for j in xrange(self.base(a), a + 1):
331 needed[j] = 1
300 needed[j] = 1
332 for j in xrange(self.base(b), b + 1):
301 for j in xrange(self.base(b), b + 1):
333 needed[j] = 1
302 needed[j] = 1
334
303
335 # calculate spans to retrieve from datafile
304 # calculate spans to retrieve from datafile
336 needed = needed.keys()
305 needed = needed.keys()
337 needed.sort()
306 needed.sort()
338 spans = []
307 spans = []
339 for n in needed:
308 for n in needed:
340 if n < 0: continue
309 if n < 0: continue
341 o = self.start(n)
310 o = self.start(n)
342 l = self.length(n)
311 l = self.length(n)
343 spans.append((o, l, [(n, l)]))
312 spans.append((o, l, [(n, l)]))
344
313
345 # merge spans
314 # merge spans
346 merge = [spans.pop(0)]
315 merge = [spans.pop(0)]
347 while spans:
316 while spans:
348 e = spans.pop(0)
317 e = spans.pop(0)
349 f = merge[-1]
318 f = merge[-1]
350 if e[0] == f[0] + f[1]:
319 if e[0] == f[0] + f[1]:
351 merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
320 merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
352 else:
321 else:
353 merge.append(e)
322 merge.append(e)
354
323
355 # read spans in, divide up chunks
324 # read spans in, divide up chunks
356 chunks = {}
325 chunks = {}
357 for span in merge:
326 for span in merge:
358 # we reopen the file for each span to make http happy for now
327 # we reopen the file for each span to make http happy for now
359 f = self.opener(self.datafile)
328 f = self.opener(self.datafile)
360 f.seek(span[0])
329 f.seek(span[0])
361 data = f.read(span[1])
330 data = f.read(span[1])
362
331
363 # divide up the span
332 # divide up the span
364 pos = 0
333 pos = 0
365 for r, l in span[2]:
334 for r, l in span[2]:
366 chunks[r] = data[pos: pos + l]
335 chunks[r] = data[pos: pos + l]
367 pos += l
336 pos += l
368
337
369 # helper to reconstruct intermediate versions
338 # helper to reconstruct intermediate versions
370 def construct(text, base, rev):
339 def construct(text, base, rev):
371 bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
340 bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
372 return mdiff.patches(text, bins)
341 return mdiff.patches(text, bins)
373
342
374 # build deltas
343 # build deltas
375 deltas = []
344 deltas = []
376 for d in xrange(0, len(revs) - 1):
345 for d in xrange(0, len(revs) - 1):
377 a, b = revs[d], revs[d + 1]
346 a, b = revs[d], revs[d + 1]
378 n = self.node(b)
347 n = self.node(b)
379
348
380 if a + 1 != b or self.base(b) == b:
349 if a + 1 != b or self.base(b) == b:
381 if a >= 0:
350 if a >= 0:
382 base = self.base(a)
351 base = self.base(a)
383 ta = decompress(chunks[self.base(a)])
352 ta = decompress(chunks[self.base(a)])
384 ta = construct(ta, base, a)
353 ta = construct(ta, base, a)
385 else:
354 else:
386 ta = ""
355 ta = ""
387
356
388 base = self.base(b)
357 base = self.base(b)
389 if a > base:
358 if a > base:
390 base = a
359 base = a
391 tb = ta
360 tb = ta
392 else:
361 else:
393 tb = decompress(chunks[self.base(b)])
362 tb = decompress(chunks[self.base(b)])
394 tb = construct(tb, base, b)
363 tb = construct(tb, base, b)
395 d = self.diff(ta, tb)
364 d = self.diff(ta, tb)
396 else:
365 else:
397 d = decompress(chunks[b])
366 d = decompress(chunks[b])
398
367
399 p = self.parents(n)
368 p = self.parents(n)
400 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
369 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
401 l = struct.pack(">l", len(meta) + len(d) + 4)
370 l = struct.pack(">l", len(meta) + len(d) + 4)
402 deltas.append(l + meta + d)
371 deltas.append(l + meta + d)
403
372
404 l = struct.pack(">l", sum(map(len, deltas)) + 4)
373 l = struct.pack(">l", sum(map(len, deltas)) + 4)
405 deltas.insert(0, l)
374 deltas.insert(0, l)
406 return "".join(deltas)
375 return "".join(deltas)
407
376
408 def addgroup(self, data, linkmapper, transaction):
377 def addgroup(self, data, linkmapper, transaction):
409 # given a set of deltas, add them to the revision log. the
378 # given a set of deltas, add them to the revision log. the
410 # first delta is against its parent, which should be in our
379 # first delta is against its parent, which should be in our
411 # log, the rest are against the previous delta.
380 # log, the rest are against the previous delta.
412
381
413 if not data: return self.tip()
382 if not data: return self.tip()
414
383
415 # retrieve the parent revision of the delta chain
384 # retrieve the parent revision of the delta chain
416 chain = data[24:44]
385 chain = data[24:44]
417 if not chain in self.nodemap:
386 if not chain in self.nodemap:
418 raise "unknown base %s" % short(chain[:4])
387 raise "unknown base %s" % short(chain[:4])
419
388
420 # track the base of the current delta log
389 # track the base of the current delta log
421 r = self.count()
390 r = self.count()
422 t = r - 1
391 t = r - 1
423
392
424 base = prev = -1
393 base = prev = -1
425 start = end = 0
394 start = end = 0
426 if r:
395 if r:
427 start = self.start(self.base(t))
396 start = self.start(self.base(t))
428 end = self.end(t)
397 end = self.end(t)
429 measure = self.length(self.base(t))
398 measure = self.length(self.base(t))
430 base = self.base(t)
399 base = self.base(t)
431 prev = self.tip()
400 prev = self.tip()
432
401
433 transaction.add(self.datafile, end)
402 transaction.add(self.datafile, end)
434 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
403 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
435 dfh = self.opener(self.datafile, "a")
404 dfh = self.opener(self.datafile, "a")
436 ifh = self.opener(self.indexfile, "a")
405 ifh = self.opener(self.indexfile, "a")
437
406
438 # loop through our set of deltas
407 # loop through our set of deltas
439 pos = 0
408 pos = 0
440 while pos < len(data):
409 while pos < len(data):
441 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
410 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
442 data[pos:pos+84])
411 data[pos:pos+84])
412 link = linkmapper(cs)
443 if node in self.nodemap:
413 if node in self.nodemap:
444 raise "already have %s" % hex(node[:4])
414 raise "already have %s" % hex(node[:4])
445 link = linkmapper(cs)
446 delta = data[pos + 84:pos + l]
415 delta = data[pos + 84:pos + l]
447 pos += l
416 pos += l
448
417
449 # full versions are inserted when the needed deltas become
418 # full versions are inserted when the needed deltas become
450 # comparable to the uncompressed text or when the previous
419 # comparable to the uncompressed text or when the previous
451 # version is not the one we have a delta against. We use
420 # version is not the one we have a delta against. We use
452 # the size of the previous full rev as a proxy for the
421 # the size of the previous full rev as a proxy for the
453 # current size.
422 # current size.
454
423
455 if chain == prev:
424 if chain == prev:
456 cdelta = compress(delta)
425 cdelta = compress(delta)
457
426
458 if chain != prev or (end - start + len(cdelta)) > measure * 2:
427 if chain != prev or (end - start + len(cdelta)) > measure * 2:
459 # flush our writes here so we can read it in revision
428 # flush our writes here so we can read it in revision
460 dfh.flush()
429 dfh.flush()
461 ifh.flush()
430 ifh.flush()
462 text = self.revision(chain)
431 text = self.revision(chain)
463 text = self.patches(text, [delta])
432 text = self.patches(text, [delta])
464 chk = self.addrevision(text, transaction, link, p1, p2)
433 chk = self.addrevision(text, transaction, link, p1, p2)
465 if chk != node:
434 if chk != node:
466 raise "consistency error adding group"
435 raise "consistency error adding group"
467 measure = len(text)
436 measure = len(text)
468 else:
437 else:
469 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
438 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
470 self.index.append(e)
439 self.index.append(e)
471 self.nodemap[node] = r
440 self.nodemap[node] = r
472 dfh.write(cdelta)
441 dfh.write(cdelta)
473 ifh.write(struct.pack(indexformat, *e))
442 ifh.write(struct.pack(indexformat, *e))
474
443
475 t, r, chain, prev = r, r + 1, node, node
444 t, r, chain, prev = r, r + 1, node, node
476 start = self.start(self.base(t))
445 start = self.start(self.base(t))
477 end = self.end(t)
446 end = self.end(t)
478
447
479 dfh.close()
448 dfh.close()
480 ifh.close()
449 ifh.close()
481 return node
450 return node
General Comments 0
You need to be logged in to leave comments. Login now