Beginning of multi-head support...
mpm@selenic.com
r221:2bfe525e default
@@ -1,219 +1,240 @@
-import os, re, traceback, sys, signal
+import os, re, traceback, sys, signal, time
 from mercurial import fancyopts, ui, hg
 
 class UnknownCommand(Exception): pass
 
 def filterfiles(list, files):
     l = [ x for x in list if x in files ]
 
     for f in files:
         if f[-1] != os.sep: f += os.sep
         l += [ x for x in list if x.startswith(f) ]
     return l
 
 def relfilter(repo, args):
     if os.getcwd() != repo.root:
         p = os.getcwd()[len(repo.root) + 1: ]
         return filterfiles(p, args)
     return args
 
 def relpath(repo, args):
     if os.getcwd() != repo.root:
         p = os.getcwd()[len(repo.root) + 1: ]
         return [ os.path.join(p, x) for x in args ]
     return args
 
 def help(ui, cmd=None):
     '''show help'''
     if cmd:
         try:
             i = find(cmd)
             ui.write("%s\n\n" % i[2])
             ui.write(i[0].__doc__, "\n")
         except UnknownCommand:
             ui.warn("unknown command %s", cmd)
         sys.exit(0)
 
     ui.status("""\
 hg commands:
 
  add [files...]        add the given files in the next commit
  addremove             add all new files, delete all missing files
  annotate [files...]   show changeset number per file line
  branch <path>         create a branch of <path> in this directory
  checkout [changeset]  checkout the latest or given changeset
  commit                commit all changes to the repository
  diff [files...]       diff working directory (or selected files)
  dump <file> [rev]     dump the latest or given revision of a file
  dumpmanifest [rev]    dump the latest or given revision of the manifest
  export <rev>          dump the changeset header and diffs for a revision
  history               show changeset history
  init                  create a new repository in this directory
  log <file>            show revision history of a single file
  merge <path>          merge changes from <path> into local repository
  recover               rollback an interrupted transaction
  remove [files...]     remove the given files in the next commit
  serve                 export the repository via HTTP
  status                show new, missing, and changed files in working dir
  tags                  show current changeset tags
  undo                  undo the last transaction
 """)
 
 def init(ui):
     """create a repository"""
     hg.repository(ui, ".", create=1)
 
 def branch(ui, path):
     '''branch from a local repository'''
     # this should eventually support remote repos
     os.system("cp -al %s/.hg .hg" % path)
 
 def checkout(ui, repo, changeset=None):
     '''checkout a given changeset or the current tip'''
     (c, a, d, u) = repo.diffdir(repo.root, repo.current)
     if c or a or d:
         ui.warn("aborting (outstanding changes in working directory)\n")
         sys.exit(1)
 
     node = repo.changelog.tip()
     if changeset:
         node = repo.lookup(changeset)
     repo.checkout(node)
 
 def annotate(u, repo, *args, **ops):
     def getnode(rev):
         return hg.short(repo.changelog.node(rev))
 
     def getname(rev):
         try:
             return bcache[rev]
         except KeyError:
             cl = repo.changelog.read(repo.changelog.node(rev))
             name = cl[1]
             f = name.find('@')
             if f >= 0:
                 name = name[:f]
             bcache[rev] = name
             return name
 
     bcache = {}
     opmap = [['user', getname], ['number', str], ['changeset', getnode]]
     if not ops['user'] and not ops['changeset']:
         ops['number'] = 1
 
     args = relpath(repo, args)
     node = repo.current
     if ops['revision']:
         node = repo.changelog.lookup(ops['revision'])
     change = repo.changelog.read(node)
     mmap = repo.manifest.read(change[0])
     maxuserlen = 0
     maxchangelen = 0
     for f in args:
         lines = repo.file(f).annotate(mmap[f])
         pieces = []
 
         for o, f in opmap:
             if ops[o]:
                 l = [ f(n) for n,t in lines ]
                 m = max(map(len, l))
                 pieces.append([ "%*s" % (m, x) for x in l])
 
         for p,l in zip(zip(*pieces), lines):
             u.write(" ".join(p) + ": " + l[1])
 
+def heads(ui, repo):
+    '''show current repository heads'''
+    for n in repo.changelog.heads():
+        i = repo.changelog.rev(n)
+        changes = repo.changelog.read(n)
+        (p1, p2) = repo.changelog.parents(n)
+        (h, h1, h2) = map(hg.hex, (n, p1, p2))
+        (i1, i2) = map(repo.changelog.rev, (p1, p2))
+        print "rev:      %4d:%s" % (i, h)
+        print "parents:  %4d:%s" % (i1, h1)
+        if i2: print "          %4d:%s" % (i2, h2)
+        print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
+                                    hg.hex(changes[0]))
+        print "user:", changes[1]
+        print "date:", time.asctime(
+            time.localtime(float(changes[2].split(' ')[0])))
+        if ui.verbose: print "files:", " ".join(changes[3])
+        print "description:"
+        print changes[4]
+
 def status(ui, repo):
     '''show changed files in the working directory
 
     C = changed
     A = added
     R = removed
     ? = not tracked'''
     (c, a, d, u) = repo.diffdir(repo.root, repo.current)
     (c, a, d, u) = map(lambda x: relfilter(repo, x), (c, a, d, u))
 
     for f in c: print "C", f
     for f in a: print "A", f
     for f in d: print "R", f
     for f in u: print "?", f
 
 def undo(ui, repo):
     repo.undo()
 
 table = {
     "init": (init, [], 'hg init'),
     "branch|clone": (branch, [], 'hg branch [path]'),
+    "heads": (heads, [], 'hg heads'),
     "help": (help, [], 'hg help [command]'),
     "checkout|co": (checkout, [], 'hg checkout [changeset]'),
     "ann|annotate": (annotate,
                      [('r', 'revision', '', 'revision'),
                       ('u', 'user', None, 'show user'),
                       ('n', 'number', None, 'show revision number'),
                       ('c', 'changeset', None, 'show changeset')],
                      'hg annotate [-u] [-c] [-n] [-r id] [files]'),
     "status": (status, [], 'hg status'),
     "undo": (undo, [], 'hg undo'),
     }
 
 norepo = "init branch help"
 
 def find(cmd):
     i = None
     for e in table.keys():
         if re.match(e + "$", cmd):
             return table[e]
 
     raise UnknownCommand(cmd)
 
 class SignalInterrupt(Exception): pass
 
 def catchterm(*args):
     raise SignalInterrupt
 
 def dispatch(args):
     options = {}
     opts = [('v', 'verbose', None, 'verbose'),
             ('d', 'debug', None, 'debug'),
             ('q', 'quiet', None, 'quiet'),
             ('y', 'noninteractive', None, 'run non-interactively'),
            ]
 
     args = fancyopts.fancyopts(args, opts, options,
                                'hg [options] <command> [options] [files]')
 
     if not args:
         cmd = "help"
     else:
         cmd, args = args[0], args[1:]
 
     u = ui.ui(options["verbose"], options["debug"], options["quiet"],
               not options["noninteractive"])
 
     # deal with unfound commands later
     i = find(cmd)
 
     signal.signal(signal.SIGTERM, catchterm)
 
     cmdoptions = {}
     args = fancyopts.fancyopts(args, i[1], cmdoptions, i[2])
 
     if cmd not in norepo.split():
         repo = hg.repository(ui = u)
         d = lambda: i[0](u, repo, *args, **cmdoptions)
     else:
         d = lambda: i[0](u, *args, **cmdoptions)
 
     try:
         d()
     except SignalInterrupt:
         u.warn("killed!\n")
     except KeyboardInterrupt:
         u.warn("interrupted!\n")
     except TypeError, inst:
         # was this an argument error?
         tb = traceback.extract_tb(sys.exc_info()[2])
         if len(tb) > 2: # no
             raise
         u.warn("%s: invalid arguments\n" % i[0].__name__)
         u.warn("syntax: %s\n" % i[2])
         sys.exit(-1)
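
A note on the dispatch machinery the new "heads" entry hooks into: find() matches the typed command with re.match(e + "$", cmd), so each table key is really a regular expression anchored only at its end. An unparenthesized key like "ann|annotate" therefore parses as ("ann" at the start) or ("annotate" exactly), which is what makes the short alias work. Below is a minimal, self-contained sketch of that alias-dispatch behavior; the table entries are illustrative stand-ins, not part of the commit.

import re

class UnknownCommand(Exception): pass

# same (function, options, usage-string) shape as the commit's table;
# the entries themselves are hypothetical placeholders
table = {
    "heads": (lambda: None, [], 'hg heads'),
    "ann|annotate": (lambda: None, [], 'hg annotate [-u] [-c] [-n] [-r id] [files]'),
}

def find(cmd):
    # anchor each key only at the end, as the commit's find() does
    for pattern, entry in table.items():
        if re.match(pattern + "$", cmd):
            return entry
    raise UnknownCommand(cmd)

print(find("ann")[2])       # alias matches -> hg annotate [-u] [-c] [-n] [-r id] [files]
print(find("annotate")[2])  # same entry
print(find("heads")[2])     # -> hg heads

Because the alternation is unanchored on the left, "ann|annotate$" would also match any command that merely starts with "ann" (say, "annex") -- a quirk worth knowing when extending the table.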
@@ -1,496 +1,507 @@
 # revlog.py - storage back-end for mercurial
 #
 # This provides efficient delta storage with O(1) retrieve and append
 # and O(changes) merge between branches
 #
 # Copyright 2005 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
 import zlib, struct, sha, binascii, heapq
 from mercurial import mdiff
 
 def hex(node): return binascii.hexlify(node)
 def bin(node): return binascii.unhexlify(node)
 def short(node): return hex(node[:4])
 
 def compress(text):
     if not text: return text
     if len(text) < 44:
         if text[0] == '\0': return text
         return 'u' + text
     bin = zlib.compress(text)
     if len(bin) > len(text):
         if text[0] == '\0': return text
         return 'u' + text
     return bin
 
 def decompress(bin):
     if not bin: return bin
     t = bin[0]
     if t == '\0': return bin
     if t == 'x': return zlib.decompress(bin)
     if t == 'u': return bin[1:]
     raise "unknown compression type %s" % t
 
 def hash(text, p1, p2):
     l = [p1, p2]
     l.sort()
     return sha.sha(l[0] + l[1] + text).digest()
 
 nullid = "\0" * 20
 indexformat = ">4l20s20s20s"
 
 class lazyparser:
     def __init__(self, data):
         self.data = data
         self.s = struct.calcsize(indexformat)
         self.l = len(data)/self.s
         self.index = [None] * self.l
         self.map = {nullid: -1}
 
     def load(self, pos):
         block = pos / 1000
         i = block * 1000
         end = min(self.l, i + 1000)
         while i < end:
             d = self.data[i * self.s: (i + 1) * self.s]
             e = struct.unpack(indexformat, d)
             self.index[i] = e
             self.map[e[6]] = i
             i += 1
 
 class lazyindex:
     def __init__(self, parser):
         self.p = parser
     def __len__(self):
         return len(self.p.index)
     def load(self, pos):
         self.p.load(pos)
         return self.p.index[pos]
     def __getitem__(self, pos):
         return self.p.index[pos] or self.load(pos)
     def append(self, e):
         self.p.index.append(e)
 
 class lazymap:
     def __init__(self, parser):
         self.p = parser
     def load(self, key):
         n = self.p.data.find(key)
         if n < 0: raise KeyError("node " + hex(key))
         pos = n / self.p.s
         self.p.load(pos)
     def __contains__(self, key):
         try:
             self[key]
             return True
         except KeyError:
             return False
     def __iter__(self):
         for i in xrange(self.p.l):
             try:
                 yield self.p.index[i][6]
             except:
                 self.p.load(i)
                 yield self.p.index[i][6]
     def __getitem__(self, key):
         try:
             return self.p.map[key]
         except KeyError:
             try:
                 self.load(key)
                 return self.p.map[key]
             except KeyError:
                 raise KeyError("node " + hex(key))
     def __setitem__(self, key, val):
         self.p.map[key] = val
 
 class revlog:
     def __init__(self, opener, indexfile, datafile):
         self.indexfile = indexfile
         self.datafile = datafile
         self.opener = opener
         self.cache = None
 
         try:
             i = self.opener(self.indexfile).read()
         except IOError:
             i = ""
 
         if len(i) > 10000:
             # big index, let's parse it on demand
             parser = lazyparser(i)
             self.index = lazyindex(parser)
             self.nodemap = lazymap(parser)
         else:
             s = struct.calcsize(indexformat)
             l = len(i) / s
             self.index = [None] * l
             m = [None] * l
 
             n = 0
             for f in xrange(0, len(i), s):
                 # offset, size, base, linkrev, p1, p2, nodeid
                 e = struct.unpack(indexformat, i[f:f + s])
                 m[n] = (e[6], n)
                 self.index[n] = e
                 n += 1
 
             self.nodemap = dict(m)
             self.nodemap[nullid] = -1
 
 
     def tip(self): return self.node(len(self.index) - 1)
     def count(self): return len(self.index)
     def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
     def rev(self, node): return self.nodemap[node]
     def linkrev(self, node): return self.index[self.nodemap[node]][3]
     def parents(self, node):
         if node == nullid: return (nullid, nullid)
         return self.index[self.nodemap[node]][4:6]
 
     def start(self, rev): return self.index[rev][0]
     def length(self, rev): return self.index[rev][1]
     def end(self, rev): return self.start(rev) + self.length(rev)
     def base(self, rev): return self.index[rev][2]
 
+    def heads(self):
+        p = {}
+        h = []
+        for r in range(self.count() - 1, 0, -1):
+            n = self.node(r)
+            if n not in p:
+                h.append(n)
+            for pn in self.parents(n):
+                p[pn] = 1
+        return h
+
     def lookup(self, id):
         try:
             rev = int(id)
             return self.node(rev)
         except ValueError:
             c = []
             for n in self.nodemap:
                 if id in hex(n):
                     c.append(n)
             if len(c) > 1: raise KeyError("Ambiguous identifier")
             if len(c) < 1: raise KeyError("No match found")
             return c[0]
 
         return None
 
     def diff(self, a, b):
         return mdiff.textdiff(a, b)
 
     def patches(self, t, pl):
         return mdiff.patches(t, pl)
 
     def delta(self, node):
         r = self.rev(node)
         b = self.base(r)
         if r == b:
             return self.diff(self.revision(self.node(r - 1)),
                              self.revision(node))
         else:
             f = self.opener(self.datafile)
             f.seek(self.start(r))
             data = f.read(self.length(r))
             return decompress(data)
 
     def revision(self, node):
         if node == nullid: return ""
         if self.cache and self.cache[0] == node: return self.cache[2]
 
         text = None
         rev = self.rev(node)
         start, length, base, link, p1, p2, node = self.index[rev]
         end = start + length
         if base != rev: start = self.start(base)
 
         if self.cache and self.cache[1] >= base and self.cache[1] < rev:
             base = self.cache[1]
             start = self.start(base + 1)
             text = self.cache[2]
             last = 0
 
         f = self.opener(self.datafile)
         f.seek(start)
         data = f.read(end - start)
 
         if not text:
             last = self.length(base)
             text = decompress(data[:last])
 
         bins = []
         for r in xrange(base + 1, rev + 1):
             s = self.length(r)
             bins.append(decompress(data[last:last + s]))
             last = last + s
 
         text = mdiff.patches(text, bins)
 
         if node != hash(text, p1, p2):
             raise IOError("integrity check failed on %s:%d"
                           % (self.datafile, rev))
 
         self.cache = (node, rev, text)
         return text
 
     def addrevision(self, text, transaction, link, p1=None, p2=None):
         if text is None: text = ""
         if p1 is None: p1 = self.tip()
         if p2 is None: p2 = nullid
 
         node = hash(text, p1, p2)
 
         n = self.count()
         t = n - 1
 
         if n:
             base = self.base(t)
             start = self.start(base)
             end = self.end(t)
             prev = self.revision(self.tip())
             d = self.diff(prev, text)
             data = compress(d)
             dist = end - start + len(data)
 
         # full versions are inserted when the needed deltas
         # become comparable to the uncompressed text
         if not n or dist > len(text) * 2:
             data = compress(text)
             base = n
         else:
             base = self.base(t)
 
         offset = 0
         if t >= 0:
             offset = self.end(t)
 
         e = (offset, len(data), base, link, p1, p2, node)
 
         self.index.append(e)
         self.nodemap[node] = n
         entry = struct.pack(indexformat, *e)
 
         transaction.add(self.datafile, e[0])
         self.opener(self.datafile, "a").write(data)
         transaction.add(self.indexfile, n * len(entry))
         self.opener(self.indexfile, "a").write(entry)
 
         self.cache = (node, n, text)
         return node
 
     def ancestor(self, a, b):
         # calculate the distance of every node from root
         dist = {nullid: 0}
         for i in xrange(self.count()):
             n = self.node(i)
             p1, p2 = self.parents(n)
             dist[n] = max(dist[p1], dist[p2]) + 1
 
         # traverse ancestors in order of decreasing distance from root
         def ancestors(node):
             # we store negative distances because heap returns smallest member
             h = [(-dist[node], node)]
             seen = {}
             earliest = self.count()
             while h:
                 d, n = heapq.heappop(h)
                 r = self.rev(n)
                 if n not in seen:
                     seen[n] = 1
                     yield (-d, n)
                     for p in self.parents(n):
                         heapq.heappush(h, (-dist[p], p))
 
         x = ancestors(a)
         y = ancestors(b)
         lx = x.next()
         ly = y.next()
 
         # increment each ancestor list until it is closer to root than
         # the other, or they match
         while 1:
             if lx == ly:
                 return lx[1]
             elif lx < ly:
                 ly = y.next()
             elif lx > ly:
                 lx = x.next()
 
     def group(self, linkmap):
         # given a list of changeset revs, return a set of deltas and
         # metadata corresponding to nodes. the first delta is
         # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
         # have this parent as it has all history before these
         # changesets. parent is parent[0]
 
         revs = []
         needed = {}
 
         # find file nodes/revs that match changeset revs
         for i in xrange(0, self.count()):
             if self.index[i][3] in linkmap:
                 revs.append(i)
                 needed[i] = 1
 
         # if we don't have any revisions touched by these changesets, bail
         if not revs:
             yield struct.pack(">l", 0)
             return
 
         # add the parent of the first rev
         p = self.parents(self.node(revs[0]))[0]
         revs.insert(0, self.rev(p))
 
         # for each delta that isn't contiguous in the log, we need to
         # reconstruct the base, reconstruct the result, and then
         # calculate the delta. We also need to do this where we've
         # stored a full version and not a delta
         for i in xrange(0, len(revs) - 1):
             a, b = revs[i], revs[i + 1]
             if a + 1 != b or self.base(b) == b:
                 for j in xrange(self.base(a), a + 1):
                     needed[j] = 1
                 for j in xrange(self.base(b), b + 1):
                     needed[j] = 1
 
         # calculate spans to retrieve from datafile
         needed = needed.keys()
         needed.sort()
         spans = []
         oo = -1
         ol = 0
         for n in needed:
             if n < 0: continue
             o = self.start(n)
             l = self.length(n)
             if oo + ol == o: # can we merge with the previous?
                 nl = spans[-1][2]
                 nl.append((n, l))
                 ol += l
                 spans[-1] = (oo, ol, nl)
             else:
                 oo = o
                 ol = l
                 spans.append((oo, ol, [(n, l)]))
 
         # read spans in, divide up chunks
         chunks = {}
         for span in spans:
             # we reopen the file for each span to make http happy for now
             f = self.opener(self.datafile)
             f.seek(span[0])
             data = f.read(span[1])
 
             # divide up the span
             pos = 0
             for r, l in span[2]:
                 chunks[r] = decompress(data[pos: pos + l])
                 pos += l
 
         # helper to reconstruct intermediate versions
         def construct(text, base, rev):
             bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
             return mdiff.patches(text, bins)
 
         # build deltas
         deltas = []
         for d in xrange(0, len(revs) - 1):
             a, b = revs[d], revs[d + 1]
             n = self.node(b)
 
             # do we need to construct a new delta?
             if a + 1 != b or self.base(b) == b:
                 if a >= 0:
                     base = self.base(a)
                     ta = chunks[self.base(a)]
                     ta = construct(ta, base, a)
                 else:
                     ta = ""
 
                 base = self.base(b)
                 if a > base:
                     base = a
                     tb = ta
                 else:
                     tb = chunks[self.base(b)]
                 tb = construct(tb, base, b)
                 d = self.diff(ta, tb)
             else:
                 d = chunks[b]
 
             p = self.parents(n)
             meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
             l = struct.pack(">l", len(meta) + len(d) + 4)
             yield l
             yield meta
             yield d
 
         yield struct.pack(">l", 0)
 
     def addgroup(self, revs, linkmapper, transaction):
         # given a set of deltas, add them to the revision log. the
         # first delta is against its parent, which should be in our
         # log, the rest are against the previous delta.
 
         # track the base of the current delta log
         r = self.count()
         t = r - 1
         node = nullid
 
         base = prev = -1
         start = end = 0
         if r:
             start = self.start(self.base(t))
             end = self.end(t)
             measure = self.length(self.base(t))
             base = self.base(t)
             prev = self.tip()
 
         transaction.add(self.datafile, end)
         transaction.add(self.indexfile, r * struct.calcsize(indexformat))
         dfh = self.opener(self.datafile, "a")
         ifh = self.opener(self.indexfile, "a")
 
         # loop through our set of deltas
         chain = None
         for chunk in revs:
             node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
             link = linkmapper(cs)
             if node in self.nodemap:
                 raise "already have %s" % hex(node[:4])
             delta = chunk[80:]
 
             if not chain:
                 # retrieve the parent revision of the delta chain
                 chain = p1
                 if not chain in self.nodemap:
                     raise "unknown base %s" % short(chain[:4])
 
             # full versions are inserted when the needed deltas become
             # comparable to the uncompressed text or when the previous
             # version is not the one we have a delta against. We use
             # the size of the previous full rev as a proxy for the
             # current size.
 
             if chain == prev:
                 cdelta = compress(delta)
 
             if chain != prev or (end - start + len(cdelta)) > measure * 2:
                 # flush our writes here so we can read it in revision
                 dfh.flush()
                 ifh.flush()
                 text = self.revision(chain)
                 text = self.patches(text, [delta])
                 chk = self.addrevision(text, transaction, link, p1, p2)
                 if chk != node:
                     raise "consistency error adding group"
                 measure = len(text)
             else:
                 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                 self.index.append(e)
                 self.nodemap[node] = r
                 dfh.write(cdelta)
                 ifh.write(struct.pack(indexformat, *e))
 
             t, r, chain, prev = r, r + 1, node, node
             start = self.start(self.base(t))
             end = self.end(t)
 
         dfh.close()
         ifh.close()
         return node
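
The new revlog.heads() leans on the revlog invariant that parents always sit at lower revision numbers than their children. Walking from tip toward the root, a revision is a head exactly when no higher-numbered revision has already claimed it as a parent. (Note the committed loop runs range(self.count() - 1, 0, -1), so rev 0 itself is never examined.) Below is a minimal sketch of the same walk on a toy DAG, independent of the revlog index; the names are illustrative, -1 stands in for nullid, and the loop here is extended down to rev 0.

def heads(count, parents):
    seen = {}   # revs already named as somebody's parent
    h = []
    for r in range(count - 1, -1, -1):   # tip -> root
        if r not in seen:                # no child seen yet => head
            h.append(r)
        for p in parents(r):
            seen[p] = 1
    return h

# rev 1 has two children (2 and 3), so this DAG has two heads
parent_table = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
print(heads(4, lambda r: parent_table[r]))   # -> [3, 2]

One pass over the index and the parent pointers it already stores is all this costs: O(revisions), with no traversal state beyond the seen table.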