##// END OF EJS Templates
Fix up a bunch of bugs in the new merge code...
mpm@selenic.com -
r65:d40cc5aa 0.4f default
parent child Browse files
Show More
@@ -1,380 +1,380 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # mercurial - a minimal scalable distributed SCM
3 # mercurial - a minimal scalable distributed SCM
4 # v0.4e "sabina"
4 # v0.4e "sabina"
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 # the psyco compiler makes commits a bit faster
11 # the psyco compiler makes commits a bit faster
12 # and makes changegroup merge about 20 times slower!
12 # and makes changegroup merge about 20 times slower!
13 # try:
13 # try:
14 # import psyco
14 # import psyco
15 # psyco.full()
15 # psyco.full()
16 # except:
16 # except:
17 # pass
17 # pass
18
18
19 import sys, os, time
19 import sys, os, time
20 from mercurial import hg, mdiff, fancyopts
20 from mercurial import hg, mdiff, fancyopts
21
21
22 def help():
22 def help():
23 print """\
23 print """\
24 commands:
24 commands:
25
25
26 init create a new repository in this directory
26 init create a new repository in this directory
27 branch <path> create a branch of <path> in this directory
27 branch <path> create a branch of <path> in this directory
28 merge <path> merge changes from <path> into local repository
28 merge <path> merge changes from <path> into local repository
29 checkout [changeset] checkout the latest or given changeset
29 checkout [changeset] checkout the latest or given changeset
30 status show new, missing, and changed files in working dir
30 status show new, missing, and changed files in working dir
31 add [files...] add the given files in the next commit
31 add [files...] add the given files in the next commit
32 remove [files...] remove the given files in the next commit
32 remove [files...] remove the given files in the next commit
33 addremove add all new files, delete all missing files
33 addremove add all new files, delete all missing files
34 commit commit all changes to the repository
34 commit commit all changes to the repository
35 history show changeset history
35 history show changeset history
36 log <file> show revision history of a single file
36 log <file> show revision history of a single file
37 dump <file> [rev] dump the latest or given revision of a file
37 dump <file> [rev] dump the latest or given revision of a file
38 dumpmanifest [rev] dump the latest or given revision of the manifest
38 dumpmanifest [rev] dump the latest or given revision of the manifest
39 diff [files...] diff working directory (or selected files)
39 diff [files...] diff working directory (or selected files)
40 """
40 """
41
41
42 def filterfiles(list, files):
42 def filterfiles(list, files):
43 l = [ x for x in list if x in files ]
43 l = [ x for x in list if x in files ]
44
44
45 for f in files:
45 for f in files:
46 if f[-1] != os.sep: f += os.sep
46 if f[-1] != os.sep: f += os.sep
47 l += [ x for x in list if x.startswith(f) ]
47 l += [ x for x in list if x.startswith(f) ]
48 return l
48 return l
49
49
50 def diff(files = None, node1 = None, node2 = None):
50 def diff(files = None, node1 = None, node2 = None):
51 def date(c):
51 def date(c):
52 return time.asctime(time.gmtime(float(c[2].split(' ')[0])))
52 return time.asctime(time.gmtime(float(c[2].split(' ')[0])))
53
53
54 if node2:
54 if node2:
55 change = repo.changelog.read(node2)
55 change = repo.changelog.read(node2)
56 mmap2 = repo.manifest.read(change[0])
56 mmap2 = repo.manifest.read(change[0])
57 (c, a, d) = repo.diffrevs(node1, node2)
57 (c, a, d) = repo.diffrevs(node1, node2)
58 def read(f): return repo.file(f).read(mmap2[f])
58 def read(f): return repo.file(f).read(mmap2[f])
59 date2 = date(change)
59 date2 = date(change)
60 else:
60 else:
61 date2 = time.asctime()
61 date2 = time.asctime()
62 if not node1:
62 if not node1:
63 node1 = repo.current
63 node1 = repo.current
64 (c, a, d) = repo.diffdir(repo.root, node1)
64 (c, a, d) = repo.diffdir(repo.root, node1)
65 def read(f): return file(os.path.join(repo.root, f)).read()
65 def read(f): return file(os.path.join(repo.root, f)).read()
66
66
67 change = repo.changelog.read(node1)
67 change = repo.changelog.read(node1)
68 mmap = repo.manifest.read(change[0])
68 mmap = repo.manifest.read(change[0])
69 date1 = date(change)
69 date1 = date(change)
70
70
71 if files:
71 if files:
72 (c, a, d) = map(lambda x: filterfiles(x, files), (c, a, d))
72 (c, a, d) = map(lambda x: filterfiles(x, files), (c, a, d))
73
73
74 for f in c:
74 for f in c:
75 to = repo.file(f).read(mmap[f])
75 to = repo.file(f).read(mmap[f])
76 tn = read(f)
76 tn = read(f)
77 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
77 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
78 for f in a:
78 for f in a:
79 to = ""
79 to = ""
80 tn = read(f)
80 tn = read(f)
81 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
81 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
82 for f in d:
82 for f in d:
83 to = repo.file(f).read(mmap[f])
83 to = repo.file(f).read(mmap[f])
84 tn = ""
84 tn = ""
85 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
85 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
86
86
87 options = {}
87 options = {}
88 opts = [('v', 'verbose', None, 'verbose'),
88 opts = [('v', 'verbose', None, 'verbose'),
89 ('d', 'debug', None, 'debug')]
89 ('d', 'debug', None, 'debug')]
90
90
91 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
91 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
92 'hg [options] <command> [command options] [files]')
92 'hg [options] <command> [command options] [files]')
93
93
94 try:
94 try:
95 cmd = args[0]
95 cmd = args[0]
96 args = args[1:]
96 args = args[1:]
97 except:
97 except:
98 cmd = ""
98 cmd = ""
99
99
100 ui = hg.ui(options["verbose"], options["debug"])
100 ui = hg.ui(options["verbose"], options["debug"])
101
101
102 if cmd == "init":
102 if cmd == "init":
103 repo = hg.repository(ui, ".", create=1)
103 repo = hg.repository(ui, ".", create=1)
104 sys.exit(0)
104 sys.exit(0)
105 elif cmd == "branch" or cmd == "clone":
105 elif cmd == "branch" or cmd == "clone":
106 os.system("cp -al %s/.hg .hg" % args[0])
106 os.system("cp -al %s/.hg .hg" % args[0])
107 sys.exit(0)
107 sys.exit(0)
108 elif cmd == "help":
108 elif cmd == "help":
109 help()
109 help()
110 sys.exit(0)
110 sys.exit(0)
111 else:
111 else:
112 try:
112 try:
113 repo = hg.repository(ui=ui)
113 repo = hg.repository(ui=ui)
114 except:
114 except:
115 print "Unable to open repository"
115 print "Unable to open repository"
116 sys.exit(0)
116 sys.exit(0)
117
117
118 if cmd == "checkout" or cmd == "co":
118 if cmd == "checkout" or cmd == "co":
119 node = repo.changelog.tip()
119 node = repo.changelog.tip()
120 if args:
120 if args:
121 node = repo.changelog.lookup(args[0])
121 node = repo.changelog.lookup(args[0])
122 repo.checkout(node)
122 repo.checkout(node)
123
123
124 elif cmd == "add":
124 elif cmd == "add":
125 repo.add(args)
125 repo.add(args)
126
126
127 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
127 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
128 repo.remove(args)
128 repo.remove(args)
129
129
130 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
130 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
131 if 1:
131 if 1:
132 if len(args) > 0:
132 if len(args) > 0:
133 repo.commit(repo.current, args)
133 repo.commit(repo.current, args)
134 else:
134 else:
135 repo.commit(repo.current)
135 repo.commit(repo.current)
136
136
137 elif cmd == "import" or cmd == "patch":
137 elif cmd == "import" or cmd == "patch":
138 ioptions = {}
138 ioptions = {}
139 opts = [('p', 'strip', 1, 'path strip'),
139 opts = [('p', 'strip', 1, 'path strip'),
140 ('b', 'base', "", 'base path'),
140 ('b', 'base', "", 'base path'),
141 ('q', 'quiet', "", 'silence diff')
141 ('q', 'quiet', "", 'silence diff')
142 ]
142 ]
143
143
144 args = fancyopts.fancyopts(args, opts, ioptions,
144 args = fancyopts.fancyopts(args, opts, ioptions,
145 'hg import [options] <patch names>')
145 'hg import [options] <patch names>')
146 d = ioptions["base"]
146 d = ioptions["base"]
147 strip = ioptions["strip"]
147 strip = ioptions["strip"]
148 quiet = ioptions["quiet"] and "> /dev/null" or ""
148 quiet = ioptions["quiet"] and "> /dev/null" or ""
149
149
150 for patch in args:
150 for patch in args:
151 ui.status("applying %s\n" % patch)
151 ui.status("applying %s\n" % patch)
152 pf = os.path.join(d, patch)
152 pf = os.path.join(d, patch)
153
153
154 text = ""
154 text = ""
155 for l in file(pf):
155 for l in file(pf):
156 if l[:3] == "---": break
156 if l[:3] == "---": break
157 text += l
157 text += l
158
158
159 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
159 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
160 raise "patch failed!"
160 raise "patch failed!"
161 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
161 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
162 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
162 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
163 f.close()
163 f.close()
164 repo.commit(repo.current, files, text)
164 repo.commit(repo.current, files, text)
165
165
166 elif cmd == "status":
166 elif cmd == "status":
167 (c, a, d) = repo.diffdir(repo.root, repo.current)
167 (c, a, d) = repo.diffdir(repo.root, repo.current)
168 for f in c: print "C", f
168 for f in c: print "C", f
169 for f in a: print "?", f
169 for f in a: print "?", f
170 for f in d: print "R", f
170 for f in d: print "R", f
171
171
172 elif cmd == "diff":
172 elif cmd == "diff":
173 revs = []
173 revs = []
174
174
175 if args:
175 if args:
176 doptions = {}
176 doptions = {}
177 opts = [('r', 'revision', [], 'revision')]
177 opts = [('r', 'revision', [], 'revision')]
178 args = fancyopts.fancyopts(args, opts, doptions,
178 args = fancyopts.fancyopts(args, opts, doptions,
179 'hg diff [options] [files]')
179 'hg diff [options] [files]')
180 revs = map(lambda x: repo.changelog.lookup(x), doptions['revision'])
180 revs = map(lambda x: repo.changelog.lookup(x), doptions['revision'])
181
181
182 if len(revs) > 2:
182 if len(revs) > 2:
183 print "too many revisions to diff"
183 print "too many revisions to diff"
184 sys.exit(1)
184 sys.exit(1)
185
185
186 if os.getcwd() != repo.root:
186 if os.getcwd() != repo.root:
187 relpath = os.getcwd()[len(repo.root) + 1: ]
187 relpath = os.getcwd()[len(repo.root) + 1: ]
188 if not args: args = [ relpath ]
188 if not args: args = [ relpath ]
189 else: args = [ os.path.join(relpath, x) for x in args ]
189 else: args = [ os.path.join(relpath, x) for x in args ]
190
190
191 diff(args, *revs)
191 diff(args, *revs)
192
192
193 elif cmd == "export":
193 elif cmd == "export":
194 node = repo.changelog.lookup(args[0])
194 node = repo.changelog.lookup(args[0])
195 prev = repo.changelog.parents(node)[0]
195 prev = repo.changelog.parents(node)[0]
196 diff(None, prev, node)
196 diff(None, prev, node)
197
197
198 elif cmd == "debugchangegroup":
198 elif cmd == "debugchangegroup":
199 newer = repo.newer(map(repo.changelog.lookup, args))
199 newer = repo.newer(map(repo.changelog.lookup, args))
200 for chunk in repo.changegroup(newer):
200 for chunk in repo.changegroup(newer):
201 sys.stdout.write(chunk)
201 sys.stdout.write(chunk)
202
202
203 elif cmd == "debugaddchangegroup":
203 elif cmd == "debugaddchangegroup":
204 data = sys.stdin.read()
204 data = sys.stdin.read()
205 repo.addchangegroup(data)
205 repo.addchangegroup(data)
206
206
207 elif cmd == "addremove":
207 elif cmd == "addremove":
208 (c, a, d) = repo.diffdir(repo.root, repo.current)
208 (c, a, d) = repo.diffdir(repo.root, repo.current)
209 repo.add(a)
209 repo.add(a)
210 repo.remove(d)
210 repo.remove(d)
211
211
212 elif cmd == "history":
212 elif cmd == "history":
213 for i in range(repo.changelog.count()):
213 for i in range(repo.changelog.count()):
214 n = repo.changelog.node(i)
214 n = repo.changelog.node(i)
215 changes = repo.changelog.read(n)
215 changes = repo.changelog.read(n)
216 (p1, p2) = repo.changelog.parents(n)
216 (p1, p2) = repo.changelog.parents(n)
217 (h, h1, h2) = map(hg.hex, (n, p1, p2))
217 (h, h1, h2) = map(hg.hex, (n, p1, p2))
218 (i1, i2) = map(repo.changelog.rev, (p1, p2))
218 (i1, i2) = map(repo.changelog.rev, (p1, p2))
219 print "rev: %4d:%s" % (i, h)
219 print "rev: %4d:%s" % (i, h)
220 print "parents: %4d:%s" % (i1, h1)
220 print "parents: %4d:%s" % (i1, h1)
221 if i2: print " %4d:%s" % (i2, h2)
221 if i2: print " %4d:%s" % (i2, h2)
222 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
222 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
223 hg.hex(changes[0]))
223 hg.hex(changes[0]))
224 print "user:", changes[1]
224 print "user:", changes[1]
225 print "date:", time.asctime(
225 print "date:", time.asctime(
226 time.localtime(float(changes[2].split(' ')[0])))
226 time.localtime(float(changes[2].split(' ')[0])))
227 print "files:", " ".join(changes[3])
227 print "files:", " ".join(changes[3])
228 print "description:"
228 print "description:"
229 print changes[4]
229 print changes[4]
230
230
231 elif cmd == "log":
231 elif cmd == "log":
232 if args:
232 if args:
233 r = repo.file(args[0])
233 r = repo.file(args[0])
234 for i in range(r.count()):
234 for i in range(r.count()):
235 n = r.node(i)
235 n = r.node(i)
236 (p1, p2) = r.parents(n)
236 (p1, p2) = r.parents(n)
237 (h, h1, h2) = map(hg.hex, (n, p1, p2))
237 (h, h1, h2) = map(hg.hex, (n, p1, p2))
238 (i1, i2) = map(r.rev, (p1, p2))
238 (i1, i2) = map(r.rev, (p1, p2))
239 cr = r.linkrev(n)
239 cr = r.linkrev(n)
240 cn = hg.hex(repo.changelog.node(cr))
240 cn = hg.hex(repo.changelog.node(cr))
241 print "rev: %4d:%s" % (i, h)
241 print "rev: %4d:%s" % (i, h)
242 print "changeset: %4d:%s" % (cr, cn)
242 print "changeset: %4d:%s" % (cr, cn)
243 print "parents: %4d:%s" % (i1, h1)
243 print "parents: %4d:%s" % (i1, h1)
244 if i2: print " %4d:%s" % (i2, h2)
244 if i2: print " %4d:%s" % (i2, h2)
245 else:
245 else:
246 print "missing filename"
246 print "missing filename"
247
247
248 elif cmd == "dump":
248 elif cmd == "dump":
249 if args:
249 if args:
250 r = repo.file(args[0])
250 r = repo.file(args[0])
251 n = r.tip()
251 n = r.tip()
252 if len(args) > 1: n = r.lookup(args[1])
252 if len(args) > 1: n = r.lookup(args[1])
253 sys.stdout.write(r.read(n))
253 sys.stdout.write(r.read(n))
254 else:
254 else:
255 print "missing filename"
255 print "missing filename"
256
256
257 elif cmd == "dumpmanifest":
257 elif cmd == "dumpmanifest":
258 n = repo.manifest.tip()
258 n = repo.manifest.tip()
259 if len(args) > 0:
259 if len(args) > 0:
260 n = repo.manifest.lookup(args[0])
260 n = repo.manifest.lookup(args[0])
261 m = repo.manifest.read(n)
261 m = repo.manifest.read(n)
262 files = m.keys()
262 files = m.keys()
263 files.sort()
263 files.sort()
264
264
265 for f in files:
265 for f in files:
266 print hg.hex(m[f]), f
266 print hg.hex(m[f]), f
267
267
268 elif cmd == "debughash":
268 elif cmd == "debughash":
269 f = repo.file(args[0])
269 f = repo.file(args[0])
270 print f.encodepath(args[0])
270 print f.encodepath(args[0])
271
271
272 elif cmd == "debugindex":
272 elif cmd == "debugindex":
273 r = hg.revlog(open, args[0], "")
273 r = hg.revlog(open, args[0], "")
274 print " rev offset length base linkrev"+\
274 print " rev offset length base linkrev"+\
275 " p1 p2 nodeid"
275 " p1 p2 nodeid"
276 for i in range(r.count()):
276 for i in range(r.count()):
277 e = r.index[i]
277 e = r.index[i]
278 print "% 6d % 9d % 7d % 5d % 7d %s.. %s.. %s.." % (
278 print "% 6d % 9d % 7d % 5d % 7d %s.. %s.. %s.." % (
279 i, e[0], e[1], e[2], e[3],
279 i, e[0], e[1], e[2], e[3],
280 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
280 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
281
281
282 elif cmd == "merge":
282 elif cmd == "merge":
283 if args:
283 if args:
284 other = hg.repository(ui, args[0])
284 other = hg.repository(ui, args[0])
285 print "retrieving changegroup"
285 print "requesting changegroup"
286 cg = repo.getchangegroup(other)
286 cg = repo.getchangegroup(other)
287 repo.addchangegroup(cg)
287 repo.addchangegroup(cg)
288 else:
288 else:
289 print "missing source repository"
289 print "missing source repository"
290
290
291 elif cmd == "debugoldmerge":
291 elif cmd == "debugoldmerge":
292 if args:
292 if args:
293 other = hg.repository(ui, args[0])
293 other = hg.repository(ui, args[0])
294 repo.merge(other)
294 repo.merge(other)
295 else:
295 else:
296 print "missing source repository"
296 print "missing source repository"
297
297
298 elif cmd == "verify":
298 elif cmd == "verify":
299 filelinkrevs = {}
299 filelinkrevs = {}
300 filenodes = {}
300 filenodes = {}
301 manifestchangeset = {}
301 manifestchangeset = {}
302 changesets = revisions = files = 0
302 changesets = revisions = files = 0
303
303
304 print "checking changesets"
304 print "checking changesets"
305 for i in range(repo.changelog.count()):
305 for i in range(repo.changelog.count()):
306 changesets += 1
306 changesets += 1
307 n = repo.changelog.node(i)
307 n = repo.changelog.node(i)
308 changes = repo.changelog.read(n)
308 changes = repo.changelog.read(n)
309 manifestchangeset[changes[0]] = n
309 manifestchangeset[changes[0]] = n
310 for f in changes[3]:
310 for f in changes[3]:
311 revisions += 1
311 revisions += 1
312 filelinkrevs.setdefault(f, []).append(i)
312 filelinkrevs.setdefault(f, []).append(i)
313
313
314 print "checking manifests"
314 print "checking manifests"
315 for i in range(repo.manifest.count()):
315 for i in range(repo.manifest.count()):
316 n = repo.manifest.node(i)
316 n = repo.manifest.node(i)
317 ca = repo.changelog.node(repo.manifest.linkrev(n))
317 ca = repo.changelog.node(repo.manifest.linkrev(n))
318 cc = manifestchangeset[n]
318 cc = manifestchangeset[n]
319 if ca != cc:
319 if ca != cc:
320 print "manifest %s points to %s, not %s" % \
320 print "manifest %s points to %s, not %s" % \
321 (hg.hex(n), hg.hex(ca), hg.hex(cc))
321 (hg.hex(n), hg.hex(ca), hg.hex(cc))
322 m = repo.manifest.read(n)
322 m = repo.manifest.read(n)
323 for f, fn in m.items():
323 for f, fn in m.items():
324 filenodes.setdefault(f, {})[fn] = 1
324 filenodes.setdefault(f, {})[fn] = 1
325
325
326 print "crosschecking files in changesets and manifests"
326 print "crosschecking files in changesets and manifests"
327 for f in filenodes:
327 for f in filenodes:
328 if f not in filelinkrevs:
328 if f not in filelinkrevs:
329 print "file %s in manifest but not in changesets"
329 print "file %s in manifest but not in changesets"
330
330
331 for f in filelinkrevs:
331 for f in filelinkrevs:
332 if f not in filenodes:
332 if f not in filenodes:
333 print "file %s in changeset but not in manifest"
333 print "file %s in changeset but not in manifest"
334
334
335 print "checking files"
335 print "checking files"
336 for f in filenodes:
336 for f in filenodes:
337 files += 1
337 files += 1
338 fl = repo.file(f)
338 fl = repo.file(f)
339 nodes = {"\0"*20: 1}
339 nodes = {"\0"*20: 1}
340 for i in range(fl.count()):
340 for i in range(fl.count()):
341 n = fl.node(i)
341 n = fl.node(i)
342
342
343 if n not in filenodes[f]:
343 if n not in filenodes[f]:
344 print "%s:%s not in manifests" % (f, hg.hex(n))
344 print "%s:%s not in manifests" % (f, hg.hex(n))
345 else:
345 else:
346 del filenodes[f][n]
346 del filenodes[f][n]
347
347
348 flr = fl.linkrev(n)
348 flr = fl.linkrev(n)
349 if flr not in filelinkrevs[f]:
349 if flr not in filelinkrevs[f]:
350 print "%s:%s points to unexpected changeset rev %d" \
350 print "%s:%s points to unexpected changeset rev %d" \
351 % (f, hg.hex(n), fl.linkrev(n))
351 % (f, hg.hex(n), fl.linkrev(n))
352 else:
352 else:
353 filelinkrevs[f].remove(flr)
353 filelinkrevs[f].remove(flr)
354
354
355 # verify contents
355 # verify contents
356 t = fl.read(n)
356 t = fl.read(n)
357
357
358 # verify parents
358 # verify parents
359 (p1, p2) = fl.parents(n)
359 (p1, p2) = fl.parents(n)
360 if p1 not in nodes:
360 if p1 not in nodes:
361 print "%s:%s unknown parent 1 %s" % (f, hg.hex(n), hg.hex(p1))
361 print "%s:%s unknown parent 1 %s" % (f, hg.hex(n), hg.hex(p1))
362 if p2 not in nodes:
362 if p2 not in nodes:
363 print "file %s:%s unknown parent %s" % (f, hg.hex(n), hg.hex(p1))
363 print "file %s:%s unknown parent %s" % (f, hg.hex(n), hg.hex(p1))
364 nodes[n] = 1
364 nodes[n] = 1
365
365
366 # cross-check
366 # cross-check
367 for flr in filelinkrevs[f]:
367 for flr in filelinkrevs[f]:
368 print "changeset rev %d not in %s" % (flr, f)
368 print "changeset rev %d not in %s" % (flr, f)
369
369
370 for node in filenodes[f]:
370 for node in filenodes[f]:
371 print "node %s in manifests not in %s" % (hg.hex(n), f)
371 print "node %s in manifests not in %s" % (hg.hex(n), f)
372
372
373
373
374 print "%d files, %d changesets, %d total revisions" % (files, changesets,
374 print "%d files, %d changesets, %d total revisions" % (files, changesets,
375 revisions)
375 revisions)
376
376
377 else:
377 else:
378 print "unknown command\n"
378 print "unknown command\n"
379 help()
379 help()
380 sys.exit(1)
380 sys.exit(1)
@@ -1,873 +1,893 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
9 import urllib
9 import urllib
10 from mercurial import byterange
10 from mercurial import byterange
11 from mercurial.transaction import *
11 from mercurial.transaction import *
12 from mercurial.revlog import *
12 from mercurial.revlog import *
13
13
14 class filelog(revlog):
14 class filelog(revlog):
15 def __init__(self, opener, path):
15 def __init__(self, opener, path):
16 s = self.encodepath(path)
16 s = self.encodepath(path)
17 revlog.__init__(self, opener, os.path.join("data", s + "i"),
17 revlog.__init__(self, opener, os.path.join("data", s + "i"),
18 os.path.join("data", s))
18 os.path.join("data", s))
19
19
20 def encodepath(self, path):
20 def encodepath(self, path):
21 s = sha.sha(path).digest()
21 s = sha.sha(path).digest()
22 s = base64.encodestring(s)[:-3]
22 s = base64.encodestring(s)[:-3]
23 s = re.sub("\+", "%", s)
23 s = re.sub("\+", "%", s)
24 s = re.sub("/", "_", s)
24 s = re.sub("/", "_", s)
25 return s
25 return s
26
26
27 def read(self, node):
27 def read(self, node):
28 return self.revision(node)
28 return self.revision(node)
29 def add(self, text, transaction, link, p1=None, p2=None):
29 def add(self, text, transaction, link, p1=None, p2=None):
30 return self.addrevision(text, transaction, link, p1, p2)
30 return self.addrevision(text, transaction, link, p1, p2)
31
31
32 def resolvedag(self, old, new, transaction, link):
32 def resolvedag(self, old, new, transaction, link):
33 """resolve unmerged heads in our DAG"""
33 """resolve unmerged heads in our DAG"""
34 if old == new: return None
34 if old == new: return None
35 a = self.ancestor(old, new)
35 a = self.ancestor(old, new)
36 if old == a: return None
36 if old == a: return None
37 return self.merge3(old, new, a, transaction, link)
37 return self.merge3(old, new, a, transaction, link)
38
38
39 def merge3(self, my, other, base, transaction, link):
39 def merge3(self, my, other, base, transaction, link):
40 """perform a 3-way merge and append the result"""
40 """perform a 3-way merge and append the result"""
41 def temp(prefix, node):
41 def temp(prefix, node):
42 (fd, name) = tempfile.mkstemp(prefix)
42 (fd, name) = tempfile.mkstemp(prefix)
43 f = os.fdopen(fd, "w")
43 f = os.fdopen(fd, "w")
44 f.write(self.revision(node))
44 f.write(self.revision(node))
45 f.close()
45 f.close()
46 return name
46 return name
47
47
48 a = temp("local", my)
48 a = temp("local", my)
49 b = temp("remote", other)
49 b = temp("remote", other)
50 c = temp("parent", base)
50 c = temp("parent", base)
51
51
52 cmd = os.environ["HGMERGE"]
52 cmd = os.environ["HGMERGE"]
53 r = os.system("%s %s %s %s" % (cmd, a, b, c))
53 r = os.system("%s %s %s %s" % (cmd, a, b, c))
54 if r:
54 if r:
55 raise "Merge failed, implement rollback!"
55 raise "Merge failed, implement rollback!"
56
56
57 t = open(a).read()
57 t = open(a).read()
58 os.unlink(a)
58 os.unlink(a)
59 os.unlink(b)
59 os.unlink(b)
60 os.unlink(c)
60 os.unlink(c)
61 return self.addrevision(t, transaction, link, my, other)
61 return self.addrevision(t, transaction, link, my, other)
62
62
63 def merge(self, other, transaction, linkseq, link):
63 def merge(self, other, transaction, linkseq, link):
64 """perform a merge and resolve resulting heads"""
64 """perform a merge and resolve resulting heads"""
65 (o, n) = self.mergedag(other, transaction, linkseq)
65 (o, n) = self.mergedag(other, transaction, linkseq)
66 return self.resolvedag(o, n, transaction, link)
66 return self.resolvedag(o, n, transaction, link)
67
67
68 class manifest(revlog):
68 class manifest(revlog):
69 def __init__(self, opener):
69 def __init__(self, opener):
70 self.mapcache = None
70 self.mapcache = None
71 self.listcache = None
71 self.listcache = None
72 self.addlist = None
72 self.addlist = None
73 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
73 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
74
74
75 def read(self, node):
75 def read(self, node):
76 if self.mapcache and self.mapcache[0] == node:
76 if self.mapcache and self.mapcache[0] == node:
77 return self.mapcache[1]
77 return self.mapcache[1]
78 text = self.revision(node)
78 text = self.revision(node)
79 map = {}
79 map = {}
80 self.listcache = (text, text.splitlines(1))
80 self.listcache = (text, text.splitlines(1))
81 for l in self.listcache[1]:
81 for l in self.listcache[1]:
82 (f, n) = l.split('\0')
82 (f, n) = l.split('\0')
83 map[f] = bin(n[:40])
83 map[f] = bin(n[:40])
84 self.mapcache = (node, map)
84 self.mapcache = (node, map)
85 return map
85 return map
86
86
87 def diff(self, a, b):
87 def diff(self, a, b):
88 # this is sneaky, as we're not actually using a and b
88 # this is sneaky, as we're not actually using a and b
89 if self.listcache and len(self.listcache[0]) == len(a):
89 if self.listcache and len(self.listcache[0]) == len(a):
90 return mdiff.diff(self.listcache[1], self.addlist, 1)
90 return mdiff.diff(self.listcache[1], self.addlist, 1)
91 else:
91 else:
92 return mdiff.textdiff(a, b)
92 return mdiff.textdiff(a, b)
93
93
94 def add(self, map, transaction, link, p1=None, p2=None):
94 def add(self, map, transaction, link, p1=None, p2=None):
95 files = map.keys()
95 files = map.keys()
96 files.sort()
96 files.sort()
97
97
98 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
98 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
99 text = "".join(self.addlist)
99 text = "".join(self.addlist)
100
100
101 n = self.addrevision(text, transaction, link, p1, p2)
101 n = self.addrevision(text, transaction, link, p1, p2)
102 self.mapcache = (n, map)
102 self.mapcache = (n, map)
103 self.listcache = (text, self.addlist)
103 self.listcache = (text, self.addlist)
104
104
105 return n
105 return n
106
106
107 class changelog(revlog):
107 class changelog(revlog):
108 def __init__(self, opener):
108 def __init__(self, opener):
109 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
109 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
110
110
111 def extract(self, text):
111 def extract(self, text):
112 if not text:
112 if not text:
113 return (nullid, "", "0", [], "")
113 return (nullid, "", "0", [], "")
114 last = text.index("\n\n")
114 last = text.index("\n\n")
115 desc = text[last + 2:]
115 desc = text[last + 2:]
116 l = text[:last].splitlines()
116 l = text[:last].splitlines()
117 manifest = bin(l[0])
117 manifest = bin(l[0])
118 user = l[1]
118 user = l[1]
119 date = l[2]
119 date = l[2]
120 files = l[3:]
120 files = l[3:]
121 return (manifest, user, date, files, desc)
121 return (manifest, user, date, files, desc)
122
122
123 def read(self, node):
123 def read(self, node):
124 return self.extract(self.revision(node))
124 return self.extract(self.revision(node))
125
125
126 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
126 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
127 user = (os.environ.get("HGUSER") or
127 user = (os.environ.get("HGUSER") or
128 os.environ.get("EMAIL") or
128 os.environ.get("EMAIL") or
129 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
129 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
130 date = "%d %d" % (time.time(), time.timezone)
130 date = "%d %d" % (time.time(), time.timezone)
131 list.sort()
131 list.sort()
132 l = [hex(manifest), user, date] + list + ["", desc]
132 l = [hex(manifest), user, date] + list + ["", desc]
133 text = "\n".join(l)
133 text = "\n".join(l)
134 return self.addrevision(text, transaction, self.count(), p1, p2)
134 return self.addrevision(text, transaction, self.count(), p1, p2)
135
135
136 def merge3(self, my, other, base):
136 def merge3(self, my, other, base):
137 pass
137 pass
138
138
139 class dircache:
139 class dircache:
140 def __init__(self, opener, ui):
140 def __init__(self, opener, ui):
141 self.opener = opener
141 self.opener = opener
142 self.dirty = 0
142 self.dirty = 0
143 self.ui = ui
143 self.ui = ui
144 self.map = None
144 self.map = None
145 def __del__(self):
145 def __del__(self):
146 if self.dirty: self.write()
146 if self.dirty: self.write()
147 def __getitem__(self, key):
147 def __getitem__(self, key):
148 try:
148 try:
149 return self.map[key]
149 return self.map[key]
150 except TypeError:
150 except TypeError:
151 self.read()
151 self.read()
152 return self[key]
152 return self[key]
153
153
154 def read(self):
154 def read(self):
155 if self.map is not None: return self.map
155 if self.map is not None: return self.map
156
156
157 self.map = {}
157 self.map = {}
158 try:
158 try:
159 st = self.opener("dircache").read()
159 st = self.opener("dircache").read()
160 except: return
160 except: return
161
161
162 pos = 0
162 pos = 0
163 while pos < len(st):
163 while pos < len(st):
164 e = struct.unpack(">llll", st[pos:pos+16])
164 e = struct.unpack(">llll", st[pos:pos+16])
165 l = e[3]
165 l = e[3]
166 pos += 16
166 pos += 16
167 f = st[pos:pos + l]
167 f = st[pos:pos + l]
168 self.map[f] = e[:3]
168 self.map[f] = e[:3]
169 pos += l
169 pos += l
170
170
171 def update(self, files):
171 def update(self, files):
172 if not files: return
172 if not files: return
173 self.read()
173 self.read()
174 self.dirty = 1
174 self.dirty = 1
175 for f in files:
175 for f in files:
176 try:
176 try:
177 s = os.stat(f)
177 s = os.stat(f)
178 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
178 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
179 except IOError:
179 except IOError:
180 self.remove(f)
180 self.remove(f)
181
181
182 def taint(self, files):
182 def taint(self, files):
183 if not files: return
183 if not files: return
184 self.read()
184 self.read()
185 self.dirty = 1
185 self.dirty = 1
186 for f in files:
186 for f in files:
187 self.map[f] = (0, -1, 0)
187 self.map[f] = (0, -1, 0)
188
188
189 def remove(self, files):
189 def remove(self, files):
190 if not files: return
190 if not files: return
191 self.read()
191 self.read()
192 self.dirty = 1
192 self.dirty = 1
193 for f in files:
193 for f in files:
194 try:
194 try:
195 del self.map[f]
195 del self.map[f]
196 except KeyError:
196 except KeyError:
197 self.ui.warn("Not in dircache: %s\n" % f)
197 self.ui.warn("Not in dircache: %s\n" % f)
198 pass
198 pass
199
199
200 def clear(self):
200 def clear(self):
201 self.map = {}
201 self.map = {}
202 self.dirty = 1
202 self.dirty = 1
203
203
204 def write(self):
204 def write(self):
205 st = self.opener("dircache", "w")
205 st = self.opener("dircache", "w")
206 for f, e in self.map.items():
206 for f, e in self.map.items():
207 e = struct.pack(">llll", e[0], e[1], e[2], len(f))
207 e = struct.pack(">llll", e[0], e[1], e[2], len(f))
208 st.write(e + f)
208 st.write(e + f)
209 self.dirty = 0
209 self.dirty = 0
210
210
211 def copy(self):
211 def copy(self):
212 self.read()
212 self.read()
213 return self.map.copy()
213 return self.map.copy()
214
214
215 # used to avoid circular references so destructors work
215 # used to avoid circular references so destructors work
216 def opener(base):
216 def opener(base):
217 p = base
217 p = base
218 def o(path, mode="r"):
218 def o(path, mode="r"):
219 if p[:7] == "http://":
219 if p[:7] == "http://":
220 f = os.path.join(p, urllib.quote(path))
220 f = os.path.join(p, urllib.quote(path))
221 return httprangereader(f)
221 return httprangereader(f)
222
222
223 f = os.path.join(p, path)
223 f = os.path.join(p, path)
224
224
225 if mode != "r" and os.path.isfile(f):
225 if mode != "r" and os.path.isfile(f):
226 s = os.stat(f)
226 s = os.stat(f)
227 if s.st_nlink > 1:
227 if s.st_nlink > 1:
228 file(f + ".tmp", "w").write(file(f).read())
228 file(f + ".tmp", "w").write(file(f).read())
229 os.rename(f+".tmp", f)
229 os.rename(f+".tmp", f)
230
230
231 return file(f, mode)
231 return file(f, mode)
232
232
233 return o
233 return o
234
234
235 class localrepository:
235 class localrepository:
236 def __init__(self, ui, path=None, create=0):
236 def __init__(self, ui, path=None, create=0):
237 self.remote = 0
237 self.remote = 0
238 if path and path[:7] == "http://":
238 if path and path[:7] == "http://":
239 self.remote = 1
239 self.remote = 1
240 self.path = path
240 self.path = path
241 else:
241 else:
242 if not path:
242 if not path:
243 p = os.getcwd()
243 p = os.getcwd()
244 while not os.path.isdir(os.path.join(p, ".hg")):
244 while not os.path.isdir(os.path.join(p, ".hg")):
245 p = os.path.dirname(p)
245 p = os.path.dirname(p)
246 if p == "/": raise "No repo found"
246 if p == "/": raise "No repo found"
247 path = p
247 path = p
248 self.path = os.path.join(path, ".hg")
248 self.path = os.path.join(path, ".hg")
249
249
250 self.root = path
250 self.root = path
251 self.ui = ui
251 self.ui = ui
252
252
253 if create:
253 if create:
254 os.mkdir(self.path)
254 os.mkdir(self.path)
255 os.mkdir(self.join("data"))
255 os.mkdir(self.join("data"))
256
256
257 self.opener = opener(self.path)
257 self.opener = opener(self.path)
258 self.manifest = manifest(self.opener)
258 self.manifest = manifest(self.opener)
259 self.changelog = changelog(self.opener)
259 self.changelog = changelog(self.opener)
260 self.ignorelist = None
260 self.ignorelist = None
261
261
262 if not self.remote:
262 if not self.remote:
263 self.dircache = dircache(self.opener, ui)
263 self.dircache = dircache(self.opener, ui)
264 try:
264 try:
265 self.current = bin(self.opener("current").read())
265 self.current = bin(self.opener("current").read())
266 except IOError:
266 except IOError:
267 self.current = None
267 self.current = None
268
268
269 def setcurrent(self, node):
269 def setcurrent(self, node):
270 self.current = node
270 self.current = node
271 self.opener("current", "w").write(hex(node))
271 self.opener("current", "w").write(hex(node))
272
272
273 def ignore(self, f):
273 def ignore(self, f):
274 if self.ignorelist is None:
274 if self.ignorelist is None:
275 self.ignorelist = []
275 self.ignorelist = []
276 try:
276 try:
277 l = open(os.path.join(self.root, ".hgignore")).readlines()
277 l = open(os.path.join(self.root, ".hgignore")).readlines()
278 for pat in l:
278 for pat in l:
279 if pat != "\n":
279 if pat != "\n":
280 self.ignorelist.append(re.compile(pat[:-1]))
280 self.ignorelist.append(re.compile(pat[:-1]))
281 except IOError: pass
281 except IOError: pass
282 for pat in self.ignorelist:
282 for pat in self.ignorelist:
283 if pat.search(f): return True
283 if pat.search(f): return True
284 return False
284 return False
285
285
286 def join(self, f):
286 def join(self, f):
287 return os.path.join(self.path, f)
287 return os.path.join(self.path, f)
288
288
289 def file(self, f):
289 def file(self, f):
290 return filelog(self.opener, f)
290 return filelog(self.opener, f)
291
291
292 def transaction(self):
292 def transaction(self):
293 return transaction(self.opener, self.join("journal"))
293 return transaction(self.opener, self.join("journal"))
294
294
295 def merge(self, other):
295 def merge(self, other):
296 tr = self.transaction()
296 tr = self.transaction()
297 changed = {}
297 changed = {}
298 new = {}
298 new = {}
299 seqrev = self.changelog.count()
299 seqrev = self.changelog.count()
300 # some magic to allow fiddling in nested scope
300 # some magic to allow fiddling in nested scope
301 nextrev = [seqrev]
301 nextrev = [seqrev]
302
302
303 # helpers for back-linking file revisions to local changeset
303 # helpers for back-linking file revisions to local changeset
304 # revisions so we can immediately get to changeset from annotate
304 # revisions so we can immediately get to changeset from annotate
305 def accumulate(text):
305 def accumulate(text):
306 # track which files are added in which changeset and the
306 # track which files are added in which changeset and the
307 # corresponding _local_ changeset revision
307 # corresponding _local_ changeset revision
308 files = self.changelog.extract(text)[3]
308 files = self.changelog.extract(text)[3]
309 for f in files:
309 for f in files:
310 changed.setdefault(f, []).append(nextrev[0])
310 changed.setdefault(f, []).append(nextrev[0])
311 nextrev[0] += 1
311 nextrev[0] += 1
312
312
313 def seq(start):
313 def seq(start):
314 while 1:
314 while 1:
315 yield start
315 yield start
316 start += 1
316 start += 1
317
317
318 def lseq(l):
318 def lseq(l):
319 for r in l:
319 for r in l:
320 yield r
320 yield r
321
321
322 # begin the import/merge of changesets
322 # begin the import/merge of changesets
323 self.ui.status("merging new changesets\n")
323 self.ui.status("merging new changesets\n")
324 (co, cn) = self.changelog.mergedag(other.changelog, tr,
324 (co, cn) = self.changelog.mergedag(other.changelog, tr,
325 seq(seqrev), accumulate)
325 seq(seqrev), accumulate)
326 resolverev = self.changelog.count()
326 resolverev = self.changelog.count()
327
327
328 # is there anything to do?
328 # is there anything to do?
329 if co == cn:
329 if co == cn:
330 tr.close()
330 tr.close()
331 return
331 return
332
332
333 # do we need to resolve?
333 # do we need to resolve?
334 simple = (co == self.changelog.ancestor(co, cn))
334 simple = (co == self.changelog.ancestor(co, cn))
335
335
336 # merge all files changed by the changesets,
336 # merge all files changed by the changesets,
337 # keeping track of the new tips
337 # keeping track of the new tips
338 changelist = changed.keys()
338 changelist = changed.keys()
339 changelist.sort()
339 changelist.sort()
340 for f in changelist:
340 for f in changelist:
341 sys.stdout.write(".")
341 sys.stdout.write(".")
342 sys.stdout.flush()
342 sys.stdout.flush()
343 r = self.file(f)
343 r = self.file(f)
344 node = r.merge(other.file(f), tr, lseq(changed[f]), resolverev)
344 node = r.merge(other.file(f), tr, lseq(changed[f]), resolverev)
345 if node:
345 if node:
346 new[f] = node
346 new[f] = node
347 sys.stdout.write("\n")
347 sys.stdout.write("\n")
348
348
349 # begin the merge of the manifest
349 # begin the merge of the manifest
350 self.ui.status("merging manifests\n")
350 self.ui.status("merging manifests\n")
351 (mm, mo) = self.manifest.mergedag(other.manifest, tr, seq(seqrev))
351 (mm, mo) = self.manifest.mergedag(other.manifest, tr, seq(seqrev))
352
352
353 # For simple merges, we don't need to resolve manifests or changesets
353 # For simple merges, we don't need to resolve manifests or changesets
354 if simple:
354 if simple:
355 tr.close()
355 tr.close()
356 return
356 return
357
357
358 ma = self.manifest.ancestor(mm, mo)
358 ma = self.manifest.ancestor(mm, mo)
359
359
360 # resolve the manifest to point to all the merged files
360 # resolve the manifest to point to all the merged files
361 self.ui.status("resolving manifests\n")
361 self.ui.status("resolving manifests\n")
362 mmap = self.manifest.read(mm) # mine
362 mmap = self.manifest.read(mm) # mine
363 omap = self.manifest.read(mo) # other
363 omap = self.manifest.read(mo) # other
364 amap = self.manifest.read(ma) # ancestor
364 amap = self.manifest.read(ma) # ancestor
365 nmap = {}
365 nmap = {}
366
366
367 for f, mid in mmap.iteritems():
367 for f, mid in mmap.iteritems():
368 if f in omap:
368 if f in omap:
369 if mid != omap[f]:
369 if mid != omap[f]:
370 nmap[f] = new.get(f, mid) # use merged version
370 nmap[f] = new.get(f, mid) # use merged version
371 else:
371 else:
372 nmap[f] = new.get(f, mid) # they're the same
372 nmap[f] = new.get(f, mid) # they're the same
373 del omap[f]
373 del omap[f]
374 elif f in amap:
374 elif f in amap:
375 if mid != amap[f]:
375 if mid != amap[f]:
376 pass # we should prompt here
376 pass # we should prompt here
377 else:
377 else:
378 pass # other deleted it
378 pass # other deleted it
379 else:
379 else:
380 nmap[f] = new.get(f, mid) # we created it
380 nmap[f] = new.get(f, mid) # we created it
381
381
382 del mmap
382 del mmap
383
383
384 for f, oid in omap.iteritems():
384 for f, oid in omap.iteritems():
385 if f in amap:
385 if f in amap:
386 if oid != amap[f]:
386 if oid != amap[f]:
387 pass # this is the nasty case, we should prompt
387 pass # this is the nasty case, we should prompt
388 else:
388 else:
389 pass # probably safe
389 pass # probably safe
390 else:
390 else:
391 nmap[f] = new.get(f, oid) # remote created it
391 nmap[f] = new.get(f, oid) # remote created it
392
392
393 del omap
393 del omap
394 del amap
394 del amap
395
395
396 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
396 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
397
397
398 # Now all files and manifests are merged, we add the changed files
398 # Now all files and manifests are merged, we add the changed files
399 # and manifest id to the changelog
399 # and manifest id to the changelog
400 self.ui.status("committing merge changeset\n")
400 self.ui.status("committing merge changeset\n")
401 new = new.keys()
401 new = new.keys()
402 new.sort()
402 new.sort()
403 if co == cn: cn = -1
403 if co == cn: cn = -1
404
404
405 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
405 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
406 edittext = self.ui.edit(edittext)
406 edittext = self.ui.edit(edittext)
407 n = self.changelog.add(node, new, edittext, tr, co, cn)
407 n = self.changelog.add(node, new, edittext, tr, co, cn)
408
408
409 tr.close()
409 tr.close()
410
410
411 def commit(self, parent, update = None, text = ""):
411 def commit(self, parent, update = None, text = ""):
412 tr = self.transaction()
412 tr = self.transaction()
413
413
414 try:
414 try:
415 remove = [ l[:-1] for l in self.opener("to-remove") ]
415 remove = [ l[:-1] for l in self.opener("to-remove") ]
416 os.unlink(self.join("to-remove"))
416 os.unlink(self.join("to-remove"))
417
417
418 except IOError:
418 except IOError:
419 remove = []
419 remove = []
420
420
421 if update == None:
421 if update == None:
422 update = self.diffdir(self.root, parent)[0]
422 update = self.diffdir(self.root, parent)[0]
423
423
424 # check in files
424 # check in files
425 new = {}
425 new = {}
426 linkrev = self.changelog.count()
426 linkrev = self.changelog.count()
427 for f in update:
427 for f in update:
428 try:
428 try:
429 t = file(f).read()
429 t = file(f).read()
430 except IOError:
430 except IOError:
431 remove.append(f)
431 remove.append(f)
432 continue
432 continue
433 r = self.file(f)
433 r = self.file(f)
434 new[f] = r.add(t, tr, linkrev)
434 new[f] = r.add(t, tr, linkrev)
435
435
436 # update manifest
436 # update manifest
437 mmap = self.manifest.read(self.manifest.tip())
437 mmap = self.manifest.read(self.manifest.tip())
438 mmap.update(new)
438 mmap.update(new)
439 for f in remove:
439 for f in remove:
440 del mmap[f]
440 del mmap[f]
441 mnode = self.manifest.add(mmap, tr, linkrev)
441 mnode = self.manifest.add(mmap, tr, linkrev)
442
442
443 # add changeset
443 # add changeset
444 new = new.keys()
444 new = new.keys()
445 new.sort()
445 new.sort()
446
446
447 edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
447 edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
448 edittext += "".join(["HG: removed %s\n" % f for f in remove])
448 edittext += "".join(["HG: removed %s\n" % f for f in remove])
449 edittext = self.ui.edit(edittext)
449 edittext = self.ui.edit(edittext)
450
450
451 n = self.changelog.add(mnode, new, edittext, tr)
451 n = self.changelog.add(mnode, new, edittext, tr)
452 tr.close()
452 tr.close()
453
453
454 self.setcurrent(n)
454 self.setcurrent(n)
455 self.dircache.update(new)
455 self.dircache.update(new)
456 self.dircache.remove(remove)
456 self.dircache.remove(remove)
457
457
458 def checkdir(self, path):
458 def checkdir(self, path):
459 d = os.path.dirname(path)
459 d = os.path.dirname(path)
460 if not d: return
460 if not d: return
461 if not os.path.isdir(d):
461 if not os.path.isdir(d):
462 self.checkdir(d)
462 self.checkdir(d)
463 os.mkdir(d)
463 os.mkdir(d)
464
464
465 def checkout(self, node):
465 def checkout(self, node):
466 # checkout is really dumb at the moment
466 # checkout is really dumb at the moment
467 # it ought to basically merge
467 # it ought to basically merge
468 change = self.changelog.read(node)
468 change = self.changelog.read(node)
469 mmap = self.manifest.read(change[0])
469 mmap = self.manifest.read(change[0])
470
470
471 l = mmap.keys()
471 l = mmap.keys()
472 l.sort()
472 l.sort()
473 stats = []
473 stats = []
474 for f in l:
474 for f in l:
475 r = self.file(f)
475 r = self.file(f)
476 t = r.revision(mmap[f])
476 t = r.revision(mmap[f])
477 try:
477 try:
478 file(f, "w").write(t)
478 file(f, "w").write(t)
479 except:
479 except:
480 self.checkdir(f)
480 self.checkdir(f)
481 file(f, "w").write(t)
481 file(f, "w").write(t)
482
482
483 self.setcurrent(node)
483 self.setcurrent(node)
484 self.dircache.clear()
484 self.dircache.clear()
485 self.dircache.update(l)
485 self.dircache.update(l)
486
486
487 def diffdir(self, path, changeset):
487 def diffdir(self, path, changeset):
488 changed = []
488 changed = []
489 mf = {}
489 mf = {}
490 added = []
490 added = []
491
491
492 if changeset:
492 if changeset:
493 change = self.changelog.read(changeset)
493 change = self.changelog.read(changeset)
494 mf = self.manifest.read(change[0])
494 mf = self.manifest.read(change[0])
495
495
496 if changeset == self.current:
496 if changeset == self.current:
497 dc = self.dircache.copy()
497 dc = self.dircache.copy()
498 else:
498 else:
499 dc = dict.fromkeys(mf)
499 dc = dict.fromkeys(mf)
500
500
501 def fcmp(fn):
501 def fcmp(fn):
502 t1 = file(os.path.join(self.root, fn)).read()
502 t1 = file(os.path.join(self.root, fn)).read()
503 t2 = self.file(fn).revision(mf[fn])
503 t2 = self.file(fn).revision(mf[fn])
504 return cmp(t1, t2)
504 return cmp(t1, t2)
505
505
506 for dir, subdirs, files in os.walk(self.root):
506 for dir, subdirs, files in os.walk(self.root):
507 d = dir[len(self.root)+1:]
507 d = dir[len(self.root)+1:]
508 if ".hg" in subdirs: subdirs.remove(".hg")
508 if ".hg" in subdirs: subdirs.remove(".hg")
509
509
510 for f in files:
510 for f in files:
511 fn = os.path.join(d, f)
511 fn = os.path.join(d, f)
512 try: s = os.stat(os.path.join(self.root, fn))
512 try: s = os.stat(os.path.join(self.root, fn))
513 except: continue
513 except: continue
514 if fn in dc:
514 if fn in dc:
515 c = dc[fn]
515 c = dc[fn]
516 del dc[fn]
516 del dc[fn]
517 if not c:
517 if not c:
518 if fcmp(fn):
518 if fcmp(fn):
519 changed.append(fn)
519 changed.append(fn)
520 elif c[1] != s.st_size:
520 elif c[1] != s.st_size:
521 changed.append(fn)
521 changed.append(fn)
522 elif c[0] != s.st_mode or c[2] != s.st_mtime:
522 elif c[0] != s.st_mode or c[2] != s.st_mtime:
523 if fcmp(fn):
523 if fcmp(fn):
524 changed.append(fn)
524 changed.append(fn)
525 else:
525 else:
526 if self.ignore(fn): continue
526 if self.ignore(fn): continue
527 added.append(fn)
527 added.append(fn)
528
528
529 deleted = dc.keys()
529 deleted = dc.keys()
530 deleted.sort()
530 deleted.sort()
531
531
532 return (changed, added, deleted)
532 return (changed, added, deleted)
533
533
534 def diffrevs(self, node1, node2):
534 def diffrevs(self, node1, node2):
535 changed, added = [], []
535 changed, added = [], []
536
536
537 change = self.changelog.read(node1)
537 change = self.changelog.read(node1)
538 mf1 = self.manifest.read(change[0])
538 mf1 = self.manifest.read(change[0])
539 change = self.changelog.read(node2)
539 change = self.changelog.read(node2)
540 mf2 = self.manifest.read(change[0])
540 mf2 = self.manifest.read(change[0])
541
541
542 for fn in mf2:
542 for fn in mf2:
543 if mf1.has_key(fn):
543 if mf1.has_key(fn):
544 if mf1[fn] != mf2[fn]:
544 if mf1[fn] != mf2[fn]:
545 changed.append(fn)
545 changed.append(fn)
546 del mf1[fn]
546 del mf1[fn]
547 else:
547 else:
548 added.append(fn)
548 added.append(fn)
549
549
550 deleted = mf1.keys()
550 deleted = mf1.keys()
551 deleted.sort()
551 deleted.sort()
552
552
553 return (changed, added, deleted)
553 return (changed, added, deleted)
554
554
555 def add(self, list):
555 def add(self, list):
556 self.dircache.taint(list)
556 self.dircache.taint(list)
557
557
558 def remove(self, list):
558 def remove(self, list):
559 dl = self.opener("to-remove", "a")
559 dl = self.opener("to-remove", "a")
560 for f in list:
560 for f in list:
561 dl.write(f + "\n")
561 dl.write(f + "\n")
562
562
563 def branches(self, nodes):
563 def branches(self, nodes):
564 if not nodes: nodes = [self.changelog.tip()]
564 if not nodes: nodes = [self.changelog.tip()]
565 b = []
565 b = []
566 for n in nodes:
566 for n in nodes:
567 t = n
567 t = n
568 while n:
568 while n:
569 p = self.changelog.parents(n)
569 p = self.changelog.parents(n)
570 if p[1] != nullid or p[0] == nullid:
570 if p[1] != nullid or p[0] == nullid:
571 b.append((t, n, p[0], p[1]))
571 b.append((t, n, p[0], p[1]))
572 break
572 break
573 n = p[0]
573 n = p[0]
574 return b
574 return b
575
575
576 def between(self, pairs):
576 def between(self, pairs):
577 r = []
577 r = []
578
578
579 for top, bottom in pairs:
579 for top, bottom in pairs:
580 n, l, i = top, [], 0
580 n, l, i = top, [], 0
581 f = 1
581 f = 1
582
582
583 while n != bottom:
583 while n != bottom:
584 p = self.changelog.parents(n)[0]
584 p = self.changelog.parents(n)[0]
585 if i == f:
585 if i == f:
586 l.append(n)
586 l.append(n)
587 f = f * 2
587 f = f * 2
588 n = p
588 n = p
589 i += 1
589 i += 1
590
590
591 r.append(l)
591 r.append(l)
592
592
593 return r
593 return r
594
594
595 def newer(self, nodes):
595 def newer(self, nodes):
596 m = {}
596 m = {}
597 nl = []
597 nl = []
598 cl = self.changelog
598 cl = self.changelog
599 t = l = cl.count()
599 t = l = cl.count()
600 for n in nodes:
600 for n in nodes:
601 l = min(l, cl.rev(n))
601 l = min(l, cl.rev(n))
602 for p in cl.parents(n):
602 for p in cl.parents(n):
603 m[p] = 1
603 m[p] = 1
604
604
605 for i in xrange(l, t):
605 for i in xrange(l, t):
606 n = cl.node(i)
606 n = cl.node(i)
607 for p in cl.parents(n):
607 for p in cl.parents(n):
608 if p in m and n not in m:
608 if p in m and n not in m:
609 m[n] = 1
609 m[n] = 1
610 nl.append(n)
610 nl.append(n)
611
611
612 return nl
612 return nl
613
613
614 def getchangegroup(self, remote):
614 def getchangegroup(self, remote):
615 tip = remote.branches([])[0]
615 tip = remote.branches([])[0]
616 cl = self.changelog
616 m = self.changelog.nodemap
617 unknown = [tip]
617 unknown = [tip]
618 search = []
618 search = []
619 fetch = []
619 fetch = []
620
620
621 if tip[0] == self.changelog.tip():
621 if tip[0] in m:
622 return None
622 return None
623
623
624 while unknown:
624 while unknown:
625 n = unknown.pop(0)
625 n = unknown.pop(0)
626 if n == nullid: break
626 if n == nullid: break
627 if n[1] and cl.nodemap.has_key(n[1]): # do we know the base?
627 if n[1] and n[1] in m: # do we know the base?
628 search.append(n) # schedule branch range for scanning
628 search.append(n) # schedule branch range for scanning
629 else:
629 else:
630 for b in remote.branches([n[2], n[3]]):
630 for b in remote.branches([n[2], n[3]]):
631 if cl.nodemap.has_key(b[0]):
631 if b[0] in m:
632 fetch.append(n[1]) # earliest unknown
632 if n[1] not in fetch:
633 fetch.append(n[1]) # earliest unknown
633 else:
634 else:
634 unknown.append(b)
635 unknown.append(b)
635
636
636 while search:
637 while search:
637 n = search.pop(0)
638 n = search.pop(0)
638 l = remote.between([(n[0], n[1])])[0]
639 l = remote.between([(n[0], n[1])])[0]
639 p = n[0]
640 p = n[0]
640 f = 1
641 f = 1
641 for i in l + [n[1]]:
642 for i in l + [n[1]]:
642 if self.changelog.nodemap.has_key(i):
643 if i in m:
643 if f <= 4:
644 if f <= 4:
644 fetch.append(p)
645 fetch.append(p)
645 else:
646 else:
646 search.append((p, i))
647 search.append((p, i))
648 break
647 p, f = i, f * 2
649 p, f = i, f * 2
648
650
651 for f in fetch:
652 if f in m:
653 raise "already have", hex(f[:4])
654
649 return remote.changegroup(fetch)
655 return remote.changegroup(fetch)
650
656
651 def changegroup(self, basenodes):
657 def changegroup(self, basenodes):
652 nodes = self.newer(basenodes)
658 nodes = self.newer(basenodes)
653
659
654 # construct the link map
660 # construct the link map
655 linkmap = {}
661 linkmap = {}
656 for n in nodes:
662 for n in nodes:
657 linkmap[self.changelog.rev(n)] = n
663 linkmap[self.changelog.rev(n)] = n
658
664
659 # construct a list of all changed files
665 # construct a list of all changed files
660 changed = {}
666 changed = {}
661 for n in nodes:
667 for n in nodes:
662 c = self.changelog.read(n)
668 c = self.changelog.read(n)
663 for f in c[3]:
669 for f in c[3]:
664 changed[f] = 1
670 changed[f] = 1
665 changed = changed.keys()
671 changed = changed.keys()
666 changed.sort()
672 changed.sort()
667
673
668 # the changegroup is changesets + manifests + all file revs
674 # the changegroup is changesets + manifests + all file revs
669 revs = [ self.changelog.rev(n) for n in nodes ]
675 revs = [ self.changelog.rev(n) for n in nodes ]
670
676
671 yield self.changelog.group(linkmap)
677 yield self.changelog.group(linkmap)
672 yield self.manifest.group(linkmap)
678 yield self.manifest.group(linkmap)
673
679
674 for f in changed:
680 for f in changed:
675 g = self.file(f).group(linkmap)
681 g = self.file(f).group(linkmap)
676 if not g: raise "couldn't find change to %s" % f
682 if not g: raise "couldn't find change to %s" % f
677 l = struct.pack(">l", len(f))
683 l = struct.pack(">l", len(f))
678 yield "".join([l, f, g])
684 yield "".join([l, f, g])
679
685
680 def addchangegroup(self, data):
686 def addchangegroup(self, generator):
681 def getlen(data, pos):
687 class genread:
682 return struct.unpack(">l", data[pos:pos + 4])[0]
688 def __init__(self, generator):
689 self.g = generator
690 self.buf = ""
691 def read(self, l):
692 while l > len(self.buf):
693 try:
694 self.buf += self.g.next()
695 except StopIteration:
696 break
697 d, self.buf = self.buf[:l], self.buf[l:]
698 return d
699
700 if not generator: return
701 source = genread(generator)
683
702
684 if not data: return
703 def getchunk(add = 0):
685
704 d = source.read(4)
705 if not d: return ""
706 l = struct.unpack(">l", d)[0]
707 return source.read(l - 4 + add)
708
686 tr = self.transaction()
709 tr = self.transaction()
687 simple = True
710 simple = True
688
711
689 print "merging changesets"
712 print "merging changesets"
690 # pull off the changeset group
713 # pull off the changeset group
691 l = getlen(data, 0)
714 csg = getchunk()
692 csg = data[0:l]
693 pos = l
694 co = self.changelog.tip()
715 co = self.changelog.tip()
695 cn = self.changelog.addgroup(csg, lambda x: self.changelog.count(), tr)
716 cn = self.changelog.addgroup(csg, lambda x: self.changelog.count(), tr)
696
717
697 print "merging manifests"
718 print "merging manifests"
698 # pull off the manifest group
719 # pull off the manifest group
699 l = getlen(data, pos)
720 mfg = getchunk()
700 mfg = data[pos: pos + l]
701 pos += l
702 mo = self.manifest.tip()
721 mo = self.manifest.tip()
703 mn = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)
722 mm = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)
704
723
705 # do we need a resolve?
724 # do we need a resolve?
706 if self.changelog.ancestor(co, cn) != co:
725 if self.changelog.ancestor(co, cn) != co:
707 print "NEED RESOLVE"
708 simple = False
726 simple = False
709 resolverev = self.changelog.count()
727 resolverev = self.changelog.count()
710
728
711 # process the files
729 # process the files
712 print "merging files"
730 print "merging files"
713 new = {}
731 new = {}
714 while pos < len(data):
732 while 1:
715 l = getlen(data, pos)
733 f = getchunk(4)
716 pos += 4
734 if not f: break
717 f = data[pos:pos + l]
735 fg = getchunk()
718 pos += l
719
720 l = getlen(data, pos)
721 fg = data[pos: pos + l]
722 pos += l
723
736
724 fl = self.file(f)
737 fl = self.file(f)
725 o = fl.tip()
738 o = fl.tip()
726 n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
739 n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
727 if not simple:
740 if not simple:
728 new[fl] = fl.resolvedag(o, n, tr, resolverev)
741 nn = fl.resolvedag(o, n, tr, resolverev)
742 if nn: new[f] = nn
729
743
730 # For simple merges, we don't need to resolve manifests or changesets
744 # For simple merges, we don't need to resolve manifests or changesets
731 if simple:
745 if simple:
732 tr.close()
746 tr.close()
733 return
747 return
734
748
735 # resolve the manifest to point to all the merged files
749 # resolve the manifest to point to all the merged files
736 self.ui.status("resolving manifests\n")
750 self.ui.status("resolving manifests\n")
737 ma = self.manifest.ancestor(mm, mo)
751 ma = self.manifest.ancestor(mm, mo)
738 mmap = self.manifest.read(mm) # mine
752 mmap = self.manifest.read(mm) # mine
739 omap = self.manifest.read(mo) # other
753 omap = self.manifest.read(mo) # other
740 amap = self.manifest.read(ma) # ancestor
754 amap = self.manifest.read(ma) # ancestor
741 nmap = {}
755 nmap = {}
742
756
743 for f, mid in mmap.iteritems():
757 for f, mid in mmap.iteritems():
744 if f in omap:
758 if f in omap:
745 if mid != omap[f]:
759 if mid != omap[f]:
746 nmap[f] = new.get(f, mid) # use merged version
760 nmap[f] = new.get(f, mid) # use merged version
747 else:
761 else:
748 nmap[f] = new.get(f, mid) # they're the same
762 nmap[f] = new.get(f, mid) # they're the same
749 del omap[f]
763 del omap[f]
750 elif f in amap:
764 elif f in amap:
751 if mid != amap[f]:
765 if mid != amap[f]:
752 pass # we should prompt here
766 pass # we should prompt here
753 else:
767 else:
754 pass # other deleted it
768 pass # other deleted it
755 else:
769 else:
756 nmap[f] = new.get(f, mid) # we created it
770 nmap[f] = new.get(f, mid) # we created it
757
771
758 del mmap
772 del mmap
759
773
760 for f, oid in omap.iteritems():
774 for f, oid in omap.iteritems():
761 if f in amap:
775 if f in amap:
762 if oid != amap[f]:
776 if oid != amap[f]:
763 pass # this is the nasty case, we should prompt
777 pass # this is the nasty case, we should prompt
764 else:
778 else:
765 pass # probably safe
779 pass # probably safe
766 else:
780 else:
767 nmap[f] = new.get(f, oid) # remote created it
781 nmap[f] = new.get(f, oid) # remote created it
768
782
769 del omap
783 del omap
770 del amap
784 del amap
771
785
772 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
786 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
773
787
774 # Now all files and manifests are merged, we add the changed files
788 # Now all files and manifests are merged, we add the changed files
775 # and manifest id to the changelog
789 # and manifest id to the changelog
776 self.ui.status("committing merge changeset\n")
790 self.ui.status("committing merge changeset\n")
777 new = new.keys()
791 new = new.keys()
778 new.sort()
792 new.sort()
779 if co == cn: cn = -1
793 if co == cn: cn = -1
780
794
781 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
795 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
782 edittext = self.ui.edit(edittext)
796 edittext = self.ui.edit(edittext)
783 n = self.changelog.add(node, new, edittext, tr, co, cn)
797 n = self.changelog.add(node, new, edittext, tr, co, cn)
784
798
785 tr.close()
799 tr.close()
786
800
787 class remoterepository:
801 class remoterepository:
788 def __init__(self, ui, path):
802 def __init__(self, ui, path):
789 self.url = path.replace("hg://", "http://", 1)
803 self.url = path.replace("hg://", "http://", 1)
790 self.ui = ui
804 self.ui = ui
791
805
792 def do_cmd(self, cmd, **args):
806 def do_cmd(self, cmd, **args):
793 q = {"cmd": cmd}
807 q = {"cmd": cmd}
794 q.update(args)
808 q.update(args)
795 qs = urllib.urlencode(q)
809 qs = urllib.urlencode(q)
796 cu = "%s?%s" % (self.url, qs)
810 cu = "%s?%s" % (self.url, qs)
797 return urllib.urlopen(cu).read()
811 return urllib.urlopen(cu)
798
812
799 def branches(self, nodes):
813 def branches(self, nodes):
800 n = " ".join(map(hex, nodes))
814 n = " ".join(map(hex, nodes))
801 d = self.do_cmd("branches", nodes=n)
815 d = self.do_cmd("branches", nodes=n).read()
802 br = [ map(bin, b.split(" ")) for b in d.splitlines() ]
816 br = [ map(bin, b.split(" ")) for b in d.splitlines() ]
803 return br
817 return br
804
818
805 def between(self, pairs):
819 def between(self, pairs):
806 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
820 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
807 d = self.do_cmd("between", pairs=n)
821 d = self.do_cmd("between", pairs=n).read()
808 p = [ map(bin, l.split(" ")) for l in d.splitlines() ]
822 p = [ map(bin, l.split(" ")) for l in d.splitlines() ]
809 return p
823 return p
810
824
811 def changegroup(self, nodes):
825 def changegroup(self, nodes):
812 n = " ".join(map(hex, nodes))
826 n = " ".join(map(hex, nodes))
813 d = self.do_cmd("changegroup", roots=n)
827 zd = zlib.decompressobj()
814 return zlib.decompress(d)
828 f = self.do_cmd("changegroup", roots=n)
829 while 1:
830 d = f.read(4096)
831 if not d:
832 yield zd.flush()
833 break
834 yield zd.decompress(d)
815
835
816 def repository(ui, path=None, create=0):
836 def repository(ui, path=None, create=0):
817 if path and path[:5] == "hg://":
837 if path and path[:5] == "hg://":
818 return remoterepository(ui, path)
838 return remoterepository(ui, path)
819 else:
839 else:
820 return localrepository(ui, path, create)
840 return localrepository(ui, path, create)
821
841
822 class ui:
842 class ui:
823 def __init__(self, verbose=False, debug=False):
843 def __init__(self, verbose=False, debug=False):
824 self.verbose = verbose
844 self.verbose = verbose
825 def write(self, *args):
845 def write(self, *args):
826 for a in args:
846 for a in args:
827 sys.stdout.write(str(a))
847 sys.stdout.write(str(a))
828 def prompt(self, msg, pat):
848 def prompt(self, msg, pat):
829 while 1:
849 while 1:
830 sys.stdout.write(msg)
850 sys.stdout.write(msg)
831 r = sys.stdin.readline()[:-1]
851 r = sys.stdin.readline()[:-1]
832 if re.match(pat, r):
852 if re.match(pat, r):
833 return r
853 return r
834 def status(self, *msg):
854 def status(self, *msg):
835 self.write(*msg)
855 self.write(*msg)
836 def warn(self, msg):
856 def warn(self, msg):
837 self.write(*msg)
857 self.write(*msg)
838 def note(self, msg):
858 def note(self, msg):
839 if self.verbose: self.write(*msg)
859 if self.verbose: self.write(*msg)
840 def debug(self, msg):
860 def debug(self, msg):
841 if self.debug: self.write(*msg)
861 if self.debug: self.write(*msg)
842 def edit(self, text):
862 def edit(self, text):
843 (fd, name) = tempfile.mkstemp("hg")
863 (fd, name) = tempfile.mkstemp("hg")
844 f = os.fdopen(fd, "w")
864 f = os.fdopen(fd, "w")
845 f.write(text)
865 f.write(text)
846 f.close()
866 f.close()
847
867
848 editor = os.environ.get("EDITOR", "vi")
868 editor = os.environ.get("EDITOR", "vi")
849 r = os.system("%s %s" % (editor, name))
869 r = os.system("%s %s" % (editor, name))
850 if r:
870 if r:
851 raise "Edit failed!"
871 raise "Edit failed!"
852
872
853 t = open(name).read()
873 t = open(name).read()
854 t = re.sub("(?m)^HG:.*\n", "", t)
874 t = re.sub("(?m)^HG:.*\n", "", t)
855
875
856 return t
876 return t
857
877
858
878
859 class httprangereader:
879 class httprangereader:
860 def __init__(self, url):
880 def __init__(self, url):
861 self.url = url
881 self.url = url
862 self.pos = 0
882 self.pos = 0
863 def seek(self, pos):
883 def seek(self, pos):
864 self.pos = pos
884 self.pos = pos
865 def read(self, bytes=None):
885 def read(self, bytes=None):
866 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
886 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
867 urllib2.install_opener(opener)
887 urllib2.install_opener(opener)
868 req = urllib2.Request(self.url)
888 req = urllib2.Request(self.url)
869 end = ''
889 end = ''
870 if bytes: end = self.pos + bytes
890 if bytes: end = self.pos + bytes
871 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
891 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
872 f = urllib2.urlopen(req)
892 f = urllib2.urlopen(req)
873 return f.read()
893 return f.read()
@@ -1,414 +1,412 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # This provides efficient delta storage with O(1) retrieve and append
3 # This provides efficient delta storage with O(1) retrieve and append
4 # and O(changes) merge between branches
4 # and O(changes) merge between branches
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 import zlib, struct, sha, os, tempfile, binascii
11 import zlib, struct, sha, os, tempfile, binascii
12 from mercurial import mdiff
12 from mercurial import mdiff
13
13
def hex(node):
    """Return the hexadecimal representation of a binary node id."""
    return binascii.hexlify(node)
def bin(node):
    """Return the binary node id for its hexadecimal representation."""
    return binascii.unhexlify(node)
16
16
def compress(text):
    """Compress *text* with zlib at the default compression level."""
    return zlib.compress(text)
19
19
def decompress(bin):
    """Inverse of compress(): return the original uncompressed data."""
    return zlib.decompress(bin)
22
22
def hash(text, p1, p2):
    """Digest identifying a revision: SHA-1 over the two parent ids
    (in sorted order, making the hash parent-order independent)
    followed by the revision text."""
    a, b = min(p1, p2), max(p1, p2)
    return sha.sha(a + b + text).digest()
27
27
# id of the null revision: used as the missing-parent marker and as
# the ancestor of revision 0 (nodemap maps it to revision -1)
nullid = "\0" * 20
# on-disk index record: offset, size (compressed), base rev, linkrev,
# then three 20-byte ids: parent 1, parent 2, this node
indexformat = ">4l20s20s20s"
30
30
class revlog:
    """Delta-chained storage for the revision history of one item.

    Revisions live in two files: an index (one fixed-size record per
    revision, see indexformat) and a data file holding zlib-compressed
    full texts and deltas.  Each revision is stored as a delta against
    the previous one until the accumulated chain grows comparable to
    the full text, at which point a new full version (a "base") is
    written.  O(1) retrieve/append, O(changes) merge between branches.
    """

    def __init__(self, opener, indexfile, datafile):
        # opener is a callable(path[, mode]) returning a file object;
        # it abstracts local files vs. remote (HTTP range) reads
        self.indexfile = indexfile
        self.datafile = datafile
        self.index = []  # unpacked index records, one per revision
        self.opener = opener
        self.cache = None  # (node, rev, text) of the last text built
        self.nodemap = {nullid: -1}  # node id -> revision number
        # read the whole index for now, handle on-demand later;
        # IOError means there is no index yet: start empty
        try:
            n = 0
            i = self.opener(self.indexfile).read()
            s = struct.calcsize(indexformat)
            for f in range(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                self.nodemap[e[6]] = n
                self.index.append(e)
                n += 1
        except IOError: pass

    def tip(self): return self.node(len(self.index) - 1)
    def count(self): return len(self.index)
    # revision number <-> node id; any rev < 0 maps to the null id
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node): return self.nodemap[node]
    # linkrev ties a revision here back to a changeset revision number
    def linkrev(self, node): return self.index[self.nodemap[node]][3]
    def parents(self, node):
        if node == nullid: return (nullid, nullid)
        return self.index[self.nodemap[node]][4:6]

    # byte extent of a revision's compressed data in the data file
    def start(self, rev): return self.index[rev][0]
    def length(self, rev): return self.index[rev][1]
    def end(self, rev): return self.start(rev) + self.length(rev)
    # base(rev): the full (non-delta) revision rev's chain starts from
    def base(self, rev): return self.index[rev][2]

    def lookup(self, id):
        """Resolve a revision number or hex id fragment to a node id.

        Raises KeyError when the fragment is ambiguous or unknown.
        """
        try:
            rev = int(id)
            return self.node(rev)
        except ValueError:
            c = []
            for n in self.nodemap:
                # NOTE(review): substring match, not prefix match
                if id in hex(n):
                    c.append(n)
            if len(c) > 1: raise KeyError("Ambiguous identifier")
            if len(c) < 1: raise KeyError
            return c[0]

        # NOTE(review): unreachable -- both paths above return or raise
        return None

    def revisions(self, list):
        """Yield the full text of each node in *list*, in order."""
        # this can be optimized to do spans, etc
        # be stupid for now
        for node in list:
            yield self.revision(node)

    def diff(self, a, b):
        """Return a binary delta transforming text a into text b."""
        return mdiff.textdiff(a, b)

    def patch(self, text, patch):
        """Apply a binary delta to a text and return the result."""
        return mdiff.patch(text, patch)

    def revision(self, node):
        """Reconstruct and return the full text of a revision.

        Walks the delta chain from the revision's base, reusing the
        cached text of an intermediate revision when possible, and
        verifies the result against the node hash.
        """
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        text = None
        rev = self.rev(node)
        base = self.base(rev)
        start = self.start(base)
        end = self.end(rev)

        # if the cached text lies on this chain between base and rev,
        # start patching from it instead of from the full version
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
            last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)

        if not text:
            last = self.length(base)
            text = decompress(data[:last])

        # apply each stored delta from base+1 up to the wanted rev
        for r in xrange(base + 1, rev + 1):
            s = self.length(r)
            b = decompress(data[last:last + s])
            text = self.patch(text, b)
            last = last + s

        (p1, p2) = self.parents(node)
        if node != hash(text, p1, p2):
            raise "integrity check failed on %s:%d" % (self.datafile, rev)

        self.cache = (node, rev, text)
        return text

    def addrevision(self, text, transaction, link, p1=None, p2=None):
        """Append a new revision and return its node id.

        link is the changeset revision number to record; parents
        default to (tip, nullid).  The text is stored as a delta
        against the current tip unless the accumulated chain would
        exceed twice the text size, in which case a new full version
        is written and becomes the chain base.
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            prev = self.revision(self.tip())
            data = compress(self.diff(prev, text))
            # dist: bytes needed to rebuild this rev via the chain
            dist = end - start + len(data)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        # presumably registers the pre-write file sizes so a failed
        # transaction can truncate back -- transaction class not shown
        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node

    def ancestor(self, a, b):
        """Return the closest common ancestor node of a and b.

        Walks both ancestor sets breadth-first in lockstep until one
        walk reaches a node the other walk has already visited.
        """
        def expand(list, map):
            # breadth-first generator over ancestors; marks visited
            # nodes in map and yields nullid once exhausted
            a = []  # NOTE(review): unused local
            while list:
                n = list.pop(0)
                map[n] = 1
                yield n
                for p in self.parents(n):
                    if p != nullid and p not in map:
                        list.append(p)
            yield nullid

        amap = {}
        bmap = {}
        ag = expand([a], amap)
        bg = expand([b], bmap)
        adone = bdone = 0

        while not adone or not bdone:
            if not adone:
                an = ag.next()
                if an == nullid:
                    adone = 1
                elif an in bmap:
                    return an
            if not bdone:
                bn = bg.next()
                if bn == nullid:
                    bdone = 1
                elif bn in amap:
                    return bn

        return nullid

    def mergedag(self, other, transaction, linkseq, accumulate = None):
        """combine the nodes from other's DAG into ours"""
        old = self.tip()
        i = self.count()
        l = []

        # merge the other revision log into our DAG
        for r in range(other.count()):
            id = other.node(r)
            if id not in self.nodemap:
                (xn, yn) = other.parents(id)
                l.append((id, xn, yn))
                self.nodemap[id] = i
                i += 1

        # merge node data for new nodes
        r = other.revisions([e[0] for e in l])
        for e in l:
            t = r.next()
            if accumulate: accumulate(t)
            self.addrevision(t, transaction, linkseq.next(), e[1], e[2])

        # return the unmerged heads for later resolving
        return (old, self.tip())

    def group(self, linkmap):
        """Build a changegroup chunk stream for the revs in linkmap.

        Returns a string of length-prefixed chunks; each chunk carries
        node, p1, p2, the changeset id, and a delta against the
        previous chunk's revision.
        """
        # given a list of changeset revs, return a set of deltas and
        # metadata corresponding to nodes the first delta is
        # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        # have this parent as it has all history before these
        # changesets. parent is parent[0]

        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1

        # if we don't have any revisions touched by these changesets, bail
        if not revs: return struct.pack(">l", 0)

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            spans.append((o, l, [(n, l)]))

        # merge spans: coalesce reads that are adjacent on disk
        merge = [spans.pop(0)]
        while spans:
            e = spans.pop(0)
            f = merge[-1]
            if e[0] == f[0] + f[1]:
                merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
            else:
                merge.append(e)

        # read spans in, divide up chunks
        chunks = {}
        for span in merge:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = data[pos: pos + l]
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            for r in range(base + 1, rev + 1):
                b = decompress(chunks[r])
                text = self.patch(text, b)
            return text

        # build deltas
        deltas = []
        for d in range(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            # non-adjacent revs (or b being a full version) force a
            # rebuild of both texts and a fresh diff; otherwise the
            # stored delta for b is already the delta we want
            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = decompress(chunks[self.base(a)])
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = decompress(chunks[self.base(b)])
                tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = decompress(chunks[b])

            # chunk layout: length, node, p1, p2, changeset id, delta
            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            deltas.append(l + meta + d)

        # prepend the total length of the whole group
        l = struct.pack(">l", sum(map(len, deltas)) + 4)
        deltas.insert(0, l)
        return "".join(deltas)

    def addgroup(self, data, linkmapper, transaction):
        """Add a group of deltas (as produced by group()) to the log.

        data is the concatenated chunk stream; linkmapper maps a
        changeset id to the local linkrev to record.  Returns the new
        tip node.
        """
        # given a set of deltas, add them to the revision log. the
        # first delta is against its parent, which should be in our
        # log, the rest are against the previous delta.

        if not data: return self.tip()

        # retrieve the parent revision of the delta chain:
        # p1 of the first chunk (bytes 24-43 of its header)
        chain = data[24:44]

        # track the base of the current delta log
        r = self.count()
        t = r - 1

        base = prev = -1
        start = end = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        pos = 0
        while pos < len(data):
            # chunk header: length, node, p1, p2, changeset id (84 bytes)
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                data[pos:pos+84])
            link = linkmapper(cs)
            delta = data[pos + 84:pos + l]
            pos += l

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                text = self.patch(text, delta)
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise "consistency error adding group"
                measure = len(text)
            else:
                # delta applies to the previous rev: append it directly,
                # bypassing addrevision
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r, chain, prev = r, r + 1, node, node
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node
General Comments 0
You need to be logged in to leave comments. Login now