Add changegroup support
mpm@selenic.com
r46:93e868fa default
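Summary: this changeset introduces changegroups, a compressed bundle format that packs changeset, manifest, and file revisions so that one repository can export a set of changes and another can import and merge them. The hg script gains two debug commands (debugchangegroup and debugaddchangegroup), hg.py gains the repository methods newer(), changegroup(), and addchangegroup(), and the psyco import is commented out because, per the new comment, it makes changegroup merges about 20 times slower.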
@@ -1,342 +1,352 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # mercurial - a minimal scalable distributed SCM
3 # mercurial - a minimal scalable distributed SCM
4 # v0.4d "oedipa maas"
4 # v0.4d "oedipa maas"
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 # the psyco compiler makes commits about twice as fast
11 # the psyco compiler makes commits a bit faster
12 try:
12 # and makes changegroup merge about 20 times slower!
13 import psyco
13 # try:
14 psyco.full()
14 # import psyco
15 except:
15 # psyco.full()
16 pass
16 # except:
17 # pass
17
18
18 import sys, os, time
19 import sys, os, time
19 from mercurial import hg, mdiff, fancyopts
20 from mercurial import hg, mdiff, fancyopts
20
21
21 def help():
22 def help():
22 print """\
23 print """\
23 commands:
24 commands:
24
25
25 init create a new repository in this directory
26 init create a new repository in this directory
26 branch <path> create a branch of <path> in this directory
27 branch <path> create a branch of <path> in this directory
27 merge <path> merge changes from <path> into local repository
28 merge <path> merge changes from <path> into local repository
28 checkout [changeset] checkout the latest or given changeset
29 checkout [changeset] checkout the latest or given changeset
29 status show new, missing, and changed files in working dir
30 status show new, missing, and changed files in working dir
30 add [files...] add the given files in the next commit
31 add [files...] add the given files in the next commit
31 remove [files...] remove the given files in the next commit
32 remove [files...] remove the given files in the next commit
32 addremove add all new files, delete all missing files
33 addremove add all new files, delete all missing files
33 commit commit all changes to the repository
34 commit commit all changes to the repository
34 history show changeset history
35 history show changeset history
35 log <file> show revision history of a single file
36 log <file> show revision history of a single file
36 dump <file> [rev] dump the latest or given revision of a file
37 dump <file> [rev] dump the latest or given revision of a file
37 dumpmanifest [rev] dump the latest or given revision of the manifest
38 dumpmanifest [rev] dump the latest or given revision of the manifest
38 diff [files...] diff working directory (or selected files)
39 diff [files...] diff working directory (or selected files)
39 """
40 """
40
41
41 def filterfiles(list, files):
42 def filterfiles(list, files):
42 l = [ x for x in list if x in files ]
43 l = [ x for x in list if x in files ]
43
44
44 for f in files:
45 for f in files:
45 if f[-1] != os.sep: f += os.sep
46 if f[-1] != os.sep: f += os.sep
46 l += [ x for x in list if x.startswith(f) ]
47 l += [ x for x in list if x.startswith(f) ]
47 return l
48 return l
48
49
49 def diff(files = None, node1 = None, node2 = None):
50 def diff(files = None, node1 = None, node2 = None):
50
51
51 if node2:
52 if node2:
52 change = repo.changelog.read(node2)
53 change = repo.changelog.read(node2)
53 mmap2 = repo.manifest.read(change[0])
54 mmap2 = repo.manifest.read(change[0])
54 (c, a, d) = repo.diffrevs(node1, node2)
55 (c, a, d) = repo.diffrevs(node1, node2)
55 def read(f): return repo.file(f).read(mmap2[f])
56 def read(f): return repo.file(f).read(mmap2[f])
56 else:
57 else:
57 if not node1:
58 if not node1:
58 node1 = repo.current
59 node1 = repo.current
59 (c, a, d) = repo.diffdir(repo.root, node1)
60 (c, a, d) = repo.diffdir(repo.root, node1)
60 def read(f): return file(f).read()
61 def read(f): return file(f).read()
61
62
62 change = repo.changelog.read(node1)
63 change = repo.changelog.read(node1)
63 mmap = repo.manifest.read(change[0])
64 mmap = repo.manifest.read(change[0])
64
65
65 if files:
66 if files:
66 (c, a, d) = map(lambda x: filterfiles(x, files), (c, a, d))
67 (c, a, d) = map(lambda x: filterfiles(x, files), (c, a, d))
67
68
68 for f in c:
69 for f in c:
69 to = repo.file(f).read(mmap[f])
70 to = repo.file(f).read(mmap[f])
70 tn = read(f)
71 tn = read(f)
71 sys.stdout.write(mdiff.unidiff(to, tn, f))
72 sys.stdout.write(mdiff.unidiff(to, tn, f))
72 for f in a:
73 for f in a:
73 to = ""
74 to = ""
74 tn = read(f)
75 tn = read(f)
75 sys.stdout.write(mdiff.unidiff(to, tn, f))
76 sys.stdout.write(mdiff.unidiff(to, tn, f))
76 for f in d:
77 for f in d:
77 to = repo.file(f).read(mmap[f])
78 to = repo.file(f).read(mmap[f])
78 tn = ""
79 tn = ""
79 sys.stdout.write(mdiff.unidiff(to, tn, f))
80 sys.stdout.write(mdiff.unidiff(to, tn, f))
80
81
81
82
82 options = {}
83 options = {}
83 opts = [('v', 'verbose', None, 'verbose'),
84 opts = [('v', 'verbose', None, 'verbose'),
84 ('d', 'debug', None, 'debug')]
85 ('d', 'debug', None, 'debug')]
85
86
86 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
87 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
87 'hg [options] <command> [command options] [files]')
88 'hg [options] <command> [command options] [files]')
88
89
89 try:
90 try:
90 cmd = args[0]
91 cmd = args[0]
91 args = args[1:]
92 args = args[1:]
92 except:
93 except:
93 cmd = ""
94 cmd = ""
94
95
95 ui = hg.ui(options["verbose"], options["debug"])
96 ui = hg.ui(options["verbose"], options["debug"])
96
97
97 if cmd == "init":
98 if cmd == "init":
98 repo = hg.repository(ui, ".", create=1)
99 repo = hg.repository(ui, ".", create=1)
99 sys.exit(0)
100 sys.exit(0)
100 elif cmd == "branch" or cmd == "clone":
101 elif cmd == "branch" or cmd == "clone":
101 os.system("cp -al %s/.hg .hg" % args[0])
102 os.system("cp -al %s/.hg .hg" % args[0])
102 sys.exit(0)
103 sys.exit(0)
103 elif cmd == "help":
104 elif cmd == "help":
104 help()
105 help()
105 sys.exit(0)
106 sys.exit(0)
106 else:
107 else:
107 try:
108 try:
108 repo = hg.repository(ui=ui)
109 repo = hg.repository(ui=ui)
109 except:
110 except:
110 print "Unable to open repository"
111 print "Unable to open repository"
111 sys.exit(0)
112 sys.exit(0)
112
113
113 if cmd == "checkout" or cmd == "co":
114 if cmd == "checkout" or cmd == "co":
114 node = repo.changelog.tip()
115 node = repo.changelog.tip()
115 if args:
116 if args:
116 node = repo.changelog.lookup(args[0])
117 node = repo.changelog.lookup(args[0])
117 repo.checkout(node)
118 repo.checkout(node)
118
119
119 elif cmd == "add":
120 elif cmd == "add":
120 repo.add(args)
121 repo.add(args)
121
122
122 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
123 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
123 repo.remove(args)
124 repo.remove(args)
124
125
125 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
126 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
126 if 1:
127 if 1:
127 if len(args) > 0:
128 if len(args) > 0:
128 repo.commit(repo.current, args)
129 repo.commit(repo.current, args)
129 else:
130 else:
130 repo.commit(repo.current)
131 repo.commit(repo.current)
131
132
132 elif cmd == "import" or cmd == "patch":
133 elif cmd == "import" or cmd == "patch":
133 ioptions = {}
134 ioptions = {}
134 opts = [('p', 'strip', 1, 'path strip'),
135 opts = [('p', 'strip', 1, 'path strip'),
135 ('b', 'base', "", 'base path')]
136 ('b', 'base', "", 'base path')]
136
137
137 args = fancyopts.fancyopts(args, opts, ioptions,
138 args = fancyopts.fancyopts(args, opts, ioptions,
138 'hg import [options] <patch names>')
139 'hg import [options] <patch names>')
139 d = ioptions["base"]
140 d = ioptions["base"]
140 strip = ioptions["strip"]
141 strip = ioptions["strip"]
141
142
142 for patch in args:
143 for patch in args:
143 ui.status("applying %s\n" % patch)
144 ui.status("applying %s\n" % patch)
144 pf = d + patch
145 pf = d + patch
145 os.system("patch -p%d < %s > /dev/null" % (strip, pf))
146 os.system("patch -p%d < %s > /dev/null" % (strip, pf))
146 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
147 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
147 files = f.read().splitlines()
148 files = f.read().splitlines()
148 f.close()
149 f.close()
149 repo.commit(repo.current, files)
150 repo.commit(repo.current, files)
150
151
151 elif cmd == "status":
152 elif cmd == "status":
152 (c, a, d) = repo.diffdir(repo.root, repo.current)
153 (c, a, d) = repo.diffdir(repo.root, repo.current)
153 for f in c: print "C", f
154 for f in c: print "C", f
154 for f in a: print "?", f
155 for f in a: print "?", f
155 for f in d: print "R", f
156 for f in d: print "R", f
156
157
157 elif cmd == "diff":
158 elif cmd == "diff":
158 revs = []
159 revs = []
159
160
160 if args:
161 if args:
161 doptions = {}
162 doptions = {}
162 opts = [('r', 'revision', [], 'revision')]
163 opts = [('r', 'revision', [], 'revision')]
163 args = fancyopts.fancyopts(args, opts, doptions,
164 args = fancyopts.fancyopts(args, opts, doptions,
164 'hg diff [options] [files]')
165 'hg diff [options] [files]')
165 revs = map(lambda x: repo.changelog.lookup(x), doptions['revision'])
166 revs = map(lambda x: repo.changelog.lookup(x), doptions['revision'])
166
167
167 if len(revs) > 2:
168 if len(revs) > 2:
168 print "too many revisions to diff"
169 print "too many revisions to diff"
169 sys.exit(1)
170 sys.exit(1)
170 else:
171 else:
171 diff(args, *revs)
172 diff(args, *revs)
172
173
173 elif cmd == "export":
174 elif cmd == "export":
174 node = repo.changelog.lookup(args[0])
175 node = repo.changelog.lookup(args[0])
175 prev = repo.changelog.parents(node)[0]
176 prev = repo.changelog.parents(node)[0]
176 diff(None, prev, node)
177 diff(None, prev, node)
177
178
179 elif cmd == "debugchangegroup":
180 newer = repo.newer(repo.changelog.lookup(args[0]))
181 cg = repo.changegroup(newer)
182 sys.stdout.write(cg)
183
184 elif cmd == "debugaddchangegroup":
185 data = sys.stdin.read()
186 repo.addchangegroup(data)
187
178 elif cmd == "addremove":
188 elif cmd == "addremove":
179 (c, a, d) = repo.diffdir(repo.root, repo.current)
189 (c, a, d) = repo.diffdir(repo.root, repo.current)
180 repo.add(a)
190 repo.add(a)
181 repo.remove(d)
191 repo.remove(d)
182
192
183 elif cmd == "history":
193 elif cmd == "history":
184 for i in range(repo.changelog.count()):
194 for i in range(repo.changelog.count()):
185 n = repo.changelog.node(i)
195 n = repo.changelog.node(i)
186 changes = repo.changelog.read(n)
196 changes = repo.changelog.read(n)
187 (p1, p2) = repo.changelog.parents(n)
197 (p1, p2) = repo.changelog.parents(n)
188 (h, h1, h2) = map(hg.hex, (n, p1, p2))
198 (h, h1, h2) = map(hg.hex, (n, p1, p2))
189 (i1, i2) = map(repo.changelog.rev, (p1, p2))
199 (i1, i2) = map(repo.changelog.rev, (p1, p2))
190 print "rev: %4d:%s" % (i, h)
200 print "rev: %4d:%s" % (i, h)
191 print "parents: %4d:%s" % (i1, h1)
201 print "parents: %4d:%s" % (i1, h1)
192 if i2: print " %4d:%s" % (i2, h2)
202 if i2: print " %4d:%s" % (i2, h2)
193 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
203 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
194 hg.hex(changes[0]))
204 hg.hex(changes[0]))
195 print "user:", changes[1]
205 print "user:", changes[1]
196 print "date:", time.asctime(
206 print "date:", time.asctime(
197 time.localtime(float(changes[2].split(' ')[0])))
207 time.localtime(float(changes[2].split(' ')[0])))
198 print "files:", " ".join(changes[3])
208 print "files:", " ".join(changes[3])
199 print "description:"
209 print "description:"
200 print changes[4]
210 print changes[4]
201
211
202 elif cmd == "log":
212 elif cmd == "log":
203 if args:
213 if args:
204 r = repo.file(args[0])
214 r = repo.file(args[0])
205 for i in range(r.count()):
215 for i in range(r.count()):
206 n = r.node(i)
216 n = r.node(i)
207 (p1, p2) = r.parents(n)
217 (p1, p2) = r.parents(n)
208 (h, h1, h2) = map(hg.hex, (n, p1, p2))
218 (h, h1, h2) = map(hg.hex, (n, p1, p2))
209 (i1, i2) = map(r.rev, (p1, p2))
219 (i1, i2) = map(r.rev, (p1, p2))
210 cr = r.linkrev(n)
220 cr = r.linkrev(n)
211 cn = hg.hex(repo.changelog.node(cr))
221 cn = hg.hex(repo.changelog.node(cr))
212 print "rev: %4d:%s" % (i, h)
222 print "rev: %4d:%s" % (i, h)
213 print "changeset: %4d:%s" % (cr, cn)
223 print "changeset: %4d:%s" % (cr, cn)
214 print "parents: %4d:%s" % (i1, h1)
224 print "parents: %4d:%s" % (i1, h1)
215 if i2: print " %4d:%s" % (i2, h2)
225 if i2: print " %4d:%s" % (i2, h2)
216 else:
226 else:
217 print "missing filename"
227 print "missing filename"
218
228
219 elif cmd == "dump":
229 elif cmd == "dump":
220 if args:
230 if args:
221 r = repo.file(args[0])
231 r = repo.file(args[0])
222 n = r.tip()
232 n = r.tip()
223 if len(args) > 1: n = r.lookup(args[1])
233 if len(args) > 1: n = r.lookup(args[1])
224 sys.stdout.write(r.read(n))
234 sys.stdout.write(r.read(n))
225 else:
235 else:
226 print "missing filename"
236 print "missing filename"
227
237
228 elif cmd == "dumpmanifest":
238 elif cmd == "dumpmanifest":
229 n = repo.manifest.tip()
239 n = repo.manifest.tip()
230 if len(args) > 0:
240 if len(args) > 0:
231 n = repo.manifest.lookup(args[0])
241 n = repo.manifest.lookup(args[0])
232 m = repo.manifest.read(n)
242 m = repo.manifest.read(n)
233 files = m.keys()
243 files = m.keys()
234 files.sort()
244 files.sort()
235
245
236 for f in files:
246 for f in files:
237 print hg.hex(m[f]), f
247 print hg.hex(m[f]), f
238
248
239 elif cmd == "debughash":
249 elif cmd == "debughash":
240 f = repo.file(args[0])
250 f = repo.file(args[0])
241 print f.encodepath(args[0])
251 print f.encodepath(args[0])
242
252
243 elif cmd == "debugindex":
253 elif cmd == "debugindex":
244 r = hg.revlog(open, args[0], "")
254 r = hg.revlog(open, args[0], "")
245 print " rev offset length base linkrev"+\
255 print " rev offset length base linkrev"+\
246 " p1 p2 nodeid"
256 " p1 p2 nodeid"
247 for i in range(r.count()):
257 for i in range(r.count()):
248 e = r.index[i]
258 e = r.index[i]
249 print "% 6d % 9d % 7d % 5d % 7d %s.. %s.. %s.." % (
259 print "% 6d % 9d % 7d % 5d % 7d %s.. %s.. %s.." % (
250 i, e[0], e[1], e[2], e[3],
260 i, e[0], e[1], e[2], e[3],
251 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
261 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
252
262
253 elif cmd == "merge":
263 elif cmd == "merge":
254 if args:
264 if args:
255 other = hg.repository(ui, args[0])
265 other = hg.repository(ui, args[0])
256 repo.merge(other)
266 repo.merge(other)
257 else:
267 else:
258 print "missing source repository"
268 print "missing source repository"
259
269
260 elif cmd == "verify":
270 elif cmd == "verify":
261 filelinkrevs = {}
271 filelinkrevs = {}
262 filenodes = {}
272 filenodes = {}
263 manifestchangeset = {}
273 manifestchangeset = {}
264 changesets = revisions = files = 0
274 changesets = revisions = files = 0
265
275
266 print "checking changesets"
276 print "checking changesets"
267 for i in range(repo.changelog.count()):
277 for i in range(repo.changelog.count()):
268 changesets += 1
278 changesets += 1
269 n = repo.changelog.node(i)
279 n = repo.changelog.node(i)
270 changes = repo.changelog.read(n)
280 changes = repo.changelog.read(n)
271 manifestchangeset[changes[0]] = n
281 manifestchangeset[changes[0]] = n
272 for f in changes[3]:
282 for f in changes[3]:
273 revisions += 1
283 revisions += 1
274 filelinkrevs.setdefault(f, []).append(i)
284 filelinkrevs.setdefault(f, []).append(i)
275
285
276 print "checking manifests"
286 print "checking manifests"
277 for i in range(repo.manifest.count()):
287 for i in range(repo.manifest.count()):
278 n = repo.manifest.node(i)
288 n = repo.manifest.node(i)
279 ca = repo.changelog.node(repo.manifest.linkrev(n))
289 ca = repo.changelog.node(repo.manifest.linkrev(n))
280 cc = manifestchangeset[n]
290 cc = manifestchangeset[n]
281 if ca != cc:
291 if ca != cc:
282 print "manifest %s points to %s, not %s" % \
292 print "manifest %s points to %s, not %s" % \
283 (hg.hex(n), hg.hex(ca), hg.hex(cc))
293 (hg.hex(n), hg.hex(ca), hg.hex(cc))
284 m = repo.manifest.read(n)
294 m = repo.manifest.read(n)
285 for f, fn in m.items():
295 for f, fn in m.items():
286 filenodes.setdefault(f, {})[fn] = 1
296 filenodes.setdefault(f, {})[fn] = 1
287
297
288 print "crosschecking files in changesets and manifests"
298 print "crosschecking files in changesets and manifests"
289 for f in filenodes:
299 for f in filenodes:
290 if f not in filelinkrevs:
300 if f not in filelinkrevs:
291 print "file %s in manifest but not in changesets"
301 print "file %s in manifest but not in changesets"
292
302
293 for f in filelinkrevs:
303 for f in filelinkrevs:
294 if f not in filenodes:
304 if f not in filenodes:
295 print "file %s in changeset but not in manifest"
305 print "file %s in changeset but not in manifest"
296
306
297 print "checking files"
307 print "checking files"
298 for f in filenodes:
308 for f in filenodes:
299 files += 1
309 files += 1
300 fl = repo.file(f)
310 fl = repo.file(f)
301 nodes = {"\0"*20: 1}
311 nodes = {"\0"*20: 1}
302 for i in range(fl.count()):
312 for i in range(fl.count()):
303 n = fl.node(i)
313 n = fl.node(i)
304
314
305 if n not in filenodes[f]:
315 if n not in filenodes[f]:
306 print "%s:%s not in manifests" % (f, hg.hex(n))
316 print "%s:%s not in manifests" % (f, hg.hex(n))
307 else:
317 else:
308 del filenodes[f][n]
318 del filenodes[f][n]
309
319
310 flr = fl.linkrev(n)
320 flr = fl.linkrev(n)
311 if flr not in filelinkrevs[f]:
321 if flr not in filelinkrevs[f]:
312 print "%s:%s points to unexpected changeset rev %d" \
322 print "%s:%s points to unexpected changeset rev %d" \
313 % (f, hg.hex(n), fl.linkrev(n))
323 % (f, hg.hex(n), fl.linkrev(n))
314 else:
324 else:
315 filelinkrevs[f].remove(flr)
325 filelinkrevs[f].remove(flr)
316
326
317 # verify contents
327 # verify contents
318 t = fl.read(n)
328 t = fl.read(n)
319
329
320 # verify parents
330 # verify parents
321 (p1, p2) = fl.parents(n)
331 (p1, p2) = fl.parents(n)
322 if p1 not in nodes:
332 if p1 not in nodes:
323 print "%s:%s unknown parent 1 %s" % (f, hg.hex(n), hg.hex(p1))
333 print "%s:%s unknown parent 1 %s" % (f, hg.hex(n), hg.hex(p1))
324 if p2 not in nodes:
334 if p2 not in nodes:
325 print "file %s:%s unknown parent %s" % (f, hg.hex(n), hg.hex(p1))
335 print "file %s:%s unknown parent %s" % (f, hg.hex(n), hg.hex(p1))
326 nodes[n] = 1
336 nodes[n] = 1
327
337
328 # cross-check
338 # cross-check
329 for flr in filelinkrevs[f]:
339 for flr in filelinkrevs[f]:
330 print "changeset rev %d not in %s" % (flr, f)
340 print "changeset rev %d not in %s" % (flr, f)
331
341
332 for node in filenodes[f]:
342 for node in filenodes[f]:
333 print "node %s in manifests not in %s" % (hg.hex(n), f)
343 print "node %s in manifests not in %s" % (hg.hex(n), f)
334
344
335
345
336 print "%d files, %d changesets, %d total revisions" % (files, changesets,
346 print "%d files, %d changesets, %d total revisions" % (files, changesets,
337 revisions)
347 revisions)
338
348
339 else:
349 else:
340 print "unknown command\n"
350 print "unknown command\n"
341 help()
351 help()
342 sys.exit(1)
352 sys.exit(1)
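
The two new commands in this script are thin wrappers around the repository methods added in hg.py below: debugchangegroup looks up the given changeset, collects everything newer with repo.newer(), serializes it with repo.changegroup(), and writes the result to stdout; debugaddchangegroup reads such a blob from stdin and applies it with repo.addchangegroup(). A plausible way to move changes between two local repositories (a usage sketch, not taken from the source) is to connect the two through a file or pipe:

    hg debugchangegroup <common-rev>  > changes.cg     (in the source repository)
    hg debugaddchangegroup            < changes.cg     (in the destination repository)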
@@ -1,613 +1,758 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
9 import urllib
9 import urllib
10 from mercurial import byterange
10 from mercurial import byterange
11 from mercurial.transaction import *
11 from mercurial.transaction import *
12 from mercurial.revlog import *
12 from mercurial.revlog import *
13
13
14 class filelog(revlog):
14 class filelog(revlog):
15 def __init__(self, opener, path):
15 def __init__(self, opener, path):
16 s = self.encodepath(path)
16 s = self.encodepath(path)
17 revlog.__init__(self, opener, os.path.join("data", s + "i"),
17 revlog.__init__(self, opener, os.path.join("data", s + "i"),
18 os.path.join("data", s))
18 os.path.join("data", s))
19
19
20 def encodepath(self, path):
20 def encodepath(self, path):
21 s = sha.sha(path).digest()
21 s = sha.sha(path).digest()
22 s = base64.encodestring(s)[:-3]
22 s = base64.encodestring(s)[:-3]
23 s = re.sub("\+", "%", s)
23 s = re.sub("\+", "%", s)
24 s = re.sub("/", "_", s)
24 s = re.sub("/", "_", s)
25 return s
25 return s
26
26
27 def read(self, node):
27 def read(self, node):
28 return self.revision(node)
28 return self.revision(node)
29 def add(self, text, transaction, link, p1=None, p2=None):
29 def add(self, text, transaction, link, p1=None, p2=None):
30 return self.addrevision(text, transaction, link, p1, p2)
30 return self.addrevision(text, transaction, link, p1, p2)
31
31
32 def resolvedag(self, old, new, transaction, link):
32 def resolvedag(self, old, new, transaction, link):
33 """resolve unmerged heads in our DAG"""
33 """resolve unmerged heads in our DAG"""
34 if old == new: return None
34 if old == new: return None
35 a = self.ancestor(old, new)
35 a = self.ancestor(old, new)
36 if old == a: return new
36 if old == a: return new
37 return self.merge3(old, new, a, transaction, link)
37 return self.merge3(old, new, a, transaction, link)
38
38
39 def merge3(self, my, other, base, transaction, link):
39 def merge3(self, my, other, base, transaction, link):
40 """perform a 3-way merge and append the result"""
40 """perform a 3-way merge and append the result"""
41 def temp(prefix, node):
41 def temp(prefix, node):
42 (fd, name) = tempfile.mkstemp(prefix)
42 (fd, name) = tempfile.mkstemp(prefix)
43 f = os.fdopen(fd, "w")
43 f = os.fdopen(fd, "w")
44 f.write(self.revision(node))
44 f.write(self.revision(node))
45 f.close()
45 f.close()
46 return name
46 return name
47
47
48 a = temp("local", my)
48 a = temp("local", my)
49 b = temp("remote", other)
49 b = temp("remote", other)
50 c = temp("parent", base)
50 c = temp("parent", base)
51
51
52 cmd = os.environ["HGMERGE"]
52 cmd = os.environ["HGMERGE"]
53 r = os.system("%s %s %s %s" % (cmd, a, b, c))
53 r = os.system("%s %s %s %s" % (cmd, a, b, c))
54 if r:
54 if r:
55 raise "Merge failed, implement rollback!"
55 raise "Merge failed, implement rollback!"
56
56
57 t = open(a).read()
57 t = open(a).read()
58 os.unlink(a)
58 os.unlink(a)
59 os.unlink(b)
59 os.unlink(b)
60 os.unlink(c)
60 os.unlink(c)
61 return self.addrevision(t, transaction, link, my, other)
61 return self.addrevision(t, transaction, link, my, other)
62
62
63 def merge(self, other, transaction, linkseq, link):
63 def merge(self, other, transaction, linkseq, link):
64 """perform a merge and resolve resulting heads"""
64 """perform a merge and resolve resulting heads"""
65 (o, n) = self.mergedag(other, transaction, linkseq)
65 (o, n) = self.mergedag(other, transaction, linkseq)
66 return self.resolvedag(o, n, transaction, link)
66 return self.resolvedag(o, n, transaction, link)
67
67
68 class manifest(revlog):
68 class manifest(revlog):
69 def __init__(self, opener):
69 def __init__(self, opener):
70 self.mapcache = None
70 self.mapcache = None
71 self.listcache = None
71 self.listcache = None
72 self.addlist = None
72 self.addlist = None
73 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
73 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
74
74
75 def read(self, node):
75 def read(self, node):
76 if self.mapcache and self.mapcache[0] == node:
76 if self.mapcache and self.mapcache[0] == node:
77 return self.mapcache[1]
77 return self.mapcache[1]
78 text = self.revision(node)
78 text = self.revision(node)
79 map = {}
79 map = {}
80 self.listcache = (text, text.splitlines(1))
80 self.listcache = (text, text.splitlines(1))
81 for l in self.listcache[1]:
81 for l in self.listcache[1]:
82 (f, n) = l.split('\0')
82 (f, n) = l.split('\0')
83 map[f] = bin(n[:40])
83 map[f] = bin(n[:40])
84 self.mapcache = (node, map)
84 self.mapcache = (node, map)
85 return map
85 return map
86
86
87 def diff(self, a, b):
87 def diff(self, a, b):
88 # this is sneaky, as we're not actually using a and b
88 # this is sneaky, as we're not actually using a and b
89 if self.listcache and len(self.listcache[0]) == len(a):
89 if self.listcache and len(self.listcache[0]) == len(a):
90 return mdiff.diff(self.listcache[1], self.addlist, 1)
90 return mdiff.diff(self.listcache[1], self.addlist, 1)
91 else:
91 else:
92 return mdiff.textdiff(a, b)
92 return mdiff.textdiff(a, b)
93
93
94 def add(self, map, transaction, link, p1=None, p2=None):
94 def add(self, map, transaction, link, p1=None, p2=None):
95 files = map.keys()
95 files = map.keys()
96 files.sort()
96 files.sort()
97
97
98 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
98 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
99 text = "".join(self.addlist)
99 text = "".join(self.addlist)
100
100
101 n = self.addrevision(text, transaction, link, p1, p2)
101 n = self.addrevision(text, transaction, link, p1, p2)
102 self.mapcache = (n, map)
102 self.mapcache = (n, map)
103 self.listcache = (text, self.addlist)
103 self.listcache = (text, self.addlist)
104
104
105 return n
105 return n
106
106
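A note on the manifest format implied by read() and add() above: a manifest revision is plain text with one line per tracked file,

    <file path>\0<40-character hex node id of the file revision>\n

sorted by path. read() parses this into a dict and keeps both the parsed map (mapcache) and the raw line list (listcache); diff() then reuses listcache together with addlist so that writing a new manifest revision can emit a line-based delta instead of a full text diff.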
107 class changelog(revlog):
107 class changelog(revlog):
108 def __init__(self, opener):
108 def __init__(self, opener):
109 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
109 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
110
110
111 def extract(self, text):
111 def extract(self, text):
112 if not text:
112 if not text:
113 return (nullid, "", "0", [], "")
113 return (nullid, "", "0", [], "")
114 last = text.index("\n\n")
114 last = text.index("\n\n")
115 desc = text[last + 2:]
115 desc = text[last + 2:]
116 l = text[:last].splitlines()
116 l = text[:last].splitlines()
117 manifest = bin(l[0])
117 manifest = bin(l[0])
118 user = l[1]
118 user = l[1]
119 date = l[2]
119 date = l[2]
120 files = l[3:]
120 files = l[3:]
121 return (manifest, user, date, files, desc)
121 return (manifest, user, date, files, desc)
122
122
123 def read(self, node):
123 def read(self, node):
124 return self.extract(self.revision(node))
124 return self.extract(self.revision(node))
125
125
126 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
126 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
127 try: user = os.environ["HGUSER"]
127 try: user = os.environ["HGUSER"]
128 except: user = os.environ["LOGNAME"] + '@' + socket.getfqdn()
128 except: user = os.environ["LOGNAME"] + '@' + socket.getfqdn()
129 date = "%d %d" % (time.time(), time.timezone)
129 date = "%d %d" % (time.time(), time.timezone)
130 list.sort()
130 list.sort()
131 l = [hex(manifest), user, date] + list + ["", desc]
131 l = [hex(manifest), user, date] + list + ["", desc]
132 text = "\n".join(l)
132 text = "\n".join(l)
133 return self.addrevision(text, transaction, self.count(), p1, p2)
133 return self.addrevision(text, transaction, self.count(), p1, p2)
134
134
135 def merge3(self, my, other, base):
135 def merge3(self, my, other, base):
136 pass
136 pass
137
137
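The changelog entry format, as defined by extract() and add() above, is a small header followed by a blank line and the description:

    line 1:   hex manifest node id
    line 2:   committer (HGUSER, or LOGNAME@<fqdn> as a fallback)
    line 3:   date, written as "<unix time> <timezone offset>"
    lines 4+: changed file names, one per line, sorted
    (blank line)
    description text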
138 class dircache:
138 class dircache:
139 def __init__(self, opener, ui):
139 def __init__(self, opener, ui):
140 self.opener = opener
140 self.opener = opener
141 self.dirty = 0
141 self.dirty = 0
142 self.ui = ui
142 self.ui = ui
143 self.map = None
143 self.map = None
144 def __del__(self):
144 def __del__(self):
145 if self.dirty: self.write()
145 if self.dirty: self.write()
146 def __getitem__(self, key):
146 def __getitem__(self, key):
147 try:
147 try:
148 return self.map[key]
148 return self.map[key]
149 except TypeError:
149 except TypeError:
150 self.read()
150 self.read()
151 return self[key]
151 return self[key]
152
152
153 def read(self):
153 def read(self):
154 if self.map is not None: return self.map
154 if self.map is not None: return self.map
155
155
156 self.map = {}
156 self.map = {}
157 try:
157 try:
158 st = self.opener("dircache").read()
158 st = self.opener("dircache").read()
159 except: return
159 except: return
160
160
161 pos = 0
161 pos = 0
162 while pos < len(st):
162 while pos < len(st):
163 e = struct.unpack(">llll", st[pos:pos+16])
163 e = struct.unpack(">llll", st[pos:pos+16])
164 l = e[3]
164 l = e[3]
165 pos += 16
165 pos += 16
166 f = st[pos:pos + l]
166 f = st[pos:pos + l]
167 self.map[f] = e[:3]
167 self.map[f] = e[:3]
168 pos += l
168 pos += l
169
169
170 def update(self, files):
170 def update(self, files):
171 if not files: return
171 if not files: return
172 self.read()
172 self.read()
173 self.dirty = 1
173 self.dirty = 1
174 for f in files:
174 for f in files:
175 try:
175 try:
176 s = os.stat(f)
176 s = os.stat(f)
177 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
177 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
178 except IOError:
178 except IOError:
179 self.remove(f)
179 self.remove(f)
180
180
181 def taint(self, files):
181 def taint(self, files):
182 if not files: return
182 if not files: return
183 self.read()
183 self.read()
184 self.dirty = 1
184 self.dirty = 1
185 for f in files:
185 for f in files:
186 self.map[f] = (0, -1, 0)
186 self.map[f] = (0, -1, 0)
187
187
188 def remove(self, files):
188 def remove(self, files):
189 if not files: return
189 if not files: return
190 self.read()
190 self.read()
191 self.dirty = 1
191 self.dirty = 1
192 for f in files:
192 for f in files:
193 try:
193 try:
194 del self.map[f]
194 del self.map[f]
195 except KeyError:
195 except KeyError:
196 self.ui.warn("Not in dircache: %s\n" % f)
196 self.ui.warn("Not in dircache: %s\n" % f)
197 pass
197 pass
198
198
199 def clear(self):
199 def clear(self):
200 self.map = {}
200 self.map = {}
201 self.dirty = 1
201 self.dirty = 1
202
202
203 def write(self):
203 def write(self):
204 st = self.opener("dircache", "w")
204 st = self.opener("dircache", "w")
205 for f, e in self.map.items():
205 for f, e in self.map.items():
206 e = struct.pack(">llll", e[0], e[1], e[2], len(f))
206 e = struct.pack(">llll", e[0], e[1], e[2], len(f))
207 st.write(e + f)
207 st.write(e + f)
208 self.dirty = 0
208 self.dirty = 0
209
209
210 def copy(self):
210 def copy(self):
211 self.read()
211 self.read()
212 return self.map.copy()
212 return self.map.copy()
213
213
214 # used to avoid circular references so destructors work
214 # used to avoid circular references so destructors work
215 def opener(base):
215 def opener(base):
216 p = base
216 p = base
217 def o(path, mode="r"):
217 def o(path, mode="r"):
218 if p[:7] == "http://":
218 if p[:7] == "http://":
219 f = os.path.join(p, urllib.quote(path))
219 f = os.path.join(p, urllib.quote(path))
220 return httprangereader(f)
220 return httprangereader(f)
221
221
222 f = os.path.join(p, path)
222 f = os.path.join(p, path)
223
223
224 if mode != "r" and os.path.isfile(f):
224 if mode != "r" and os.path.isfile(f):
225 s = os.stat(f)
225 s = os.stat(f)
226 if s.st_nlink > 1:
226 if s.st_nlink > 1:
227 file(f + ".tmp", "w").write(file(f).read())
227 file(f + ".tmp", "w").write(file(f).read())
228 os.rename(f+".tmp", f)
228 os.rename(f+".tmp", f)
229
229
230 return file(f, mode)
230 return file(f, mode)
231
231
232 return o
232 return o
233
233
234 class repository:
234 class repository:
235 def __init__(self, ui, path=None, create=0):
235 def __init__(self, ui, path=None, create=0):
236 self.remote = 0
236 self.remote = 0
237 if path and path[:7] == "http://":
237 if path and path[:7] == "http://":
238 self.remote = 1
238 self.remote = 1
239 self.path = path
239 self.path = path
240 else:
240 else:
241 if not path:
241 if not path:
242 p = os.getcwd()
242 p = os.getcwd()
243 while not os.path.isdir(os.path.join(p, ".hg")):
243 while not os.path.isdir(os.path.join(p, ".hg")):
244 p = os.path.dirname(p)
244 p = os.path.dirname(p)
245 if p == "/": raise "No repo found"
245 if p == "/": raise "No repo found"
246 path = p
246 path = p
247 self.path = os.path.join(path, ".hg")
247 self.path = os.path.join(path, ".hg")
248
248
249 self.root = path
249 self.root = path
250 self.ui = ui
250 self.ui = ui
251
251
252 if create:
252 if create:
253 os.mkdir(self.path)
253 os.mkdir(self.path)
254 os.mkdir(self.join("data"))
254 os.mkdir(self.join("data"))
255
255
256 self.opener = opener(self.path)
256 self.opener = opener(self.path)
257 self.manifest = manifest(self.opener)
257 self.manifest = manifest(self.opener)
258 self.changelog = changelog(self.opener)
258 self.changelog = changelog(self.opener)
259 self.ignorelist = None
259 self.ignorelist = None
260
260
261 if not self.remote:
261 if not self.remote:
262 self.dircache = dircache(self.opener, ui)
262 self.dircache = dircache(self.opener, ui)
263 try:
263 try:
264 self.current = bin(self.opener("current").read())
264 self.current = bin(self.opener("current").read())
265 except IOError:
265 except IOError:
266 self.current = None
266 self.current = None
267
267
268 def setcurrent(self, node):
268 def setcurrent(self, node):
269 self.current = node
269 self.current = node
270 self.opener("current", "w").write(hex(node))
270 self.opener("current", "w").write(hex(node))
271
271
272 def ignore(self, f):
272 def ignore(self, f):
273 if self.ignorelist is None:
273 if self.ignorelist is None:
274 self.ignorelist = []
274 self.ignorelist = []
275 try:
275 try:
276 l = open(os.path.join(self.root, ".hgignore")).readlines()
276 l = open(os.path.join(self.root, ".hgignore")).readlines()
277 for pat in l:
277 for pat in l:
278 if pat != "\n":
278 if pat != "\n":
279 self.ignorelist.append(re.compile(pat[:-1]))
279 self.ignorelist.append(re.compile(pat[:-1]))
280 except IOError: pass
280 except IOError: pass
281 for pat in self.ignorelist:
281 for pat in self.ignorelist:
282 if pat.search(f): return True
282 if pat.search(f): return True
283 return False
283 return False
284
284
285 def join(self, f):
285 def join(self, f):
286 return os.path.join(self.path, f)
286 return os.path.join(self.path, f)
287
287
288 def file(self, f):
288 def file(self, f):
289 return filelog(self.opener, f)
289 return filelog(self.opener, f)
290
290
291 def transaction(self):
291 def transaction(self):
292 return transaction(self.opener, self.join("journal"))
292 return transaction(self.opener, self.join("journal"))
293
293
294 def merge(self, other):
294 def merge(self, other):
295 tr = self.transaction()
295 tr = self.transaction()
296 changed = {}
296 changed = {}
297 new = {}
297 new = {}
298 seqrev = self.changelog.count()
298 seqrev = self.changelog.count()
299 # some magic to allow fiddling in nested scope
299 # some magic to allow fiddling in nested scope
300 nextrev = [seqrev]
300 nextrev = [seqrev]
301
301
302 # helpers for back-linking file revisions to local changeset
302 # helpers for back-linking file revisions to local changeset
303 # revisions so we can immediately get to changeset from annotate
303 # revisions so we can immediately get to changeset from annotate
304 def accumulate(text):
304 def accumulate(text):
305 # track which files are added in which changeset and the
305 # track which files are added in which changeset and the
306 # corresponding _local_ changeset revision
306 # corresponding _local_ changeset revision
307 files = self.changelog.extract(text)[3]
307 files = self.changelog.extract(text)[3]
308 for f in files:
308 for f in files:
309 changed.setdefault(f, []).append(nextrev[0])
309 changed.setdefault(f, []).append(nextrev[0])
310 nextrev[0] += 1
310 nextrev[0] += 1
311
311
312 def seq(start):
312 def seq(start):
313 while 1:
313 while 1:
314 yield start
314 yield start
315 start += 1
315 start += 1
316
316
317 def lseq(l):
317 def lseq(l):
318 for r in l:
318 for r in l:
319 yield r
319 yield r
320
320
321 # begin the import/merge of changesets
321 # begin the import/merge of changesets
322 self.ui.status("merging new changesets\n")
322 self.ui.status("merging new changesets\n")
323 (co, cn) = self.changelog.mergedag(other.changelog, tr,
323 (co, cn) = self.changelog.mergedag(other.changelog, tr,
324 seq(seqrev), accumulate)
324 seq(seqrev), accumulate)
325 resolverev = self.changelog.count()
325 resolverev = self.changelog.count()
326
326
327 # is there anything to do?
327 # is there anything to do?
328 if co == cn:
328 if co == cn:
329 tr.close()
329 tr.close()
330 return
330 return
331
331
332 # do we need to resolve?
332 # do we need to resolve?
333 simple = (co == self.changelog.ancestor(co, cn))
333 simple = (co == self.changelog.ancestor(co, cn))
334
334
335 # merge all files changed by the changesets,
335 # merge all files changed by the changesets,
336 # keeping track of the new tips
336 # keeping track of the new tips
337 changelist = changed.keys()
337 changelist = changed.keys()
338 changelist.sort()
338 changelist.sort()
339 for f in changelist:
339 for f in changelist:
340 sys.stdout.write(".")
340 sys.stdout.write(".")
341 sys.stdout.flush()
341 sys.stdout.flush()
342 r = self.file(f)
342 r = self.file(f)
343 node = r.merge(other.file(f), tr, lseq(changed[f]), resolverev)
343 node = r.merge(other.file(f), tr, lseq(changed[f]), resolverev)
344 if node:
344 if node:
345 new[f] = node
345 new[f] = node
346 sys.stdout.write("\n")
346 sys.stdout.write("\n")
347
347
348 # begin the merge of the manifest
348 # begin the merge of the manifest
349 self.ui.status("merging manifests\n")
349 self.ui.status("merging manifests\n")
350 (mm, mo) = self.manifest.mergedag(other.manifest, tr, seq(seqrev))
350 (mm, mo) = self.manifest.mergedag(other.manifest, tr, seq(seqrev))
351
351
352 # For simple merges, we don't need to resolve manifests or changesets
352 # For simple merges, we don't need to resolve manifests or changesets
353 if simple:
353 if simple:
354 tr.close()
354 tr.close()
355 return
355 return
356
356
357 ma = self.manifest.ancestor(mm, mo)
357 ma = self.manifest.ancestor(mm, mo)
358
358
359 # resolve the manifest to point to all the merged files
359 # resolve the manifest to point to all the merged files
360 self.ui.status("resolving manifests\n")
360 self.ui.status("resolving manifests\n")
361 mmap = self.manifest.read(mm) # mine
361 mmap = self.manifest.read(mm) # mine
362 omap = self.manifest.read(mo) # other
362 omap = self.manifest.read(mo) # other
363 amap = self.manifest.read(ma) # ancestor
363 amap = self.manifest.read(ma) # ancestor
364 nmap = {}
364 nmap = {}
365
365
366 for f, mid in mmap.iteritems():
366 for f, mid in mmap.iteritems():
367 if f in omap:
367 if f in omap:
368 if mid != omap[f]:
368 if mid != omap[f]:
369 nmap[f] = new.get(f, mid) # use merged version
369 nmap[f] = new.get(f, mid) # use merged version
370 else:
370 else:
371 nmap[f] = new.get(f, mid) # they're the same
371 nmap[f] = new.get(f, mid) # they're the same
372 del omap[f]
372 del omap[f]
373 elif f in amap:
373 elif f in amap:
374 if mid != amap[f]:
374 if mid != amap[f]:
375 pass # we should prompt here
375 pass # we should prompt here
376 else:
376 else:
377 pass # other deleted it
377 pass # other deleted it
378 else:
378 else:
379 nmap[f] = new.get(f, mid) # we created it
379 nmap[f] = new.get(f, mid) # we created it
380
380
381 del mmap
381 del mmap
382
382
383 for f, oid in omap.iteritems():
383 for f, oid in omap.iteritems():
384 if f in amap:
384 if f in amap:
385 if oid != amap[f]:
385 if oid != amap[f]:
386 pass # this is the nasty case, we should prompt
386 pass # this is the nasty case, we should prompt
387 else:
387 else:
388 pass # probably safe
388 pass # probably safe
389 else:
389 else:
390 nmap[f] = new.get(f, oid) # remote created it
390 nmap[f] = new.get(f, oid) # remote created it
391
391
392 del omap
392 del omap
393 del amap
393 del amap
394
394
395 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
395 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
396
396
397 # Now all files and manifests are merged, we add the changed files
397 # Now all files and manifests are merged, we add the changed files
398 # and manifest id to the changelog
398 # and manifest id to the changelog
399 self.ui.status("committing merge changeset\n")
399 self.ui.status("committing merge changeset\n")
400 new = new.keys()
400 new = new.keys()
401 new.sort()
401 new.sort()
402 if co == cn: cn = -1
402 if co == cn: cn = -1
403
403
404 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
404 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
405 edittext = self.ui.edit(edittext)
405 edittext = self.ui.edit(edittext)
406 n = self.changelog.add(node, new, edittext, tr, co, cn)
406 n = self.changelog.add(node, new, edittext, tr, co, cn)
407
407
408 tr.close()
408 tr.close()
409
409
410 def commit(self, parent, update = None, text = ""):
410 def commit(self, parent, update = None, text = ""):
411 tr = self.transaction()
411 tr = self.transaction()
412
412
413 try:
413 try:
414 remove = [ l[:-1] for l in self.opener("to-remove") ]
414 remove = [ l[:-1] for l in self.opener("to-remove") ]
415 os.unlink(self.join("to-remove"))
415 os.unlink(self.join("to-remove"))
416
416
417 except IOError:
417 except IOError:
418 remove = []
418 remove = []
419
419
420 if update == None:
420 if update == None:
421 update = self.diffdir(self.root, parent)[0]
421 update = self.diffdir(self.root, parent)[0]
422
422
423 # check in files
423 # check in files
424 new = {}
424 new = {}
425 linkrev = self.changelog.count()
425 linkrev = self.changelog.count()
426 for f in update:
426 for f in update:
427 try:
427 try:
428 t = file(f).read()
428 t = file(f).read()
429 except IOError:
429 except IOError:
430 remove.append(f)
430 remove.append(f)
431 continue
431 continue
432 r = self.file(f)
432 r = self.file(f)
433 new[f] = r.add(t, tr, linkrev)
433 new[f] = r.add(t, tr, linkrev)
434
434
435 # update manifest
435 # update manifest
436 mmap = self.manifest.read(self.manifest.tip())
436 mmap = self.manifest.read(self.manifest.tip())
437 mmap.update(new)
437 mmap.update(new)
438 for f in remove:
438 for f in remove:
439 del mmap[f]
439 del mmap[f]
440 mnode = self.manifest.add(mmap, tr, linkrev)
440 mnode = self.manifest.add(mmap, tr, linkrev)
441
441
442 # add changeset
442 # add changeset
443 new = new.keys()
443 new = new.keys()
444 new.sort()
444 new.sort()
445
445
446 edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
446 edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
447 edittext += "".join(["HG: removed %s\n" % f for f in remove])
447 edittext += "".join(["HG: removed %s\n" % f for f in remove])
448 edittext = self.ui.edit(edittext)
448 edittext = self.ui.edit(edittext)
449
449
450 n = self.changelog.add(mnode, new, edittext, tr)
450 n = self.changelog.add(mnode, new, edittext, tr)
451 tr.close()
451 tr.close()
452
452
453 self.setcurrent(n)
453 self.setcurrent(n)
454 self.dircache.update(new)
454 self.dircache.update(new)
455 self.dircache.remove(remove)
455 self.dircache.remove(remove)
456
456
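To summarize the commit path: commit() drains the to-remove file written by remove(), defaults the update list to whatever diffdir() reports as changed, checks each updated file into its filelog with a linkrev pointing at the changelog entry about to be created, rebuilds the manifest from the previous tip plus the new file nodes (minus removals), opens the commit message in the user's editor via ui.edit(), adds the changeset, and finally records the new changeset as current and refreshes the dircache.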
457 def checkdir(self, path):
457 def checkdir(self, path):
458 d = os.path.dirname(path)
458 d = os.path.dirname(path)
459 if not d: return
459 if not d: return
460 if not os.path.isdir(d):
460 if not os.path.isdir(d):
461 self.checkdir(d)
461 self.checkdir(d)
462 os.mkdir(d)
462 os.mkdir(d)
463
463
464 def checkout(self, node):
464 def checkout(self, node):
465 # checkout is really dumb at the moment
465 # checkout is really dumb at the moment
466 # it ought to basically merge
466 # it ought to basically merge
467 change = self.changelog.read(node)
467 change = self.changelog.read(node)
468 mmap = self.manifest.read(change[0])
468 mmap = self.manifest.read(change[0])
469
469
470 l = mmap.keys()
470 l = mmap.keys()
471 l.sort()
471 l.sort()
472 stats = []
472 stats = []
473 for f in l:
473 for f in l:
474 r = self.file(f)
474 r = self.file(f)
475 t = r.revision(mmap[f])
475 t = r.revision(mmap[f])
476 try:
476 try:
477 file(f, "w").write(t)
477 file(f, "w").write(t)
478 except:
478 except:
479 self.checkdir(f)
479 self.checkdir(f)
480 file(f, "w").write(t)
480 file(f, "w").write(t)
481
481
482 self.setcurrent(node)
482 self.setcurrent(node)
483 self.dircache.clear()
483 self.dircache.clear()
484 self.dircache.update(l)
484 self.dircache.update(l)
485
485
486 def diffdir(self, path, changeset):
486 def diffdir(self, path, changeset):
487 changed = []
487 changed = []
488 mf = {}
488 mf = {}
489 added = []
489 added = []
490
490
491 if changeset:
491 if changeset:
492 change = self.changelog.read(changeset)
492 change = self.changelog.read(changeset)
493 mf = self.manifest.read(change[0])
493 mf = self.manifest.read(change[0])
494
494
495 if changeset == self.current:
495 if changeset == self.current:
496 dc = self.dircache.copy()
496 dc = self.dircache.copy()
497 else:
497 else:
498 dc = dict.fromkeys(mf)
498 dc = dict.fromkeys(mf)
499
499
500 def fcmp(fn):
500 def fcmp(fn):
501 t1 = file(fn).read()
501 t1 = file(fn).read()
502 t2 = self.file(fn).revision(mf[fn])
502 t2 = self.file(fn).revision(mf[fn])
503 return cmp(t1, t2)
503 return cmp(t1, t2)
504
504
505 for dir, subdirs, files in os.walk(self.root):
505 for dir, subdirs, files in os.walk(self.root):
506 d = dir[len(self.root)+1:]
506 d = dir[len(self.root)+1:]
507 if ".hg" in subdirs: subdirs.remove(".hg")
507 if ".hg" in subdirs: subdirs.remove(".hg")
508
508
509 for f in files:
509 for f in files:
510 fn = os.path.join(d, f)
510 fn = os.path.join(d, f)
511 try: s = os.stat(fn)
511 try: s = os.stat(fn)
512 except: continue
512 except: continue
513 if fn in dc:
513 if fn in dc:
514 c = dc[fn]
514 c = dc[fn]
515 del dc[fn]
515 del dc[fn]
516 if not c:
516 if not c:
517 if fcmp(fn):
517 if fcmp(fn):
518 changed.append(fn)
518 changed.append(fn)
519 elif c[1] != s.st_size:
519 elif c[1] != s.st_size:
520 changed.append(fn)
520 changed.append(fn)
521 elif c[0] != s.st_mode or c[2] != s.st_mtime:
521 elif c[0] != s.st_mode or c[2] != s.st_mtime:
522 if fcmp(fn):
522 if fcmp(fn):
523 changed.append(fn)
523 changed.append(fn)
524 else:
524 else:
525 if self.ignore(fn): continue
525 if self.ignore(fn): continue
526 added.append(fn)
526 added.append(fn)
527
527
528 deleted = dc.keys()
528 deleted = dc.keys()
529 deleted.sort()
529 deleted.sort()
530
530
531 return (changed, added, deleted)
531 return (changed, added, deleted)
532
532
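diffdir() is what both status and commit rely on: it walks the working directory (skipping .hg) and compares each file against the dircache stat triple (or, when diffing against a changeset other than the current one, directly against the manifest contents). A file is classified as changed when its size differs, or when its mode or mtime differs and the contents really differ (fcmp); files not in the cache and not matched by .hgignore are added, and cache entries with no corresponding file on disk are reported as deleted.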
533 def diffrevs(self, node1, node2):
533 def diffrevs(self, node1, node2):
534 changed, added = [], []
534 changed, added = [], []
535
535
536 change = self.changelog.read(node1)
536 change = self.changelog.read(node1)
537 mf1 = self.manifest.read(change[0])
537 mf1 = self.manifest.read(change[0])
538 change = self.changelog.read(node2)
538 change = self.changelog.read(node2)
539 mf2 = self.manifest.read(change[0])
539 mf2 = self.manifest.read(change[0])
540
540
541 for fn in mf2:
541 for fn in mf2:
542 if mf1.has_key(fn):
542 if mf1.has_key(fn):
543 if mf1[fn] != mf2[fn]:
543 if mf1[fn] != mf2[fn]:
544 changed.append(fn)
544 changed.append(fn)
545 del mf1[fn]
545 del mf1[fn]
546 else:
546 else:
547 added.append(fn)
547 added.append(fn)
548
548
549 deleted = mf1.keys()
549 deleted = mf1.keys()
550 deleted.sort()
550 deleted.sort()
551
551
552 return (changed, added, deleted)
552 return (changed, added, deleted)
553
553
554 def add(self, list):
554 def add(self, list):
555 self.dircache.taint(list)
555 self.dircache.taint(list)
556
556
557 def remove(self, list):
557 def remove(self, list):
558 dl = self.opener("to-remove", "a")
558 dl = self.opener("to-remove", "a")
559 for f in list:
559 for f in list:
560 dl.write(f + "\n")
560 dl.write(f + "\n")
561
561
562 def newer(self, node):
563 nodes = []
564 for i in xrange(self.changelog.rev(node) + 1, self.changelog.count()):
565 nodes.append(self.changelog.node(i))
566
567 return nodes
568
569 def changegroup(self, nodes):
570 # construct the link map
571 linkmap = {}
572 for n in nodes:
573 linkmap[self.changelog.rev(n)] = n
574
575 # construct a list of all changed files
576 changed = {}
577 for n in nodes:
578 c = self.changelog.read(n)
579 for f in c[3]:
580 changed[f] = 1
581 changed = changed.keys()
582 changed.sort()
583
584 # the changegroup is changesets + manifests + all file revs
585 cg = []
586 revs = [ self.changelog.rev(n) for n in nodes ]
587
588 g = self.changelog.group(linkmap)
589 cg.append(g)
590 g = self.manifest.group(linkmap)
591 cg.append(g)
592
593 for f in changed:
594 g = self.file(f).group(linkmap)
595 if not g: raise "couldn't find change to %s" % f
596 l = struct.pack(">l", len(f))
597 cg += [l, f, g]
598
599 return compress("".join(cg))
600
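Taken together with addchangegroup() below, changegroup() implies the following wire layout (a sketch; it assumes revlog.group(), which is not shown in this change, prefixes each group with a 4-byte big-endian length that counts the length field itself, since that is how addchangegroup() splits the stream):

    zlib-compressed concatenation of:
        [ changelog group ]
        [ manifest group ]
        then, for every changed file in sorted order:
            [ 4-byte big-endian len(filename) ][ filename ][ file revision group ]

    group length prefixes include their own 4 bytes; the filename length
    prefix covers only the filename.

linkmap maps the local changelog revision number of each outgoing changeset to its node id, presumably so that each revlog's group() can restrict itself to revisions whose linkrev belongs to those changesets.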
601 def addchangegroup(self, data):
602 data = decompress(data)
603 def getlen(data, pos):
604 return struct.unpack(">l", data[pos:pos + 4])[0]
605
606 tr = self.transaction()
607 simple = True
608
609 print "merging changesets"
610 # pull off the changeset group
611 l = getlen(data, 0)
612 csg = data[0:l]
613 pos = l
614 co = self.changelog.tip()
615 cn = self.changelog.addgroup(csg, lambda x: self.changelog.count(), tr)
616
617 print "merging manifests"
618 # pull off the manifest group
619 l = getlen(data, pos)
620 mfg = data[pos: pos + l]
621 pos += l
622 mo = self.manifest.tip()
623 mn = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)
624
625 # do we need a resolve?
626 if self.changelog.ancestor(co, cn) != co:
627 print "NEED RESOLVE"
628 simple = False
629 resolverev = self.changelog.count()
630
631 # process the files
632 print "merging files"
633 new = {}
634 while pos < len(data):
635 l = getlen(data, pos)
636 pos += 4
637 f = data[pos:pos + l]
638 pos += l
639
640 l = getlen(data, pos)
641 fg = data[pos: pos + l]
642 pos += l
643
644 fl = self.file(f)
645 o = fl.tip()
646 n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
647 if not simple:
648 new[fl] = fl.resolvedag(o, n, tr, resolverev)
649
650 # For simple merges, we don't need to resolve manifests or changesets
651 if simple:
652 tr.close()
653 return
654
655 # resolve the manifest to point to all the merged files
656 self.ui.status("resolving manifests\n")
657 ma = self.manifest.ancestor(mm, mo)
658 mmap = self.manifest.read(mm) # mine
659 omap = self.manifest.read(mo) # other
660 amap = self.manifest.read(ma) # ancestor
661 nmap = {}
662
663 for f, mid in mmap.iteritems():
664 if f in omap:
665 if mid != omap[f]:
666 nmap[f] = new.get(f, mid) # use merged version
667 else:
668 nmap[f] = new.get(f, mid) # they're the same
669 del omap[f]
670 elif f in amap:
671 if mid != amap[f]:
672 pass # we should prompt here
673 else:
674 pass # other deleted it
675 else:
676 nmap[f] = new.get(f, mid) # we created it
677
678 del mmap
679
680 for f, oid in omap.iteritems():
681 if f in amap:
682 if oid != amap[f]:
683 pass # this is the nasty case, we should prompt
684 else:
685 pass # probably safe
686 else:
687 nmap[f] = new.get(f, oid) # remote created it
688
689 del omap
690 del amap
691
692 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
693
694 # Now all files and manifests are merged, we add the changed files
695 # and manifest id to the changelog
696 self.ui.status("committing merge changeset\n")
697 new = new.keys()
698 new.sort()
699 if co == cn: cn = -1
700
701 edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
702 edittext = self.ui.edit(edittext)
703 n = self.changelog.add(node, new, edittext, tr, co, cn)
704
705 tr.close()
706
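addchangegroup() is the receiving half: it appends the changeset, manifest, and file groups with addgroup(), and only needs the full manifest/changeset resolve path when the incoming changesets are not a fast-forward (the old tip is no longer an ancestor of the new tip). A minimal sketch of the framing it relies on, using a hypothetical helper name (this restates the parsing above, it is not part of hg.py):

    import struct, zlib

    def splitchangegroup(blob):
        # returns (changeset group, manifest group, [(filename, file group), ...])
        data = zlib.decompress(blob)
        def getlen(pos):
            return struct.unpack(">l", data[pos:pos + 4])[0]

        l = getlen(0)
        csg, pos = data[0:l], l                   # changeset group (length is self-inclusive)

        l = getlen(pos)
        mfg, pos = data[pos:pos + l], pos + l     # manifest group

        files = []
        while pos < len(data):
            l = getlen(pos)                       # filename length (not self-inclusive)
            pos += 4
            name = data[pos:pos + l]
            pos += l
            l = getlen(pos)                       # file revision group, self-inclusive again
            files.append((name, data[pos:pos + l]))
            pos += l
        return csg, mfg, files

Note that the resolve branch reads a manifest node mm that is never assigned in this method (merge() uses the same variable names), so the non-fast-forward path looks like it still needs work at this point.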
562 class ui:
707 class ui:
563 def __init__(self, verbose=False, debug=False):
708 def __init__(self, verbose=False, debug=False):
564 self.verbose = verbose
709 self.verbose = verbose
565 def write(self, *args):
710 def write(self, *args):
566 for a in args:
711 for a in args:
567 sys.stdout.write(str(a))
712 sys.stdout.write(str(a))
568 def prompt(self, msg, pat):
713 def prompt(self, msg, pat):
569 while 1:
714 while 1:
570 sys.stdout.write(msg)
715 sys.stdout.write(msg)
571 r = sys.stdin.readline()[:-1]
716 r = sys.stdin.readline()[:-1]
572 if re.match(pat, r):
717 if re.match(pat, r):
573 return r
718 return r
574 def status(self, *msg):
719 def status(self, *msg):
575 self.write(*msg)
720 self.write(*msg)
576 def warn(self, msg):
721 def warn(self, msg):
577 self.write(*msg)
722 self.write(*msg)
578 def note(self, msg):
723 def note(self, msg):
579 if self.verbose: self.write(*msg)
724 if self.verbose: self.write(*msg)
580 def debug(self, msg):
725 def debug(self, msg):
581 if self.debug: self.write(*msg)
726 if self.debug: self.write(*msg)
582 def edit(self, text):
727 def edit(self, text):
583 (fd, name) = tempfile.mkstemp("hg")
728 (fd, name) = tempfile.mkstemp("hg")
584 f = os.fdopen(fd, "w")
729 f = os.fdopen(fd, "w")
585 f.write(text)
730 f.write(text)
586 f.close()
731 f.close()
587
732
588 editor = os.environ.get("EDITOR", "vi")
733 editor = os.environ.get("EDITOR", "vi")
589 r = os.system("%s %s" % (editor, name))
734 r = os.system("%s %s" % (editor, name))
590 if r:
735 if r:
591 raise "Edit failed!"
736 raise "Edit failed!"
592
737
593 t = open(name).read()
738 t = open(name).read()
594 t = re.sub("(?m)^HG:.*\n", "", t)
739 t = re.sub("(?m)^HG:.*\n", "", t)
595
740
596 return t
741 return t
597
742
598
743
599 class httprangereader:
744 class httprangereader:
600 def __init__(self, url):
745 def __init__(self, url):
601 self.url = url
746 self.url = url
602 self.pos = 0
747 self.pos = 0
603 def seek(self, pos):
748 def seek(self, pos):
604 self.pos = pos
749 self.pos = pos
605 def read(self, bytes=None):
750 def read(self, bytes=None):
606 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
751 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
607 urllib2.install_opener(opener)
752 urllib2.install_opener(opener)
608 req = urllib2.Request(self.url)
753 req = urllib2.Request(self.url)
609 end = ''
754 end = ''
610 if bytes: end = self.pos + bytes
755 if bytes: end = self.pos + bytes
611 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
756 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
612 f = urllib2.urlopen(req)
757 f = urllib2.urlopen(req)
613 return f.read()
758 return f.read()
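
Worth noting: opener() returns an httprangereader for http:// paths, and repository.__init__ marks such repositories as remote and skips the dircache entirely, so a remote repository is read-only and every revlog access turns into an HTTP request carrying a Range header for exactly the bytes needed (byterange.HTTPRangeHandler supplies the range support).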
@@ -1,229 +1,412 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # This provides efficient delta storage with O(1) retrieve and append
3 # This provides efficient delta storage with O(1) retrieve and append
4 # and O(changes) merge between branches
4 # and O(changes) merge between branches
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 import zlib, struct, sha, os, tempfile, binascii
11 import zlib, struct, sha, os, tempfile, binascii
12 from mercurial import mdiff
12 from mercurial import mdiff
13
13
14 def hex(node): return binascii.hexlify(node)
14 def hex(node): return binascii.hexlify(node)
15 def bin(node): return binascii.unhexlify(node)
15 def bin(node): return binascii.unhexlify(node)
16
16
17 def compress(text):
17 def compress(text):
18 return zlib.compress(text)
18 return zlib.compress(text)
19
19
20 def decompress(bin):
20 def decompress(bin):
21 return zlib.decompress(bin)
21 return zlib.decompress(bin)
22
22
23 def hash(text, p1, p2):
23 def hash(text, p1, p2):
24 l = [p1, p2]
24 l = [p1, p2]
25 l.sort()
25 l.sort()
26 return sha.sha(l[0] + l[1] + text).digest()
26 return sha.sha(l[0] + l[1] + text).digest()
27
27
28 nullid = "\0" * 20
28 nullid = "\0" * 20
29 indexformat = ">4l20s20s20s"
29 indexformat = ">4l20s20s20s"
30
30
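Two building blocks defined above deserve a note. hash() fixes what a node id is: the SHA-1 of the two parent ids (sorted) concatenated with the revision text, so an id commits to both the content and its history; nullid (20 zero bytes) stands in for a missing parent. indexformat describes one fixed-size index record per revision; a decoding sketch (field names follow the comment in __init__ below):

    import struct

    indexformat = ">4l20s20s20s"                 # 4 ints + three 20-byte nodes
    entrysize = struct.calcsize(indexformat)     # 16 + 60 = 76 bytes per record

    def parseentry(record):
        # offset, size, base, linkrev, p1, p2, nodeid  (as in revlog.__init__)
        offset, size, base, linkrev, p1, p2, nodeid = struct.unpack(indexformat, record)
        return dict(offset=offset, size=size, base=base,
                    linkrev=linkrev, p1=p1, p2=p2, nodeid=nodeid)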
31 class revlog:
31 class revlog:
32 def __init__(self, opener, indexfile, datafile):
32 def __init__(self, opener, indexfile, datafile):
33 self.indexfile = indexfile
33 self.indexfile = indexfile
34 self.datafile = datafile
34 self.datafile = datafile
35 self.index = []
35 self.index = []
36 self.opener = opener
36 self.opener = opener
37 self.cache = None
37 self.cache = None
38 self.nodemap = {nullid: -1}
38 self.nodemap = {nullid: -1}
39 # read the whole index for now, handle on-demand later
39 # read the whole index for now, handle on-demand later
40 try:
40 try:
41 n = 0
41 n = 0
42 i = self.opener(self.indexfile).read()
42 i = self.opener(self.indexfile).read()
43 s = struct.calcsize(indexformat)
43 s = struct.calcsize(indexformat)
44 for f in range(0, len(i), s):
44 for f in range(0, len(i), s):
45 # offset, size, base, linkrev, p1, p2, nodeid
45 # offset, size, base, linkrev, p1, p2, nodeid
46 e = struct.unpack(indexformat, i[f:f + s])
46 e = struct.unpack(indexformat, i[f:f + s])
47 self.nodemap[e[6]] = n
47 self.nodemap[e[6]] = n
48 self.index.append(e)
48 self.index.append(e)
49 n += 1
49 n += 1
50 except IOError: pass
50 except IOError: pass
51
51
52 def tip(self): return self.node(len(self.index) - 1)
52 def tip(self): return self.node(len(self.index) - 1)
53 def count(self): return len(self.index)
53 def count(self): return len(self.index)
54 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
54 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
55 def rev(self, node): return self.nodemap[node]
55 def rev(self, node): return self.nodemap[node]
56 def linkrev(self, node): return self.index[self.nodemap[node]][3]
56 def linkrev(self, node): return self.index[self.nodemap[node]][3]
57 def parents(self, node):
57 def parents(self, node):
58 if node == nullid: return (nullid, nullid)
58 if node == nullid: return (nullid, nullid)
59 return self.index[self.nodemap[node]][4:6]
59 return self.index[self.nodemap[node]][4:6]
60
60
61 def start(self, rev): return self.index[rev][0]
61 def start(self, rev): return self.index[rev][0]
62 def length(self, rev): return self.index[rev][1]
62 def length(self, rev): return self.index[rev][1]
63 def end(self, rev): return self.start(rev) + self.length(rev)
63 def end(self, rev): return self.start(rev) + self.length(rev)
64 def base(self, rev): return self.index[rev][2]
64 def base(self, rev): return self.index[rev][2]
65
65
66 def lookup(self, id):
66 def lookup(self, id):
67 try:
67 try:
68 rev = int(id)
68 rev = int(id)
69 return self.node(rev)
69 return self.node(rev)
70 except ValueError:
70 except ValueError:
71 c = []
71 c = []
72 for n in self.nodemap:
72 for n in self.nodemap:
73 if id in hex(n):
73 if id in hex(n):
74 c.append(n)
74 c.append(n)
75 if len(c) > 1: raise KeyError("Ambiguous identifier")
75 if len(c) > 1: raise KeyError("Ambiguous identifier")
76 if len(c) < 1: raise KeyError
76 if len(c) < 1: raise KeyError
77 return c[0]
77 return c[0]
78
78
79 return None
79 return None
80
80
81 def revisions(self, list):
81 def revisions(self, list):
82 # this can be optimized to do spans, etc
82 # this can be optimized to do spans, etc
83 # be stupid for now
83 # be stupid for now
84 for node in list:
84 for node in list:
85 yield self.revision(node)
85 yield self.revision(node)
86
86
87 def diff(self, a, b):
87 def diff(self, a, b):
88 return mdiff.textdiff(a, b)
88 return mdiff.textdiff(a, b)
89
89
90 def patch(self, text, patch):
90 def patch(self, text, patch):
91 return mdiff.patch(text, patch)
91 return mdiff.patch(text, patch)
92
92
93 def revision(self, node):
93 def revision(self, node):
94 if node == nullid: return ""
94 if node == nullid: return ""
95 if self.cache and self.cache[0] == node: return self.cache[2]
95 if self.cache and self.cache[0] == node: return self.cache[2]
96
96
97 text = None
97 text = None
98 rev = self.rev(node)
98 rev = self.rev(node)
99 base = self.base(rev)
99 base = self.base(rev)
100 start = self.start(base)
100 start = self.start(base)
101 end = self.end(rev)
101 end = self.end(rev)
102
102
103 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
103 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
104 base = self.cache[1]
104 base = self.cache[1]
105 start = self.start(base + 1)
105 start = self.start(base + 1)
106 text = self.cache[2]
106 text = self.cache[2]
107 last = 0
107 last = 0
108
108
109 f = self.opener(self.datafile)
109 f = self.opener(self.datafile)
110 f.seek(start)
110 f.seek(start)
111 data = f.read(end - start)
111 data = f.read(end - start)
112
112
113 if not text:
113 if not text:
114 last = self.length(base)
114 last = self.length(base)
115 text = decompress(data[:last])
115 text = decompress(data[:last])
116
116
117 for r in range(base + 1, rev + 1):
117 for r in range(base + 1, rev + 1):
118 s = self.length(r)
118 s = self.length(r)
119 b = decompress(data[last:last + s])
119 b = decompress(data[last:last + s])
120 text = self.patch(text, b)
120 text = self.patch(text, b)
121 last = last + s
121 last = last + s
122
122
123 (p1, p2) = self.parents(node)
123 (p1, p2) = self.parents(node)
124 if node != hash(text, p1, p2):
124 if node != hash(text, p1, p2):
125 raise "integrity check failed on %s:%d" % (self.datafile, rev)
125 raise "integrity check failed on %s:%d" % (self.datafile, rev)
126
126
127 self.cache = (node, rev, text)
127 self.cache = (node, rev, text)
128 return text
128 return text
129
129
130 def addrevision(self, text, transaction, link, p1=None, p2=None):
130 def addrevision(self, text, transaction, link, p1=None, p2=None):
131 if text is None: text = ""
131 if text is None: text = ""
132 if p1 is None: p1 = self.tip()
132 if p1 is None: p1 = self.tip()
133 if p2 is None: p2 = nullid
133 if p2 is None: p2 = nullid
134
134
135 node = hash(text, p1, p2)
135 node = hash(text, p1, p2)
136
136
137 n = self.count()
137 n = self.count()
138 t = n - 1
138 t = n - 1
139
139
140 if n:
140 if n:
141 start = self.start(self.base(t))
141 start = self.start(self.base(t))
142 end = self.end(t)
142 end = self.end(t)
143 prev = self.revision(self.tip())
143 prev = self.revision(self.tip())
144 data = compress(self.diff(prev, text))
144 data = compress(self.diff(prev, text))
145
145
146 # full versions are inserted when the needed deltas
146 # full versions are inserted when the needed deltas
147 # become comparable to the uncompressed text
147 # become comparable to the uncompressed text
148 if not n or (end + len(data) - start) > len(text) * 2:
148 if not n or (end + len(data) - start) > len(text) * 2:
149 data = compress(text)
149 data = compress(text)
150 base = n
150 base = n
151 else:
151 else:
152 base = self.base(t)
152 base = self.base(t)
153
153
154 offset = 0
154 offset = 0
155 if t >= 0:
155 if t >= 0:
156 offset = self.end(t)
156 offset = self.end(t)
157
157
158 e = (offset, len(data), base, link, p1, p2, node)
158 e = (offset, len(data), base, link, p1, p2, node)
159
159
160 self.index.append(e)
160 self.index.append(e)
161 self.nodemap[node] = n
161 self.nodemap[node] = n
162 entry = struct.pack(indexformat, *e)
162 entry = struct.pack(indexformat, *e)
163
163
164 transaction.add(self.datafile, e[0])
164 transaction.add(self.datafile, e[0])
165 self.opener(self.datafile, "a").write(data)
165 self.opener(self.datafile, "a").write(data)
166 transaction.add(self.indexfile, n * len(entry))
166 transaction.add(self.indexfile, n * len(entry))
167 self.opener(self.indexfile, "a").write(entry)
167 self.opener(self.indexfile, "a").write(entry)
168
168
169 self.cache = (node, n, text)
169 self.cache = (node, n, text)
170 return node
170 return node
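The size check in addrevision() is what keeps delta chains bounded: when the bytes needed to replay the existing chain plus the new delta would exceed twice the size of the plain text, a full compressed snapshot is written and becomes the new base. A hedged restatement of just that decision (the names below are descriptive, not from this changeset):

# Sketch of the snapshot heuristic used in addrevision().
def store_full_version(chain_bytes, delta_bytes, text_len, is_first_rev):
    # chain_bytes: span in the data file from start(base(tip)) to end(tip)
    # delta_bytes: length of the compressed delta against the current tip
    return is_first_rev or (chain_bytes + delta_bytes) > 2 * text_len

# a 1000-byte file whose chain already takes 1900 bytes of deltas:
print(store_full_version(1900, 200, 1000, False))   # True -> write a snapshot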
171
171
172 def ancestor(self, a, b):
172 def ancestor(self, a, b):
173 def expand(list, map):
173 def expand(list, map):
174 a = []
174 a = []
175 while list:
175 while list:
176 n = list.pop(0)
176 n = list.pop(0)
177 map[n] = 1
177 map[n] = 1
178 yield n
178 yield n
179 for p in self.parents(n):
179 for p in self.parents(n):
180 if p != nullid and p not in map:
180 if p != nullid and p not in map:
181 list.append(p)
181 list.append(p)
182 yield nullid
182 yield nullid
183
183
184 amap = {}
184 amap = {}
185 bmap = {}
185 bmap = {}
186 ag = expand([a], amap)
186 ag = expand([a], amap)
187 bg = expand([b], bmap)
187 bg = expand([b], bmap)
188 adone = bdone = 0
188 adone = bdone = 0
189
189
190 while not adone or not bdone:
190 while not adone or not bdone:
191 if not adone:
191 if not adone:
192 an = ag.next()
192 an = ag.next()
193 if an == nullid:
193 if an == nullid:
194 adone = 1
194 adone = 1
195 elif an in bmap:
195 elif an in bmap:
196 return an
196 return an
197 if not bdone:
197 if not bdone:
198 bn = bg.next()
198 bn = bg.next()
199 if bn == nullid:
199 if bn == nullid:
200 bdone = 1
200 bdone = 1
201 elif bn in amap:
201 elif bn in amap:
202 return bn
202 return bn
203
203
204 return nullid
204 return nullid
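ancestor() walks the two histories breadth-first in lockstep and returns the first node that has already been seen from the other side, falling back to nullid when the histories are unrelated. A self-contained sketch of the same search on a toy DAG (node names are hypothetical; None stands in for nullid):

# Toy DAG: node -> (p1, p2); d merges the two children of a.
parents = {
    "a": (None, None),
    "b": ("a", None),
    "c": ("a", None),
    "d": ("b", "c"),
}

def expand(start, seen):
    queue = [start]
    while queue:
        n = queue.pop(0)
        seen[n] = 1
        yield n
        for p in parents[n]:
            if p is not None and p not in seen:
                queue.append(p)
    yield None                                # sentinel: this side is exhausted

def ancestor(a, b):
    amap, bmap = {}, {}
    ag, bg = expand(a, amap), expand(b, bmap)
    adone = bdone = 0
    while not adone or not bdone:
        if not adone:
            an = next(ag)
            if an is None: adone = 1
            elif an in bmap: return an        # already reached from b's side
        if not bdone:
            bn = next(bg)
            if bn is None: bdone = 1
            elif bn in amap: return bn
    return None

print(ancestor("b", "d"))   # -> "b": b is itself an ancestor of d
print(ancestor("b", "c"))   # -> "a": nearest common ancestor of the siblings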
205
205
206 def mergedag(self, other, transaction, linkseq, accumulate = None):
206 def mergedag(self, other, transaction, linkseq, accumulate = None):
207 """combine the nodes from other's DAG into ours"""
207 """combine the nodes from other's DAG into ours"""
208 old = self.tip()
208 old = self.tip()
209 i = self.count()
209 i = self.count()
210 l = []
210 l = []
211
211
212 # merge the other revision log into our DAG
212 # merge the other revision log into our DAG
213 for r in range(other.count()):
213 for r in range(other.count()):
214 id = other.node(r)
214 id = other.node(r)
215 if id not in self.nodemap:
215 if id not in self.nodemap:
216 (xn, yn) = other.parents(id)
216 (xn, yn) = other.parents(id)
217 l.append((id, xn, yn))
217 l.append((id, xn, yn))
218 self.nodemap[id] = i
218 self.nodemap[id] = i
219 i += 1
219 i += 1
220
220
221 # merge node data for new nodes
221 # merge node data for new nodes
222 r = other.revisions([e[0] for e in l])
222 r = other.revisions([e[0] for e in l])
223 for e in l:
223 for e in l:
224 t = r.next()
224 t = r.next()
225 if accumulate: accumulate(t)
225 if accumulate: accumulate(t)
226 self.addrevision(t, transaction, linkseq.next(), e[1], e[2])
226 self.addrevision(t, transaction, linkseq.next(), e[1], e[2])
227
227
228 # return the unmerged heads for later resolving
228 # return the unmerged heads for later resolving
229 return (old, self.tip())
229 return (old, self.tip())
230
231 def group(self, linkmap):
233 # given a list of changeset revs, return a set of deltas and
234 # metadata corresponding to those nodes. the first delta is
235 # parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
236 # have this parent, as it already has all history before these
237 # changesets. the parent used is parents(nodes[0])[0]
237
238 revs = []
239 needed = {}
240
241 # find file nodes/revs that match changeset revs
242 for i in xrange(0, self.count()):
243 if self.index[i][3] in linkmap:
244 revs.append(i)
245 needed[i] = 1
246
247 # if we don't have any revisions touched by these changesets, bail
248 if not revs: return struct.pack(">l", 0)
249
250 # add the parent of the first rev
251 p = self.parents(self.node(revs[0]))[0]
252 revs.insert(0, self.rev(p))
253
254 # for each delta that isn't contiguous in the log, we need to
255 # reconstruct the base, reconstruct the result, and then
256 # calculate the delta. We also need to do this where we've
257 # stored a full version and not a delta
258 for i in xrange(0, len(revs) - 1):
259 a, b = revs[i], revs[i + 1]
260 if a + 1 != b or self.base(b) == b:
261 for j in xrange(self.base(a), a + 1):
262 needed[j] = 1
263 for j in xrange(self.base(b), b + 1):
264 needed[j] = 1
265
266 # calculate spans to retrieve from datafile
267 needed = needed.keys()
268 needed.sort()
269 spans = []
270 for n in needed:
271 if n < 0: continue
272 o = self.start(n)
273 l = self.length(n)
274 spans.append((o, l, [(n, l)]))
275
276 # merge spans
277 merge = [spans.pop(0)]
278 while spans:
279 e = spans.pop(0)
280 f = merge[-1]
281 if e[0] == f[0] + f[1]:
282 merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
283 else:
284 merge.append(e)
285
286 # read spans in, divide up chunks
287 chunks = {}
288 for span in merge:
289 # we reopen the file for each span to make http happy for now
290 f = self.opener(self.datafile)
291 f.seek(span[0])
292 data = f.read(span[1])
293
294 # divide up the span
295 pos = 0
296 for r, l in span[2]:
297 chunks[r] = data[pos: pos + l]
298 pos += l
299
300 # helper to reconstruct intermediate versions
301 def construct(text, base, rev):
302 for r in range(base + 1, rev + 1):
303 b = decompress(chunks[r])
304 text = self.patch(text, b)
305 return text
306
307 # build deltas
308 deltas = []
309 for d in range(0, len(revs) - 1):
310 a, b = revs[d], revs[d + 1]
311 n = self.node(b)
312
313 if a + 1 != b or self.base(b) == b:
314 if a >= 0:
315 base = self.base(a)
316 ta = decompress(chunks[self.base(a)])
317 ta = construct(ta, base, a)
318 else:
319 ta = ""
320
321 base = self.base(b)
322 if a > base:
323 base = a
324 tb = ta
325 else:
326 tb = decompress(chunks[self.base(b)])
327 tb = construct(tb, base, b)
328 d = self.diff(ta, tb)
329 else:
330 d = decompress(chunks[b])
331
332 p = self.parents(n)
333 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
334 l = struct.pack(">l", len(meta) + len(d) + 4)
335 deltas.append(l + meta + d)
336
337 l = struct.pack(">l", sum(map(len, deltas)) + 4)
338 deltas.insert(0, l)
339 return "".join(deltas)
340
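The blob built by group() is a sequence of length-prefixed chunks: a leading 4-byte big-endian total length (which counts itself), then one chunk per revision whose own 4-byte length covers the length word, 80 bytes of metadata (node, p1, p2, and the changeset node used for the linkrev), and the delta data. A hedged sketch of walking that framing, mirroring the unpacking done by addgroup() below:

# Sketch: iterate over the chunks of a byte string returned by group().
import struct

def iterchunks(data):
    pos = 4                                   # skip the overall length word
    while pos < len(data):
        l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s", data[pos:pos + 84])
        delta = data[pos + 84:pos + l]        # the chunk length includes its header
        yield node, p1, p2, cs, delta
        pos += l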
341 def addgroup(self, data, linkmapper, transaction):
342 # given a set of deltas, add them to the revision log. the
343 # first delta is against its parent, which should be in our
344 # log, the rest are against the previous delta.
345
346 if len(data) <= 4: return
347
348 # retrieve the parent revision of the delta chain
349 chain = data[28:48]
350 text = self.revision(chain)
351
352 # track the base of the current delta log
353 r = self.count()
354 t = r - 1
355
356 base = prev = -1
357 start = end = 0
358 if r:
359 start = self.start(self.base(t))
360 end = self.end(t)
361 measure = self.length(self.base(t))
362 base = self.base(t)
363 prev = self.tip()
364
365 transaction.add(self.datafile, end)
366 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
367 dfh = self.opener(self.datafile, "a")
368 ifh = self.opener(self.indexfile, "a")
369
370 # loop through our set of deltas
371 pos = 4
372 while pos < len(data):
373 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
374 data[pos:pos+84])
375 link = linkmapper(cs)
376 delta = data[pos + 84:pos + l]
377 pos += l
378
379 # full versions are inserted when the needed deltas become
380 # comparable to the uncompressed text or when the previous
381 # version is not the one we have a delta against. We use
382 # the size of the previous full rev as a proxy for the
383 # current size.
384
385 if chain == prev:
386 cdelta = compress(delta)
387
388 if chain != prev or (end - start + len(cdelta)) > measure * 2:
389 # flush our writes here so we can read it in revision
390 dfh.flush()
391 ifh.flush()
392 text = self.revision(self.node(t))
393 text = self.patch(text, delta)
394 chk = self.addrevision(text, transaction, link, p1, p2)
395 if chk != node:
396 raise "consistency error adding group"
397 measure = len(text)
398 else:
399 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
400 self.index.append(e)
401 self.nodemap[node] = r
402 dfh.write(cdelta)
403 ifh.write(struct.pack(indexformat, *e))
404
405 t, r = r, r + 1
406 chain = prev
407 start = self.start(self.base(t))
408 end = self.end(t)
409
410 dfh.close()
411 ifh.close()
412 return node
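For completeness, a hypothetical harness for the class above: the constructor needs only an opener callable and two file names, and addrevision()/addgroup() touch the transaction solely through its add(file, offset) method, so a do-nothing stand-in is enough for experimentation. Every name below is an assumption for illustration, not part of this changeset.

# Hypothetical harness; assumes the revlog class above is importable.
import os

def makeopener(base):
    def o(name, mode="r"):
        return open(os.path.join(base, name), mode)
    return o

class faketransaction:
    def add(self, file, offset):
        pass          # a real transaction records (file, offset) for rollback

# r = revlog(makeopener("/tmp/revlog-demo"), "demo.i", "demo.d")
# t = faketransaction()
# n1 = r.addrevision("one\n", t, 0)           # linkrev 0
# n2 = r.addrevision("one\ntwo\n", t, 1)      # stored as a delta against n1
# assert r.revision(n2) == "one\ntwo\n"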