##// END OF EJS Templates
Diff in subdirectories from Jake Edge...
mpm@selenic.com -
r64:b3e2ddff default
parent child Browse files
Show More
@@ -1,371 +1,380
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # mercurial - a minimal scalable distributed SCM
3 # mercurial - a minimal scalable distributed SCM
4 # v0.4e "sabina"
4 # v0.4e "sabina"
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 # the psyco compiler makes commits a bit faster
11 # the psyco compiler makes commits a bit faster
12 # and makes changegroup merge about 20 times slower!
12 # and makes changegroup merge about 20 times slower!
13 # try:
13 # try:
14 # import psyco
14 # import psyco
15 # psyco.full()
15 # psyco.full()
16 # except:
16 # except:
17 # pass
17 # pass
18
18
19 import sys, os, time
19 import sys, os, time
20 from mercurial import hg, mdiff, fancyopts
20 from mercurial import hg, mdiff, fancyopts
21
21
22 def help():
22 def help():
23 print """\
23 print """\
24 commands:
24 commands:
25
25
26 init create a new repository in this directory
26 init create a new repository in this directory
27 branch <path> create a branch of <path> in this directory
27 branch <path> create a branch of <path> in this directory
28 merge <path> merge changes from <path> into local repository
28 merge <path> merge changes from <path> into local repository
29 checkout [changeset] checkout the latest or given changeset
29 checkout [changeset] checkout the latest or given changeset
30 status show new, missing, and changed files in working dir
30 status show new, missing, and changed files in working dir
31 add [files...] add the given files in the next commit
31 add [files...] add the given files in the next commit
32 remove [files...] remove the given files in the next commit
32 remove [files...] remove the given files in the next commit
33 addremove add all new files, delete all missing files
33 addremove add all new files, delete all missing files
34 commit commit all changes to the repository
34 commit commit all changes to the repository
35 history show changeset history
35 history show changeset history
36 log <file> show revision history of a single file
36 log <file> show revision history of a single file
37 dump <file> [rev] dump the latest or given revision of a file
37 dump <file> [rev] dump the latest or given revision of a file
38 dumpmanifest [rev] dump the latest or given revision of the manifest
38 dumpmanifest [rev] dump the latest or given revision of the manifest
39 diff [files...] diff working directory (or selected files)
39 diff [files...] diff working directory (or selected files)
40 """
40 """
41
41
42 def filterfiles(list, files):
42 def filterfiles(list, files):
43 l = [ x for x in list if x in files ]
43 l = [ x for x in list if x in files ]
44
44
45 for f in files:
45 for f in files:
46 if f[-1] != os.sep: f += os.sep
46 if f[-1] != os.sep: f += os.sep
47 l += [ x for x in list if x.startswith(f) ]
47 l += [ x for x in list if x.startswith(f) ]
48 return l
48 return l
49
49
50 def diff(files = None, node1 = None, node2 = None):
50 def diff(files = None, node1 = None, node2 = None):
51 def date(c):
52 return time.asctime(time.gmtime(float(c[2].split(' ')[0])))
51
53
52 if node2:
54 if node2:
53 change = repo.changelog.read(node2)
55 change = repo.changelog.read(node2)
54 mmap2 = repo.manifest.read(change[0])
56 mmap2 = repo.manifest.read(change[0])
55 (c, a, d) = repo.diffrevs(node1, node2)
57 (c, a, d) = repo.diffrevs(node1, node2)
56 def read(f): return repo.file(f).read(mmap2[f])
58 def read(f): return repo.file(f).read(mmap2[f])
59 date2 = date(change)
57 else:
60 else:
61 date2 = time.asctime()
58 if not node1:
62 if not node1:
59 node1 = repo.current
63 node1 = repo.current
60 (c, a, d) = repo.diffdir(repo.root, node1)
64 (c, a, d) = repo.diffdir(repo.root, node1)
61 def read(f): return file(f).read()
65 def read(f): return file(os.path.join(repo.root, f)).read()
62
66
63 change = repo.changelog.read(node1)
67 change = repo.changelog.read(node1)
64 mmap = repo.manifest.read(change[0])
68 mmap = repo.manifest.read(change[0])
69 date1 = date(change)
65
70
66 if files:
71 if files:
67 (c, a, d) = map(lambda x: filterfiles(x, files), (c, a, d))
72 (c, a, d) = map(lambda x: filterfiles(x, files), (c, a, d))
68
73
69 for f in c:
74 for f in c:
70 to = repo.file(f).read(mmap[f])
75 to = repo.file(f).read(mmap[f])
71 tn = read(f)
76 tn = read(f)
72 sys.stdout.write(mdiff.unidiff(to, tn, f))
77 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
73 for f in a:
78 for f in a:
74 to = ""
79 to = ""
75 tn = read(f)
80 tn = read(f)
76 sys.stdout.write(mdiff.unidiff(to, tn, f))
81 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
77 for f in d:
82 for f in d:
78 to = repo.file(f).read(mmap[f])
83 to = repo.file(f).read(mmap[f])
79 tn = ""
84 tn = ""
80 sys.stdout.write(mdiff.unidiff(to, tn, f))
85 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
81
82
86
83 options = {}
87 options = {}
84 opts = [('v', 'verbose', None, 'verbose'),
88 opts = [('v', 'verbose', None, 'verbose'),
85 ('d', 'debug', None, 'debug')]
89 ('d', 'debug', None, 'debug')]
86
90
87 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
91 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
88 'hg [options] <command> [command options] [files]')
92 'hg [options] <command> [command options] [files]')
89
93
90 try:
94 try:
91 cmd = args[0]
95 cmd = args[0]
92 args = args[1:]
96 args = args[1:]
93 except:
97 except:
94 cmd = ""
98 cmd = ""
95
99
96 ui = hg.ui(options["verbose"], options["debug"])
100 ui = hg.ui(options["verbose"], options["debug"])
97
101
98 if cmd == "init":
102 if cmd == "init":
99 repo = hg.repository(ui, ".", create=1)
103 repo = hg.repository(ui, ".", create=1)
100 sys.exit(0)
104 sys.exit(0)
101 elif cmd == "branch" or cmd == "clone":
105 elif cmd == "branch" or cmd == "clone":
102 os.system("cp -al %s/.hg .hg" % args[0])
106 os.system("cp -al %s/.hg .hg" % args[0])
103 sys.exit(0)
107 sys.exit(0)
104 elif cmd == "help":
108 elif cmd == "help":
105 help()
109 help()
106 sys.exit(0)
110 sys.exit(0)
107 else:
111 else:
108 try:
112 try:
109 repo = hg.repository(ui=ui)
113 repo = hg.repository(ui=ui)
110 except:
114 except:
111 print "Unable to open repository"
115 print "Unable to open repository"
112 sys.exit(0)
116 sys.exit(0)
113
117
114 if cmd == "checkout" or cmd == "co":
118 if cmd == "checkout" or cmd == "co":
115 node = repo.changelog.tip()
119 node = repo.changelog.tip()
116 if args:
120 if args:
117 node = repo.changelog.lookup(args[0])
121 node = repo.changelog.lookup(args[0])
118 repo.checkout(node)
122 repo.checkout(node)
119
123
120 elif cmd == "add":
124 elif cmd == "add":
121 repo.add(args)
125 repo.add(args)
122
126
123 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
127 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
124 repo.remove(args)
128 repo.remove(args)
125
129
126 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
130 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
127 if 1:
131 if 1:
128 if len(args) > 0:
132 if len(args) > 0:
129 repo.commit(repo.current, args)
133 repo.commit(repo.current, args)
130 else:
134 else:
131 repo.commit(repo.current)
135 repo.commit(repo.current)
132
136
133 elif cmd == "import" or cmd == "patch":
137 elif cmd == "import" or cmd == "patch":
134 ioptions = {}
138 ioptions = {}
135 opts = [('p', 'strip', 1, 'path strip'),
139 opts = [('p', 'strip', 1, 'path strip'),
136 ('b', 'base', "", 'base path'),
140 ('b', 'base', "", 'base path'),
137 ('q', 'quiet', "", 'silence diff')
141 ('q', 'quiet', "", 'silence diff')
138 ]
142 ]
139
143
140 args = fancyopts.fancyopts(args, opts, ioptions,
144 args = fancyopts.fancyopts(args, opts, ioptions,
141 'hg import [options] <patch names>')
145 'hg import [options] <patch names>')
142 d = ioptions["base"]
146 d = ioptions["base"]
143 strip = ioptions["strip"]
147 strip = ioptions["strip"]
144 quiet = ioptions["quiet"] and "> /dev/null" or ""
148 quiet = ioptions["quiet"] and "> /dev/null" or ""
145
149
146 for patch in args:
150 for patch in args:
147 ui.status("applying %s\n" % patch)
151 ui.status("applying %s\n" % patch)
148 pf = os.path.join(d, patch)
152 pf = os.path.join(d, patch)
149
153
150 text = ""
154 text = ""
151 for l in file(pf):
155 for l in file(pf):
152 if l[:3] == "---": break
156 if l[:3] == "---": break
153 text += l
157 text += l
154
158
155 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
159 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
156 raise "patch failed!"
160 raise "patch failed!"
157 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
161 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
158 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
162 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
159 f.close()
163 f.close()
160 repo.commit(repo.current, files, text)
164 repo.commit(repo.current, files, text)
161
165
162 elif cmd == "status":
166 elif cmd == "status":
163 (c, a, d) = repo.diffdir(repo.root, repo.current)
167 (c, a, d) = repo.diffdir(repo.root, repo.current)
164 for f in c: print "C", f
168 for f in c: print "C", f
165 for f in a: print "?", f
169 for f in a: print "?", f
166 for f in d: print "R", f
170 for f in d: print "R", f
167
171
168 elif cmd == "diff":
172 elif cmd == "diff":
169 revs = []
173 revs = []
170
174
171 if args:
175 if args:
172 doptions = {}
176 doptions = {}
173 opts = [('r', 'revision', [], 'revision')]
177 opts = [('r', 'revision', [], 'revision')]
174 args = fancyopts.fancyopts(args, opts, doptions,
178 args = fancyopts.fancyopts(args, opts, doptions,
175 'hg diff [options] [files]')
179 'hg diff [options] [files]')
176 revs = map(lambda x: repo.changelog.lookup(x), doptions['revision'])
180 revs = map(lambda x: repo.changelog.lookup(x), doptions['revision'])
177
181
178 if len(revs) > 2:
182 if len(revs) > 2:
179 print "too many revisions to diff"
183 print "too many revisions to diff"
180 sys.exit(1)
184 sys.exit(1)
181 else:
185
186 if os.getcwd() != repo.root:
187 relpath = os.getcwd()[len(repo.root) + 1: ]
188 if not args: args = [ relpath ]
189 else: args = [ os.path.join(relpath, x) for x in args ]
190
182 diff(args, *revs)
191 diff(args, *revs)
183
192
184 elif cmd == "export":
193 elif cmd == "export":
185 node = repo.changelog.lookup(args[0])
194 node = repo.changelog.lookup(args[0])
186 prev = repo.changelog.parents(node)[0]
195 prev = repo.changelog.parents(node)[0]
187 diff(None, prev, node)
196 diff(None, prev, node)
188
197
189 elif cmd == "debugchangegroup":
198 elif cmd == "debugchangegroup":
190 newer = repo.newer(map(repo.changelog.lookup, args))
199 newer = repo.newer(map(repo.changelog.lookup, args))
191 for chunk in repo.changegroup(newer):
200 for chunk in repo.changegroup(newer):
192 sys.stdout.write(chunk)
201 sys.stdout.write(chunk)
193
202
194 elif cmd == "debugaddchangegroup":
203 elif cmd == "debugaddchangegroup":
195 data = sys.stdin.read()
204 data = sys.stdin.read()
196 repo.addchangegroup(data)
205 repo.addchangegroup(data)
197
206
198 elif cmd == "addremove":
207 elif cmd == "addremove":
199 (c, a, d) = repo.diffdir(repo.root, repo.current)
208 (c, a, d) = repo.diffdir(repo.root, repo.current)
200 repo.add(a)
209 repo.add(a)
201 repo.remove(d)
210 repo.remove(d)
202
211
203 elif cmd == "history":
212 elif cmd == "history":
204 for i in range(repo.changelog.count()):
213 for i in range(repo.changelog.count()):
205 n = repo.changelog.node(i)
214 n = repo.changelog.node(i)
206 changes = repo.changelog.read(n)
215 changes = repo.changelog.read(n)
207 (p1, p2) = repo.changelog.parents(n)
216 (p1, p2) = repo.changelog.parents(n)
208 (h, h1, h2) = map(hg.hex, (n, p1, p2))
217 (h, h1, h2) = map(hg.hex, (n, p1, p2))
209 (i1, i2) = map(repo.changelog.rev, (p1, p2))
218 (i1, i2) = map(repo.changelog.rev, (p1, p2))
210 print "rev: %4d:%s" % (i, h)
219 print "rev: %4d:%s" % (i, h)
211 print "parents: %4d:%s" % (i1, h1)
220 print "parents: %4d:%s" % (i1, h1)
212 if i2: print " %4d:%s" % (i2, h2)
221 if i2: print " %4d:%s" % (i2, h2)
213 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
222 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
214 hg.hex(changes[0]))
223 hg.hex(changes[0]))
215 print "user:", changes[1]
224 print "user:", changes[1]
216 print "date:", time.asctime(
225 print "date:", time.asctime(
217 time.localtime(float(changes[2].split(' ')[0])))
226 time.localtime(float(changes[2].split(' ')[0])))
218 print "files:", " ".join(changes[3])
227 print "files:", " ".join(changes[3])
219 print "description:"
228 print "description:"
220 print changes[4]
229 print changes[4]
221
230
222 elif cmd == "log":
231 elif cmd == "log":
223 if args:
232 if args:
224 r = repo.file(args[0])
233 r = repo.file(args[0])
225 for i in range(r.count()):
234 for i in range(r.count()):
226 n = r.node(i)
235 n = r.node(i)
227 (p1, p2) = r.parents(n)
236 (p1, p2) = r.parents(n)
228 (h, h1, h2) = map(hg.hex, (n, p1, p2))
237 (h, h1, h2) = map(hg.hex, (n, p1, p2))
229 (i1, i2) = map(r.rev, (p1, p2))
238 (i1, i2) = map(r.rev, (p1, p2))
230 cr = r.linkrev(n)
239 cr = r.linkrev(n)
231 cn = hg.hex(repo.changelog.node(cr))
240 cn = hg.hex(repo.changelog.node(cr))
232 print "rev: %4d:%s" % (i, h)
241 print "rev: %4d:%s" % (i, h)
233 print "changeset: %4d:%s" % (cr, cn)
242 print "changeset: %4d:%s" % (cr, cn)
234 print "parents: %4d:%s" % (i1, h1)
243 print "parents: %4d:%s" % (i1, h1)
235 if i2: print " %4d:%s" % (i2, h2)
244 if i2: print " %4d:%s" % (i2, h2)
236 else:
245 else:
237 print "missing filename"
246 print "missing filename"
238
247
239 elif cmd == "dump":
248 elif cmd == "dump":
240 if args:
249 if args:
241 r = repo.file(args[0])
250 r = repo.file(args[0])
242 n = r.tip()
251 n = r.tip()
243 if len(args) > 1: n = r.lookup(args[1])
252 if len(args) > 1: n = r.lookup(args[1])
244 sys.stdout.write(r.read(n))
253 sys.stdout.write(r.read(n))
245 else:
254 else:
246 print "missing filename"
255 print "missing filename"
247
256
248 elif cmd == "dumpmanifest":
257 elif cmd == "dumpmanifest":
249 n = repo.manifest.tip()
258 n = repo.manifest.tip()
250 if len(args) > 0:
259 if len(args) > 0:
251 n = repo.manifest.lookup(args[0])
260 n = repo.manifest.lookup(args[0])
252 m = repo.manifest.read(n)
261 m = repo.manifest.read(n)
253 files = m.keys()
262 files = m.keys()
254 files.sort()
263 files.sort()
255
264
256 for f in files:
265 for f in files:
257 print hg.hex(m[f]), f
266 print hg.hex(m[f]), f
258
267
259 elif cmd == "debughash":
268 elif cmd == "debughash":
260 f = repo.file(args[0])
269 f = repo.file(args[0])
261 print f.encodepath(args[0])
270 print f.encodepath(args[0])
262
271
263 elif cmd == "debugindex":
272 elif cmd == "debugindex":
264 r = hg.revlog(open, args[0], "")
273 r = hg.revlog(open, args[0], "")
265 print " rev offset length base linkrev"+\
274 print " rev offset length base linkrev"+\
266 " p1 p2 nodeid"
275 " p1 p2 nodeid"
267 for i in range(r.count()):
276 for i in range(r.count()):
268 e = r.index[i]
277 e = r.index[i]
269 print "% 6d % 9d % 7d % 5d % 7d %s.. %s.. %s.." % (
278 print "% 6d % 9d % 7d % 5d % 7d %s.. %s.. %s.." % (
270 i, e[0], e[1], e[2], e[3],
279 i, e[0], e[1], e[2], e[3],
271 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
280 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
272
281
273 elif cmd == "merge":
282 elif cmd == "merge":
274 if args:
283 if args:
275 other = hg.repository(ui, args[0])
284 other = hg.repository(ui, args[0])
276 print "retrieving changegroup"
285 print "retrieving changegroup"
277 cg = repo.getchangegroup(other)
286 cg = repo.getchangegroup(other)
278 repo.addchangegroup(cg)
287 repo.addchangegroup(cg)
279 else:
288 else:
280 print "missing source repository"
289 print "missing source repository"
281
290
282 elif cmd == "debugoldmerge":
291 elif cmd == "debugoldmerge":
283 if args:
292 if args:
284 other = hg.repository(ui, args[0])
293 other = hg.repository(ui, args[0])
285 repo.merge(other)
294 repo.merge(other)
286 else:
295 else:
287 print "missing source repository"
296 print "missing source repository"
288
297
289 elif cmd == "verify":
298 elif cmd == "verify":
290 filelinkrevs = {}
299 filelinkrevs = {}
291 filenodes = {}
300 filenodes = {}
292 manifestchangeset = {}
301 manifestchangeset = {}
293 changesets = revisions = files = 0
302 changesets = revisions = files = 0
294
303
295 print "checking changesets"
304 print "checking changesets"
296 for i in range(repo.changelog.count()):
305 for i in range(repo.changelog.count()):
297 changesets += 1
306 changesets += 1
298 n = repo.changelog.node(i)
307 n = repo.changelog.node(i)
299 changes = repo.changelog.read(n)
308 changes = repo.changelog.read(n)
300 manifestchangeset[changes[0]] = n
309 manifestchangeset[changes[0]] = n
301 for f in changes[3]:
310 for f in changes[3]:
302 revisions += 1
311 revisions += 1
303 filelinkrevs.setdefault(f, []).append(i)
312 filelinkrevs.setdefault(f, []).append(i)
304
313
305 print "checking manifests"
314 print "checking manifests"
306 for i in range(repo.manifest.count()):
315 for i in range(repo.manifest.count()):
307 n = repo.manifest.node(i)
316 n = repo.manifest.node(i)
308 ca = repo.changelog.node(repo.manifest.linkrev(n))
317 ca = repo.changelog.node(repo.manifest.linkrev(n))
309 cc = manifestchangeset[n]
318 cc = manifestchangeset[n]
310 if ca != cc:
319 if ca != cc:
311 print "manifest %s points to %s, not %s" % \
320 print "manifest %s points to %s, not %s" % \
312 (hg.hex(n), hg.hex(ca), hg.hex(cc))
321 (hg.hex(n), hg.hex(ca), hg.hex(cc))
313 m = repo.manifest.read(n)
322 m = repo.manifest.read(n)
314 for f, fn in m.items():
323 for f, fn in m.items():
315 filenodes.setdefault(f, {})[fn] = 1
324 filenodes.setdefault(f, {})[fn] = 1
316
325
317 print "crosschecking files in changesets and manifests"
326 print "crosschecking files in changesets and manifests"
318 for f in filenodes:
327 for f in filenodes:
319 if f not in filelinkrevs:
328 if f not in filelinkrevs:
320 print "file %s in manifest but not in changesets"
329 print "file %s in manifest but not in changesets"
321
330
322 for f in filelinkrevs:
331 for f in filelinkrevs:
323 if f not in filenodes:
332 if f not in filenodes:
324 print "file %s in changeset but not in manifest"
333 print "file %s in changeset but not in manifest"
325
334
326 print "checking files"
335 print "checking files"
327 for f in filenodes:
336 for f in filenodes:
328 files += 1
337 files += 1
329 fl = repo.file(f)
338 fl = repo.file(f)
330 nodes = {"\0"*20: 1}
339 nodes = {"\0"*20: 1}
331 for i in range(fl.count()):
340 for i in range(fl.count()):
332 n = fl.node(i)
341 n = fl.node(i)
333
342
334 if n not in filenodes[f]:
343 if n not in filenodes[f]:
335 print "%s:%s not in manifests" % (f, hg.hex(n))
344 print "%s:%s not in manifests" % (f, hg.hex(n))
336 else:
345 else:
337 del filenodes[f][n]
346 del filenodes[f][n]
338
347
339 flr = fl.linkrev(n)
348 flr = fl.linkrev(n)
340 if flr not in filelinkrevs[f]:
349 if flr not in filelinkrevs[f]:
341 print "%s:%s points to unexpected changeset rev %d" \
350 print "%s:%s points to unexpected changeset rev %d" \
342 % (f, hg.hex(n), fl.linkrev(n))
351 % (f, hg.hex(n), fl.linkrev(n))
343 else:
352 else:
344 filelinkrevs[f].remove(flr)
353 filelinkrevs[f].remove(flr)
345
354
346 # verify contents
355 # verify contents
347 t = fl.read(n)
356 t = fl.read(n)
348
357
349 # verify parents
358 # verify parents
350 (p1, p2) = fl.parents(n)
359 (p1, p2) = fl.parents(n)
351 if p1 not in nodes:
360 if p1 not in nodes:
352 print "%s:%s unknown parent 1 %s" % (f, hg.hex(n), hg.hex(p1))
361 print "%s:%s unknown parent 1 %s" % (f, hg.hex(n), hg.hex(p1))
353 if p2 not in nodes:
362 if p2 not in nodes:
354 print "file %s:%s unknown parent %s" % (f, hg.hex(n), hg.hex(p1))
363 print "file %s:%s unknown parent %s" % (f, hg.hex(n), hg.hex(p1))
355 nodes[n] = 1
364 nodes[n] = 1
356
365
357 # cross-check
366 # cross-check
358 for flr in filelinkrevs[f]:
367 for flr in filelinkrevs[f]:
359 print "changeset rev %d not in %s" % (flr, f)
368 print "changeset rev %d not in %s" % (flr, f)
360
369
361 for node in filenodes[f]:
370 for node in filenodes[f]:
362 print "node %s in manifests not in %s" % (hg.hex(n), f)
371 print "node %s in manifests not in %s" % (hg.hex(n), f)
363
372
364
373
365 print "%d files, %d changesets, %d total revisions" % (files, changesets,
374 print "%d files, %d changesets, %d total revisions" % (files, changesets,
366 revisions)
375 revisions)
367
376
368 else:
377 else:
369 print "unknown command\n"
378 print "unknown command\n"
370 help()
379 help()
371 sys.exit(1)
380 sys.exit(1)
@@ -1,873 +1,873
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
8 import sys, struct, sha, socket, os, time, base64, re, urllib2
9 import urllib
9 import urllib
10 from mercurial import byterange
10 from mercurial import byterange
11 from mercurial.transaction import *
11 from mercurial.transaction import *
12 from mercurial.revlog import *
12 from mercurial.revlog import *
13
13
14 class filelog(revlog):
14 class filelog(revlog):
15 def __init__(self, opener, path):
15 def __init__(self, opener, path):
16 s = self.encodepath(path)
16 s = self.encodepath(path)
17 revlog.__init__(self, opener, os.path.join("data", s + "i"),
17 revlog.__init__(self, opener, os.path.join("data", s + "i"),
18 os.path.join("data", s))
18 os.path.join("data", s))
19
19
20 def encodepath(self, path):
20 def encodepath(self, path):
21 s = sha.sha(path).digest()
21 s = sha.sha(path).digest()
22 s = base64.encodestring(s)[:-3]
22 s = base64.encodestring(s)[:-3]
23 s = re.sub("\+", "%", s)
23 s = re.sub("\+", "%", s)
24 s = re.sub("/", "_", s)
24 s = re.sub("/", "_", s)
25 return s
25 return s
26
26
27 def read(self, node):
27 def read(self, node):
28 return self.revision(node)
28 return self.revision(node)
29 def add(self, text, transaction, link, p1=None, p2=None):
29 def add(self, text, transaction, link, p1=None, p2=None):
30 return self.addrevision(text, transaction, link, p1, p2)
30 return self.addrevision(text, transaction, link, p1, p2)
31
31
32 def resolvedag(self, old, new, transaction, link):
32 def resolvedag(self, old, new, transaction, link):
33 """resolve unmerged heads in our DAG"""
33 """resolve unmerged heads in our DAG"""
34 if old == new: return None
34 if old == new: return None
35 a = self.ancestor(old, new)
35 a = self.ancestor(old, new)
36 if old == a: return None
36 if old == a: return None
37 return self.merge3(old, new, a, transaction, link)
37 return self.merge3(old, new, a, transaction, link)
38
38
39 def merge3(self, my, other, base, transaction, link):
39 def merge3(self, my, other, base, transaction, link):
40 """perform a 3-way merge and append the result"""
40 """perform a 3-way merge and append the result"""
41 def temp(prefix, node):
41 def temp(prefix, node):
42 (fd, name) = tempfile.mkstemp(prefix)
42 (fd, name) = tempfile.mkstemp(prefix)
43 f = os.fdopen(fd, "w")
43 f = os.fdopen(fd, "w")
44 f.write(self.revision(node))
44 f.write(self.revision(node))
45 f.close()
45 f.close()
46 return name
46 return name
47
47
48 a = temp("local", my)
48 a = temp("local", my)
49 b = temp("remote", other)
49 b = temp("remote", other)
50 c = temp("parent", base)
50 c = temp("parent", base)
51
51
52 cmd = os.environ["HGMERGE"]
52 cmd = os.environ["HGMERGE"]
53 r = os.system("%s %s %s %s" % (cmd, a, b, c))
53 r = os.system("%s %s %s %s" % (cmd, a, b, c))
54 if r:
54 if r:
55 raise "Merge failed, implement rollback!"
55 raise "Merge failed, implement rollback!"
56
56
57 t = open(a).read()
57 t = open(a).read()
58 os.unlink(a)
58 os.unlink(a)
59 os.unlink(b)
59 os.unlink(b)
60 os.unlink(c)
60 os.unlink(c)
61 return self.addrevision(t, transaction, link, my, other)
61 return self.addrevision(t, transaction, link, my, other)
62
62
63 def merge(self, other, transaction, linkseq, link):
63 def merge(self, other, transaction, linkseq, link):
64 """perform a merge and resolve resulting heads"""
64 """perform a merge and resolve resulting heads"""
65 (o, n) = self.mergedag(other, transaction, linkseq)
65 (o, n) = self.mergedag(other, transaction, linkseq)
66 return self.resolvedag(o, n, transaction, link)
66 return self.resolvedag(o, n, transaction, link)
67
67
68 class manifest(revlog):
68 class manifest(revlog):
69 def __init__(self, opener):
69 def __init__(self, opener):
70 self.mapcache = None
70 self.mapcache = None
71 self.listcache = None
71 self.listcache = None
72 self.addlist = None
72 self.addlist = None
73 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
73 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
74
74
75 def read(self, node):
75 def read(self, node):
76 if self.mapcache and self.mapcache[0] == node:
76 if self.mapcache and self.mapcache[0] == node:
77 return self.mapcache[1]
77 return self.mapcache[1]
78 text = self.revision(node)
78 text = self.revision(node)
79 map = {}
79 map = {}
80 self.listcache = (text, text.splitlines(1))
80 self.listcache = (text, text.splitlines(1))
81 for l in self.listcache[1]:
81 for l in self.listcache[1]:
82 (f, n) = l.split('\0')
82 (f, n) = l.split('\0')
83 map[f] = bin(n[:40])
83 map[f] = bin(n[:40])
84 self.mapcache = (node, map)
84 self.mapcache = (node, map)
85 return map
85 return map
86
86
87 def diff(self, a, b):
87 def diff(self, a, b):
88 # this is sneaky, as we're not actually using a and b
88 # this is sneaky, as we're not actually using a and b
89 if self.listcache and len(self.listcache[0]) == len(a):
89 if self.listcache and len(self.listcache[0]) == len(a):
90 return mdiff.diff(self.listcache[1], self.addlist, 1)
90 return mdiff.diff(self.listcache[1], self.addlist, 1)
91 else:
91 else:
92 return mdiff.textdiff(a, b)
92 return mdiff.textdiff(a, b)
93
93
94 def add(self, map, transaction, link, p1=None, p2=None):
94 def add(self, map, transaction, link, p1=None, p2=None):
95 files = map.keys()
95 files = map.keys()
96 files.sort()
96 files.sort()
97
97
98 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
98 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
99 text = "".join(self.addlist)
99 text = "".join(self.addlist)
100
100
101 n = self.addrevision(text, transaction, link, p1, p2)
101 n = self.addrevision(text, transaction, link, p1, p2)
102 self.mapcache = (n, map)
102 self.mapcache = (n, map)
103 self.listcache = (text, self.addlist)
103 self.listcache = (text, self.addlist)
104
104
105 return n
105 return n
106
106
107 class changelog(revlog):
107 class changelog(revlog):
108 def __init__(self, opener):
108 def __init__(self, opener):
109 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
109 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
110
110
111 def extract(self, text):
111 def extract(self, text):
112 if not text:
112 if not text:
113 return (nullid, "", "0", [], "")
113 return (nullid, "", "0", [], "")
114 last = text.index("\n\n")
114 last = text.index("\n\n")
115 desc = text[last + 2:]
115 desc = text[last + 2:]
116 l = text[:last].splitlines()
116 l = text[:last].splitlines()
117 manifest = bin(l[0])
117 manifest = bin(l[0])
118 user = l[1]
118 user = l[1]
119 date = l[2]
119 date = l[2]
120 files = l[3:]
120 files = l[3:]
121 return (manifest, user, date, files, desc)
121 return (manifest, user, date, files, desc)
122
122
123 def read(self, node):
123 def read(self, node):
124 return self.extract(self.revision(node))
124 return self.extract(self.revision(node))
125
125
126 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
126 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
127 user = (os.environ.get("HGUSER") or
127 user = (os.environ.get("HGUSER") or
128 os.environ.get("EMAIL") or
128 os.environ.get("EMAIL") or
129 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
129 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
130 date = "%d %d" % (time.time(), time.timezone)
130 date = "%d %d" % (time.time(), time.timezone)
131 list.sort()
131 list.sort()
132 l = [hex(manifest), user, date] + list + ["", desc]
132 l = [hex(manifest), user, date] + list + ["", desc]
133 text = "\n".join(l)
133 text = "\n".join(l)
134 return self.addrevision(text, transaction, self.count(), p1, p2)
134 return self.addrevision(text, transaction, self.count(), p1, p2)
135
135
136 def merge3(self, my, other, base):
136 def merge3(self, my, other, base):
137 pass
137 pass
138
138
139 class dircache:
139 class dircache:
140 def __init__(self, opener, ui):
140 def __init__(self, opener, ui):
141 self.opener = opener
141 self.opener = opener
142 self.dirty = 0
142 self.dirty = 0
143 self.ui = ui
143 self.ui = ui
144 self.map = None
144 self.map = None
145 def __del__(self):
145 def __del__(self):
146 if self.dirty: self.write()
146 if self.dirty: self.write()
147 def __getitem__(self, key):
147 def __getitem__(self, key):
148 try:
148 try:
149 return self.map[key]
149 return self.map[key]
150 except TypeError:
150 except TypeError:
151 self.read()
151 self.read()
152 return self[key]
152 return self[key]
153
153
154 def read(self):
154 def read(self):
155 if self.map is not None: return self.map
155 if self.map is not None: return self.map
156
156
157 self.map = {}
157 self.map = {}
158 try:
158 try:
159 st = self.opener("dircache").read()
159 st = self.opener("dircache").read()
160 except: return
160 except: return
161
161
162 pos = 0
162 pos = 0
163 while pos < len(st):
163 while pos < len(st):
164 e = struct.unpack(">llll", st[pos:pos+16])
164 e = struct.unpack(">llll", st[pos:pos+16])
165 l = e[3]
165 l = e[3]
166 pos += 16
166 pos += 16
167 f = st[pos:pos + l]
167 f = st[pos:pos + l]
168 self.map[f] = e[:3]
168 self.map[f] = e[:3]
169 pos += l
169 pos += l
170
170
171 def update(self, files):
171 def update(self, files):
172 if not files: return
172 if not files: return
173 self.read()
173 self.read()
174 self.dirty = 1
174 self.dirty = 1
175 for f in files:
175 for f in files:
176 try:
176 try:
177 s = os.stat(f)
177 s = os.stat(f)
178 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
178 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
179 except IOError:
179 except IOError:
180 self.remove(f)
180 self.remove(f)
181
181
182 def taint(self, files):
182 def taint(self, files):
183 if not files: return
183 if not files: return
184 self.read()
184 self.read()
185 self.dirty = 1
185 self.dirty = 1
186 for f in files:
186 for f in files:
187 self.map[f] = (0, -1, 0)
187 self.map[f] = (0, -1, 0)
188
188
189 def remove(self, files):
189 def remove(self, files):
190 if not files: return
190 if not files: return
191 self.read()
191 self.read()
192 self.dirty = 1
192 self.dirty = 1
193 for f in files:
193 for f in files:
194 try:
194 try:
195 del self.map[f]
195 del self.map[f]
196 except KeyError:
196 except KeyError:
197 self.ui.warn("Not in dircache: %s\n" % f)
197 self.ui.warn("Not in dircache: %s\n" % f)
198 pass
198 pass
199
199
def clear(self):
    # forget every cached entry; the next write() persists an empty cache
    self.map = {}
    self.dirty = 1
203
203
def write(self):
    # serialize self.map back to the "dircache" file: per entry a
    # ">llll" header of (mode, size, mtime, name-length), then the name
    st = self.opener("dircache", "w")
    for f, e in self.map.items():
        e = struct.pack(">llll", e[0], e[1], e[2], len(f))
        st.write(e + f)
    self.dirty = 0
210
210
def copy(self):
    """Return a shallow copy of the cache map, loading it first."""
    self.read()
    return dict(self.map)
214
214
# used to avoid circular references so destructors work
def opener(base):
    # return a function that opens files relative to "base"; http://
    # bases get a range-capable remote reader instead of a local file
    p = base
    def o(path, mode="r"):
        if p[:7] == "http://":
            f = os.path.join(p, urllib.quote(path))
            return httprangereader(f)

        f = os.path.join(p, path)

        if mode != "r" and os.path.isfile(f):
            s = os.stat(f)
            if s.st_nlink > 1:
                # break hardlinks before writing: copy to a temp file and
                # rename it over the original so other links keep old data
                file(f + ".tmp", "w").write(file(f).read())
                os.rename(f+".tmp", f)

        return file(f, mode)

    return o
234
234
235 class localrepository:
235 class localrepository:
def __init__(self, ui, path=None, create=0):
    # open (or, with create=1, initialize) a repository.  path may be a
    # local directory, an http:// URL (read-only static remote), or None
    # to search upward from the cwd for a ".hg" directory.
    self.remote = 0
    if path and path[:7] == "http://":
        self.remote = 1
        self.path = path
    else:
        if not path:
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                p = os.path.dirname(p)
                if p == "/": raise "No repo found"
            path = p
        self.path = os.path.join(path, ".hg")

    self.root = path
    self.ui = ui

    if create:
        os.mkdir(self.path)
        os.mkdir(self.join("data"))

    self.opener = opener(self.path)
    self.manifest = manifest(self.opener)
    self.changelog = changelog(self.opener)
    self.ignorelist = None  # lazily loaded by ignore()

    if not self.remote:
        self.dircache = dircache(self.opener, ui)
        try:
            # ".hg/current" holds the hex node of the checked-out changeset
            self.current = bin(self.opener("current").read())
        except IOError:
            self.current = None
268
268
def setcurrent(self, node):
    # record node as the working directory's checked-out changeset,
    # both in memory and hex-encoded in ".hg/current"
    self.current = node
    self.opener("current", "w").write(hex(node))
272
272
def ignore(self, f):
    """Return true if filename f matches a pattern in .hgignore."""
    if self.ignorelist is None:
        # lazily compile the patterns: one regex per non-blank line
        self.ignorelist = []
        try:
            lines = open(os.path.join(self.root, ".hgignore")).readlines()
        except IOError:
            lines = []
        for pat in lines:
            if pat != "\n":
                self.ignorelist.append(re.compile(pat[:-1]))
    return any(p.search(f) for p in self.ignorelist)
285
285
def join(self, f):
    # resolve f relative to the repository's ".hg" directory
    return os.path.join(self.path, f)
288
288
def file(self, f):
    # return the filelog (per-file revision storage) for tracked file f
    return filelog(self.opener, f)
291
291
def transaction(self):
    # start a new journaled transaction over this repository's store
    return transaction(self.opener, self.join("journal"))
294
294
def merge(self, other):
    # pull every new changeset from repository "other" into this one;
    # when the two histories have diverged, merge the affected filelogs
    # and manifests and commit a synthetic merge changeset
    tr = self.transaction()
    changed = {}
    new = {}
    seqrev = self.changelog.count()
    # some magic to allow fiddling in nested scope
    nextrev = [seqrev]

    # helpers for back-linking file revisions to local changeset
    # revisions so we can immediately get to changeset from annotate
    def accumulate(text):
        # track which files are added in which changeset and the
        # corresponding _local_ changeset revision
        files = self.changelog.extract(text)[3]
        for f in files:
            changed.setdefault(f, []).append(nextrev[0])
        nextrev[0] += 1

    def seq(start):
        # endless stream of revision numbers starting at "start"
        while 1:
            yield start
            start += 1

    def lseq(l):
        # iterate a fixed list of linkrevs
        for r in l:
            yield r

    # begin the import/merge of changesets
    self.ui.status("merging new changesets\n")
    (co, cn) = self.changelog.mergedag(other.changelog, tr,
                                       seq(seqrev), accumulate)
    resolverev = self.changelog.count()

    # is there anything to do?
    if co == cn:
        tr.close()
        return

    # do we need to resolve?
    simple = (co == self.changelog.ancestor(co, cn))

    # merge all files changed by the changesets,
    # keeping track of the new tips
    changelist = changed.keys()
    changelist.sort()
    for f in changelist:
        # one dot per file as cheap progress output
        sys.stdout.write(".")
        sys.stdout.flush()
        r = self.file(f)
        node = r.merge(other.file(f), tr, lseq(changed[f]), resolverev)
        if node:
            new[f] = node
    sys.stdout.write("\n")

    # begin the merge of the manifest
    self.ui.status("merging manifests\n")
    (mm, mo) = self.manifest.mergedag(other.manifest, tr, seq(seqrev))

    # For simple merges, we don't need to resolve manifests or changesets
    if simple:
        tr.close()
        return

    ma = self.manifest.ancestor(mm, mo)

    # resolve the manifest to point to all the merged files
    self.ui.status("resolving manifests\n")
    mmap = self.manifest.read(mm) # mine
    omap = self.manifest.read(mo) # other
    amap = self.manifest.read(ma) # ancestor
    nmap = {}

    # three-way manifest resolution: prefer the merged file node when one
    # exists, otherwise keep/drop entries based on the common ancestor
    for f, mid in mmap.iteritems():
        if f in omap:
            if mid != omap[f]:
                nmap[f] = new.get(f, mid) # use merged version
            else:
                nmap[f] = new.get(f, mid) # they're the same
            del omap[f]
        elif f in amap:
            if mid != amap[f]:
                pass # we should prompt here
            else:
                pass # other deleted it
        else:
            nmap[f] = new.get(f, mid) # we created it

    del mmap

    for f, oid in omap.iteritems():
        if f in amap:
            if oid != amap[f]:
                pass # this is the nasty case, we should prompt
            else:
                pass # probably safe
        else:
            nmap[f] = new.get(f, oid) # remote created it

    del omap
    del amap

    node = self.manifest.add(nmap, tr, resolverev, mm, mo)

    # Now all files and manifests are merged, we add the changed files
    # and manifest id to the changelog
    self.ui.status("committing merge changeset\n")
    new = new.keys()
    new.sort()
    if co == cn: cn = -1

    edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
    edittext = self.ui.edit(edittext)
    n = self.changelog.add(node, new, edittext, tr, co, cn)

    tr.close()
410
410
def commit(self, parent, update = None, text = ""):
    # commit working-directory changes as a new changeset on top of
    # parent; "update" is the list of changed files (computed via
    # diffdir when not given), "text" seeds the commit message
    tr = self.transaction()

    try:
        # files queued by remove(); one name per line
        remove = [ l[:-1] for l in self.opener("to-remove") ]
        os.unlink(self.join("to-remove"))

    except IOError:
        remove = []

    if update == None:
        update = self.diffdir(self.root, parent)[0]

    # check in files
    new = {}
    linkrev = self.changelog.count()
    for f in update:
        try:
            t = file(f).read()
        except IOError:
            # file vanished since it was scheduled; treat it as removed
            remove.append(f)
            continue
        r = self.file(f)
        new[f] = r.add(t, tr, linkrev)

    # update manifest
    mmap = self.manifest.read(self.manifest.tip())
    mmap.update(new)
    for f in remove:
        del mmap[f]
    mnode = self.manifest.add(mmap, tr, linkrev)

    # add changeset
    new = new.keys()
    new.sort()

    # "HG:" lines are stripped from the message by ui.edit()
    edittext = text + "\n"+"".join(["HG: changed %s\n" % f for f in new])
    edittext += "".join(["HG: removed %s\n" % f for f in remove])
    edittext = self.ui.edit(edittext)

    n = self.changelog.add(mnode, new, edittext, tr)
    tr.close()

    # only after the transaction commits do we move the checkout marker
    # and refresh the stat cache
    self.setcurrent(n)
    self.dircache.update(new)
    self.dircache.remove(remove)
457
457
def checkdir(self, path):
    """Ensure the parent directory of path exists, creating any missing
    ancestor directories first."""
    parent = os.path.dirname(path)
    if not parent or os.path.isdir(parent):
        return
    # make sure the grandparents exist, then create the parent itself
    self.checkdir(parent)
    os.mkdir(parent)
464
464
def checkout(self, node):
    """Write the working directory contents for the given changeset.

    checkout is really dumb at the moment: it rewrites every tracked
    file instead of merging; it ought to basically merge.
    """
    change = self.changelog.read(node)
    mmap = self.manifest.read(change[0])

    l = mmap.keys()
    l.sort()
    stats = []
    for f in l:
        r = self.file(f)
        t = r.revision(mmap[f])
        try:
            file(f, "w").write(t)
        # was a bare "except:", which hid real errors; only a missing
        # parent directory should trigger the create-and-retry path
        except IOError:
            self.checkdir(f)
            file(f, "w").write(t)

    self.setcurrent(node)
    # the stat cache now describes exactly the files we just wrote
    self.dircache.clear()
    self.dircache.update(l)
486
486
def diffdir(self, path, changeset):
    # compare the working directory against a changeset's manifest and
    # return (changed, added, deleted) lists of file names
    changed = []
    mf = {}
    added = []

    if changeset:
        change = self.changelog.read(changeset)
        mf = self.manifest.read(change[0])

    if changeset == self.current:
        # the stat cache is only valid for the checked-out changeset
        dc = self.dircache.copy()
    else:
        # otherwise every manifest entry must be content-compared
        dc = dict.fromkeys(mf)

    def fcmp(fn):
        # compare working-file content against the manifest revision
        t1 = file(os.path.join(self.root, fn)).read()
        t2 = self.file(fn).revision(mf[fn])
        return cmp(t1, t2)

    for dir, subdirs, files in os.walk(self.root):
        d = dir[len(self.root)+1:]
        if ".hg" in subdirs: subdirs.remove(".hg")

        for f in files:
            fn = os.path.join(d, f)
            # NOTE(review): bare except on stat silently skips any
            # unreadable file -- probably should be OSError only
            try: s = os.stat(os.path.join(self.root, fn))
            except: continue
            if fn in dc:
                c = dc[fn]
                del dc[fn]
                if not c:
                    # no cached stat (or tainted): do a full compare
                    if fcmp(fn):
                        changed.append(fn)
                elif c[1] != s.st_size:
                    changed.append(fn)
                elif c[0] != s.st_mode or c[2] != s.st_mtime:
                    # size matches but mode/mtime differ: verify content
                    if fcmp(fn):
                        changed.append(fn)
            else:
                if self.ignore(fn): continue
                added.append(fn)

    # whatever is left in dc was never seen on disk
    deleted = dc.keys()
    deleted.sort()

    return (changed, added, deleted)
533
533
def diffrevs(self, node1, node2):
    """Return (changed, added, deleted) filename lists between the
    manifests of two changesets."""
    changed, added = [], []

    # NOTE(review): the first manifest is mutated (del) below; this
    # assumes manifest.read() hands back a private copy -- verify
    old = self.manifest.read(self.changelog.read(node1)[0])
    new = self.manifest.read(self.changelog.read(node2)[0])

    for fn in new:
        if fn in old:
            if old[fn] != new[fn]:
                changed.append(fn)
            del old[fn]
        else:
            added.append(fn)

    # anything left in the first manifest is gone from the second
    deleted = old.keys()
    deleted.sort()

    return (changed, added, deleted)
554
554
def add(self, list):
    # taint the named files' dircache entries so the next
    # diffdir()/commit() re-examines their content
    self.dircache.taint(list)
557
557
def remove(self, list):
    """Queue file names for removal; commit() consumes the ".hg/to-remove"
    file and deletes it."""
    out = self.opener("to-remove", "a")
    for name in list:
        out.write(name + "\n")
562
562
def branches(self, nodes):
    """For each starting node (default: the tip), walk first parents down
    to the bottom of its linear branch and report a
    (head, branch-bottom, parent1, parent2) tuple."""
    if not nodes:
        nodes = [self.changelog.tip()]
    out = []
    for head in nodes:
        cur = head
        while cur:
            p = self.changelog.parents(cur)
            # stop at a merge node or at a root (first parent null)
            if p[1] != nullid or p[0] == nullid:
                out.append((head, cur, p[0], p[1]))
                break
            cur = p[0]
    return out
575
575
def between(self, pairs):
    """For each (top, bottom) pair, walk first parents from top toward
    bottom and sample the nodes at exponentially growing distances
    (1, 2, 4, ...) -- used for binary-search discovery."""
    out = []
    for top, bottom in pairs:
        node, samples = top, []
        dist, step = 0, 1
        while node != bottom:
            parent = self.changelog.parents(node)[0]
            if dist == step:
                samples.append(node)
                step *= 2
            node = parent
            dist += 1
        out.append(samples)
    return out
594
594
def newer(self, nodes):
    """Return the changelog nodes that descend from any of the given
    nodes, in revision order."""
    marked = {}
    result = []
    cl = self.changelog
    total = lowest = cl.count()
    # seed the mark set with the parents of the given nodes and find the
    # earliest revision we need to scan from
    for n in nodes:
        lowest = min(lowest, cl.rev(n))
        for p in cl.parents(n):
            marked[p] = 1

    # sweep forward: a node is new if a parent is marked and the node
    # itself was not one of the seeds
    for i in xrange(lowest, total):
        n = cl.node(i)
        for p in cl.parents(n):
            if p in marked and n not in marked:
                marked[n] = 1
                result.append(n)

    return result
613
613
def getchangegroup(self, remote):
    # discover which changesets the remote has that we lack, then ask it
    # for a changegroup rooted at the earliest unknown nodes
    tip = remote.branches([])[0]
    cl = self.changelog
    unknown = [tip]
    search = []
    fetch = []

    if tip[0] == self.changelog.tip():
        # remote head equals our tip: nothing to pull
        return None

    # classify remote branches: if we know a branch's base, its boundary
    # can be binary-searched; otherwise recurse into its parent branches
    while unknown:
        n = unknown.pop(0)
        if n == nullid: break
        if n[1] and cl.nodemap.has_key(n[1]): # do we know the base?
            search.append(n) # schedule branch range for scanning
        else:
            for b in remote.branches([n[2], n[3]]):
                if cl.nodemap.has_key(b[0]):
                    fetch.append(n[1]) # earliest unknown
                else:
                    unknown.append(b)

    # binary search over the scheduled (head, base) ranges using the
    # exponentially spaced samples from remote.between()
    while search:
        n = search.pop(0)
        l = remote.between([(n[0], n[1])])[0]
        p = n[0]
        f = 1
        for i in l + [n[1]]:
            if self.changelog.nodemap.has_key(i):
                if f <= 4:
                    # gap is small enough: fetch from here
                    fetch.append(p)
                else:
                    # narrow the range further
                    search.append((p, i))
            p, f = i, f * 2

    return remote.changegroup(fetch)
650
650
def changegroup(self, basenodes):
    # generate the pieces of a changegroup for everything newer than
    # basenodes: the changeset group, the manifest group, then one
    # (length, name, group) chunk per changed file
    nodes = self.newer(basenodes)

    # construct the link map
    linkmap = {}
    for n in nodes:
        linkmap[self.changelog.rev(n)] = n

    # construct a list of all changed files
    changed = {}
    for n in nodes:
        c = self.changelog.read(n)
        for f in c[3]:
            changed[f] = 1
    changed = changed.keys()
    changed.sort()

    # the changegroup is changesets + manifests + all file revs
    # NOTE(review): "revs" is computed but never used below
    revs = [ self.changelog.rev(n) for n in nodes ]

    yield self.changelog.group(linkmap)
    yield self.manifest.group(linkmap)

    for f in changed:
        g = self.file(f).group(linkmap)
        if not g: raise "couldn't find change to %s" % f
        # big-endian 32-bit name length, then the name, then its group
        l = struct.pack(">l", len(f))
        yield "".join([l, f, g])
679
679
def addchangegroup(self, data):
    """Apply a serialized changegroup (changesets + manifests + per-file
    revision groups) to this repository, committing a merge changeset
    when the incoming history diverged from ours."""
    def getlen(data, pos):
        # every section is prefixed with a big-endian 32-bit length
        return struct.unpack(">l", data[pos:pos + 4])[0]

    if not data: return

    tr = self.transaction()
    simple = True

    # status output now goes through self.ui like merge() does,
    # instead of bare print statements
    self.ui.status("merging changesets\n")
    # pull off the changeset group
    l = getlen(data, 0)
    csg = data[0:l]
    pos = l
    co = self.changelog.tip()
    cn = self.changelog.addgroup(csg, lambda x: self.changelog.count(), tr)

    self.ui.status("merging manifests\n")
    # pull off the manifest group
    l = getlen(data, pos)
    mfg = data[pos: pos + l]
    pos += l
    mo = self.manifest.tip()
    mn = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)

    # do we need a resolve?
    if self.changelog.ancestor(co, cn) != co:
        self.ui.status("NEED RESOLVE\n")
        simple = False
        resolverev = self.changelog.count()

    # process the files
    self.ui.status("merging files\n")
    new = {}
    while pos < len(data):
        l = getlen(data, pos)
        pos += 4
        f = data[pos:pos + l]
        pos += l

        l = getlen(data, pos)
        fg = data[pos: pos + l]
        pos += l

        fl = self.file(f)
        o = fl.tip()
        n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
        if not simple:
            # BUG FIX: was new[fl] (keyed by the filelog object), but the
            # manifest resolution below looks entries up by file name
            new[f] = fl.resolvedag(o, n, tr, resolverev)

    # For simple merges, we don't need to resolve manifests or changesets
    if simple:
        tr.close()
        return

    # resolve the manifest to point to all the merged files
    self.ui.status("resolving manifests\n")
    # BUG FIX: the original referenced an undefined name "mm"; the two
    # manifest heads here are mn (new tip from addgroup) and mo (old tip)
    ma = self.manifest.ancestor(mn, mo)
    mmap = self.manifest.read(mn) # mine
    omap = self.manifest.read(mo) # other
    amap = self.manifest.read(ma) # ancestor
    nmap = {}

    for f, mid in mmap.iteritems():
        if f in omap:
            if mid != omap[f]:
                nmap[f] = new.get(f, mid) # use merged version
            else:
                nmap[f] = new.get(f, mid) # they're the same
            del omap[f]
        elif f in amap:
            if mid != amap[f]:
                pass # we should prompt here
            else:
                pass # other deleted it
        else:
            nmap[f] = new.get(f, mid) # we created it

    del mmap

    for f, oid in omap.iteritems():
        if f in amap:
            if oid != amap[f]:
                pass # this is the nasty case, we should prompt
            else:
                pass # probably safe
        else:
            nmap[f] = new.get(f, oid) # remote created it

    del omap
    del amap

    node = self.manifest.add(nmap, tr, resolverev, mn, mo)

    # Now all files and manifests are merged, we add the changed files
    # and manifest id to the changelog
    self.ui.status("committing merge changeset\n")
    new = new.keys()
    new.sort()
    if co == cn: cn = -1

    edittext = "\n"+"".join(["HG: changed %s\n" % f for f in new])
    edittext = self.ui.edit(edittext)
    n = self.changelog.add(node, new, edittext, tr, co, cn)

    tr.close()
786
786
class remoterepository:
    """Read-only proxy for a repository served over hg:// (plain HTTP CGI)."""
    def __init__(self, ui, path):
        self.url = path.replace("hg://", "http://", 1)
        self.ui = ui

    def do_cmd(self, cmd, **args):
        # issue a query-string command to the remote CGI, return raw body
        query = {"cmd": cmd}
        query.update(args)
        return urllib.urlopen("%s?%s" % (self.url, urllib.urlencode(query))).read()

    def branches(self, nodes):
        # one branch tuple of binary node ids per response line
        data = self.do_cmd("branches", nodes=" ".join(map(hex, nodes)))
        return [ map(bin, line.split(" ")) for line in data.splitlines() ]

    def between(self, pairs):
        # pairs are sent as hex "top-bottom" specs, one per line
        spec = "\n".join(["-".join(map(hex, p)) for p in pairs])
        data = self.do_cmd("between", pairs=spec)
        return [ map(bin, line.split(" ")) for line in data.splitlines() ]

    def changegroup(self, nodes):
        # the server compresses the group; hand back the raw bytes
        data = self.do_cmd("changegroup", roots=" ".join(map(hex, nodes)))
        return zlib.decompress(data)
815
815
def repository(ui, path=None, create=0):
    """Factory: return a remote proxy for hg:// paths, otherwise a local
    repository (optionally creating it)."""
    if path and path.startswith("hg://"):
        return remoterepository(ui, path)
    return localrepository(ui, path, create)
821
821
class ui:
    """Minimal user interface: console output at several verbosity
    levels, pattern-validated prompts, and $EDITOR-based text editing."""
    def __init__(self, verbose=False, debug=False):
        self.verbose = verbose
        # BUG FIX: the debug flag was never stored, so "self.debug" in
        # debug() resolved to the (always-truthy) bound method and debug
        # output was unconditionally printed.  Store it under its own name.
        self.debugflag = debug
    def write(self, *args):
        for a in args:
            sys.stdout.write(str(a))
    def prompt(self, msg, pat):
        # re-ask until the reply matches the pattern
        while 1:
            sys.stdout.write(msg)
            r = sys.stdin.readline()[:-1]
            if re.match(pat, r):
                return r
    def status(self, *msg):
        self.write(*msg)
    def warn(self, msg):
        # was write(*msg), which unpacked the string into characters
        self.write(msg)
    def note(self, msg):
        if self.verbose: self.write(msg)
    def debug(self, msg):
        if self.debugflag: self.write(msg)
    def edit(self, text):
        # write text to a temp file, run $EDITOR on it, and return the
        # edited content with all "HG:" helper lines stripped
        (fd, name) = tempfile.mkstemp("hg")
        f = os.fdopen(fd, "w")
        f.write(text)
        f.close()

        editor = os.environ.get("EDITOR", "vi")
        r = os.system("%s %s" % (editor, name))
        if r:
            raise "Edit failed!"

        t = open(name).read()
        t = re.sub("(?m)^HG:.*\n", "", t)
        # clean up the temp file instead of leaking one per edit
        os.unlink(name)

        return t
857
857
858
858
class httprangereader:
    """File-like reader over a remote URL using HTTP Range requests, so
    revlog index/data can be fetched piecemeal."""
    def __init__(self, url):
        self.url = url
        self.pos = 0
    def seek(self, pos):
        self.pos = pos
    def read(self, bytes=None):
        # install a Range-capable opener, then request only the slice
        # [pos, pos+bytes); an empty end means "to end of file"
        handler = urllib2.build_opener(byterange.HTTPRangeHandler())
        urllib2.install_opener(handler)
        req = urllib2.Request(self.url)
        end = ''
        if bytes:
            end = self.pos + bytes
        req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
        return urllib2.urlopen(req).read()
@@ -1,77 +1,75
1 #!/usr/bin/python
1 #!/usr/bin/python
2 import difflib, struct
2 import difflib, struct
3 from cStringIO import StringIO
3 from cStringIO import StringIO
4
4
5 def unidiff(a, b, fn):
5 def unidiff(a, ad, b, bd, fn):
6 if not a and not b: return ""
6 if not a and not b: return ""
7 a = a.splitlines(1)
7 a = a.splitlines(1)
8 b = b.splitlines(1)
8 b = b.splitlines(1)
9 l = list(difflib.unified_diff(a, b, fn, fn))
9 l = list(difflib.unified_diff(a, b, "a/" + fn, "b/" + fn, ad, bd))
10 return "".join(l)
10 return "".join(l)
11
11
12 def textdiff(a, b):
12 def textdiff(a, b):
13 return diff(a.splitlines(1), b.splitlines(1))
13 return diff(a.splitlines(1), b.splitlines(1))
14
14
15 def sortdiff(a, b):
15 def sortdiff(a, b):
16 la = lb = 0
16 la = lb = 0
17
17
18 while 1:
18 while 1:
19 if la >= len(a) or lb >= len(b): break
19 if la >= len(a) or lb >= len(b): break
20 if b[lb] < a[la]:
20 if b[lb] < a[la]:
21 si = lb
21 si = lb
22 while lb < len(b) and b[lb] < a[la] : lb += 1
22 while lb < len(b) and b[lb] < a[la] : lb += 1
23 yield "insert", la, la, si, lb
23 yield "insert", la, la, si, lb
24 elif a[la] < b[lb]:
24 elif a[la] < b[lb]:
25 si = la
25 si = la
26 while la < len(a) and a[la] < b[lb]: la += 1
26 while la < len(a) and a[la] < b[lb]: la += 1
27 yield "delete", si, la, lb, lb
27 yield "delete", si, la, lb, lb
28 else:
28 else:
29 la += 1
29 la += 1
30 lb += 1
30 lb += 1
31
31
32 si = lb
32 if lb < len(b):
33 while lb < len(b):
33 yield "insert", la, la, lb, len(b)
34 lb += 1
35 yield "insert", la, la, si, lb
36
34
37 si = la
35 if la < len(a):
38 while la < len(a):
36 yield "delete", la, len(a), lb, lb
39 la += 1
40 yield "delete", si, la, lb, lb
41
37
42 def diff(a, b, sorted=0):
38 def diff(a, b, sorted=0):
43 bin = []
39 bin = []
44 p = [0]
40 p = [0]
45 for i in a: p.append(p[-1] + len(i))
41 for i in a: p.append(p[-1] + len(i))
46
42
47 if sorted:
43 if sorted:
48 d = sortdiff(a, b)
44 d = sortdiff(a, b)
49 else:
45 else:
50 d = difflib.SequenceMatcher(None, a, b).get_opcodes()
46 d = difflib.SequenceMatcher(None, a, b).get_opcodes()
51
47
52 for o, m, n, s, t in d:
48 for o, m, n, s, t in d:
53 if o == 'equal': continue
49 if o == 'equal': continue
54 s = "".join(b[s:t])
50 s = "".join(b[s:t])
55 bin.append(struct.pack(">lll", p[m], p[n], len(s)) + s)
51 bin.append(struct.pack(">lll", p[m], p[n], len(s)) + s)
56
52
57 return "".join(bin)
53 return "".join(bin)
58
54
59 def patch(a, bin):
55 def patch(a, bin):
60 last = pos = 0
56 last = pos = 0
61 r = []
57 r = []
62
58
59 c = 0
63 while pos < len(bin):
60 while pos < len(bin):
64 p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
61 p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
65 pos += 12
62 pos += 12
66 r.append(a[last:p1])
63 r.append(a[last:p1])
67 r.append(bin[pos:pos + l])
64 r.append(bin[pos:pos + l])
68 pos += l
65 pos += l
69 last = p2
66 last = p2
67 c += 1
70 r.append(a[last:])
68 r.append(a[last:])
71
69
72 return "".join(r)
70 return "".join(r)
73
71
74
72
75
73
76
74
77
75
@@ -1,412 +1,414
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # This provides efficient delta storage with O(1) retrieve and append
3 # This provides efficient delta storage with O(1) retrieve and append
4 # and O(changes) merge between branches
4 # and O(changes) merge between branches
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 import zlib, struct, sha, os, tempfile, binascii
11 import zlib, struct, sha, os, tempfile, binascii
12 from mercurial import mdiff
12 from mercurial import mdiff
13
13
14 def hex(node): return binascii.hexlify(node)
14 def hex(node): return binascii.hexlify(node)
15 def bin(node): return binascii.unhexlify(node)
15 def bin(node): return binascii.unhexlify(node)
16
16
17 def compress(text):
17 def compress(text):
18 return zlib.compress(text)
18 return zlib.compress(text)
19
19
20 def decompress(bin):
20 def decompress(bin):
21 return zlib.decompress(bin)
21 return zlib.decompress(bin)
22
22
23 def hash(text, p1, p2):
23 def hash(text, p1, p2):
24 l = [p1, p2]
24 l = [p1, p2]
25 l.sort()
25 l.sort()
26 return sha.sha(l[0] + l[1] + text).digest()
26 return sha.sha(l[0] + l[1] + text).digest()
27
27
28 nullid = "\0" * 20
28 nullid = "\0" * 20
29 indexformat = ">4l20s20s20s"
29 indexformat = ">4l20s20s20s"
30
30
31 class revlog:
31 class revlog:
32 def __init__(self, opener, indexfile, datafile):
32 def __init__(self, opener, indexfile, datafile):
33 self.indexfile = indexfile
33 self.indexfile = indexfile
34 self.datafile = datafile
34 self.datafile = datafile
35 self.index = []
35 self.index = []
36 self.opener = opener
36 self.opener = opener
37 self.cache = None
37 self.cache = None
38 self.nodemap = {nullid: -1}
38 self.nodemap = {nullid: -1}
39 # read the whole index for now, handle on-demand later
39 # read the whole index for now, handle on-demand later
40 try:
40 try:
41 n = 0
41 n = 0
42 i = self.opener(self.indexfile).read()
42 i = self.opener(self.indexfile).read()
43 s = struct.calcsize(indexformat)
43 s = struct.calcsize(indexformat)
44 for f in range(0, len(i), s):
44 for f in range(0, len(i), s):
45 # offset, size, base, linkrev, p1, p2, nodeid
45 # offset, size, base, linkrev, p1, p2, nodeid
46 e = struct.unpack(indexformat, i[f:f + s])
46 e = struct.unpack(indexformat, i[f:f + s])
47 self.nodemap[e[6]] = n
47 self.nodemap[e[6]] = n
48 self.index.append(e)
48 self.index.append(e)
49 n += 1
49 n += 1
50 except IOError: pass
50 except IOError: pass
51
51
52 def tip(self): return self.node(len(self.index) - 1)
52 def tip(self): return self.node(len(self.index) - 1)
53 def count(self): return len(self.index)
53 def count(self): return len(self.index)
54 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
54 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
55 def rev(self, node): return self.nodemap[node]
55 def rev(self, node): return self.nodemap[node]
56 def linkrev(self, node): return self.index[self.nodemap[node]][3]
56 def linkrev(self, node): return self.index[self.nodemap[node]][3]
57 def parents(self, node):
57 def parents(self, node):
58 if node == nullid: return (nullid, nullid)
58 if node == nullid: return (nullid, nullid)
59 return self.index[self.nodemap[node]][4:6]
59 return self.index[self.nodemap[node]][4:6]
60
60
61 def start(self, rev): return self.index[rev][0]
61 def start(self, rev): return self.index[rev][0]
62 def length(self, rev): return self.index[rev][1]
62 def length(self, rev): return self.index[rev][1]
63 def end(self, rev): return self.start(rev) + self.length(rev)
63 def end(self, rev): return self.start(rev) + self.length(rev)
64 def base(self, rev): return self.index[rev][2]
64 def base(self, rev): return self.index[rev][2]
65
65
66 def lookup(self, id):
66 def lookup(self, id):
67 try:
67 try:
68 rev = int(id)
68 rev = int(id)
69 return self.node(rev)
69 return self.node(rev)
70 except ValueError:
70 except ValueError:
71 c = []
71 c = []
72 for n in self.nodemap:
72 for n in self.nodemap:
73 if id in hex(n):
73 if id in hex(n):
74 c.append(n)
74 c.append(n)
75 if len(c) > 1: raise KeyError("Ambiguous identifier")
75 if len(c) > 1: raise KeyError("Ambiguous identifier")
76 if len(c) < 1: raise KeyError
76 if len(c) < 1: raise KeyError
77 return c[0]
77 return c[0]
78
78
79 return None
79 return None
80
80
81 def revisions(self, list):
81 def revisions(self, list):
82 # this can be optimized to do spans, etc
82 # this can be optimized to do spans, etc
83 # be stupid for now
83 # be stupid for now
84 for node in list:
84 for node in list:
85 yield self.revision(node)
85 yield self.revision(node)
86
86
87 def diff(self, a, b):
87 def diff(self, a, b):
88 return mdiff.textdiff(a, b)
88 return mdiff.textdiff(a, b)
89
89
90 def patch(self, text, patch):
90 def patch(self, text, patch):
91 return mdiff.patch(text, patch)
91 return mdiff.patch(text, patch)
92
92
93 def revision(self, node):
93 def revision(self, node):
94 if node == nullid: return ""
94 if node == nullid: return ""
95 if self.cache and self.cache[0] == node: return self.cache[2]
95 if self.cache and self.cache[0] == node: return self.cache[2]
96
96
97 text = None
97 text = None
98 rev = self.rev(node)
98 rev = self.rev(node)
99 base = self.base(rev)
99 base = self.base(rev)
100 start = self.start(base)
100 start = self.start(base)
101 end = self.end(rev)
101 end = self.end(rev)
102
102
103 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
103 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
104 base = self.cache[1]
104 base = self.cache[1]
105 start = self.start(base + 1)
105 start = self.start(base + 1)
106 text = self.cache[2]
106 text = self.cache[2]
107 last = 0
107 last = 0
108
108
109 f = self.opener(self.datafile)
109 f = self.opener(self.datafile)
110 f.seek(start)
110 f.seek(start)
111 data = f.read(end - start)
111 data = f.read(end - start)
112
112
113 if not text:
113 if not text:
114 last = self.length(base)
114 last = self.length(base)
115 text = decompress(data[:last])
115 text = decompress(data[:last])
116
116
117 for r in range(base + 1, rev + 1):
117 for r in xrange(base + 1, rev + 1):
118 s = self.length(r)
118 s = self.length(r)
119 b = decompress(data[last:last + s])
119 b = decompress(data[last:last + s])
120 text = self.patch(text, b)
120 text = self.patch(text, b)
121 last = last + s
121 last = last + s
122
122
123 (p1, p2) = self.parents(node)
123 (p1, p2) = self.parents(node)
124 if node != hash(text, p1, p2):
124 if node != hash(text, p1, p2):
125 raise "integrity check failed on %s:%d" % (self.datafile, rev)
125 raise "integrity check failed on %s:%d" % (self.datafile, rev)
126
126
127 self.cache = (node, rev, text)
127 self.cache = (node, rev, text)
128 return text
128 return text
129
129
130 def addrevision(self, text, transaction, link, p1=None, p2=None):
130 def addrevision(self, text, transaction, link, p1=None, p2=None):
131 if text is None: text = ""
131 if text is None: text = ""
132 if p1 is None: p1 = self.tip()
132 if p1 is None: p1 = self.tip()
133 if p2 is None: p2 = nullid
133 if p2 is None: p2 = nullid
134
134
135 node = hash(text, p1, p2)
135 node = hash(text, p1, p2)
136
136
137 n = self.count()
137 n = self.count()
138 t = n - 1
138 t = n - 1
139
139
140 if n:
140 if n:
141 start = self.start(self.base(t))
141 base = self.base(t)
142 start = self.start(base)
142 end = self.end(t)
143 end = self.end(t)
143 prev = self.revision(self.tip())
144 prev = self.revision(self.tip())
144 data = compress(self.diff(prev, text))
145 data = compress(self.diff(prev, text))
146 dist = end - start + len(data)
145
147
146 # full versions are inserted when the needed deltas
148 # full versions are inserted when the needed deltas
147 # become comparable to the uncompressed text
149 # become comparable to the uncompressed text
148 if not n or (end + len(data) - start) > len(text) * 2:
150 if not n or dist > len(text) * 2:
149 data = compress(text)
151 data = compress(text)
150 base = n
152 base = n
151 else:
153 else:
152 base = self.base(t)
154 base = self.base(t)
153
155
154 offset = 0
156 offset = 0
155 if t >= 0:
157 if t >= 0:
156 offset = self.end(t)
158 offset = self.end(t)
157
159
158 e = (offset, len(data), base, link, p1, p2, node)
160 e = (offset, len(data), base, link, p1, p2, node)
159
161
160 self.index.append(e)
162 self.index.append(e)
161 self.nodemap[node] = n
163 self.nodemap[node] = n
162 entry = struct.pack(indexformat, *e)
164 entry = struct.pack(indexformat, *e)
163
165
164 transaction.add(self.datafile, e[0])
166 transaction.add(self.datafile, e[0])
165 self.opener(self.datafile, "a").write(data)
167 self.opener(self.datafile, "a").write(data)
166 transaction.add(self.indexfile, n * len(entry))
168 transaction.add(self.indexfile, n * len(entry))
167 self.opener(self.indexfile, "a").write(entry)
169 self.opener(self.indexfile, "a").write(entry)
168
170
169 self.cache = (node, n, text)
171 self.cache = (node, n, text)
170 return node
172 return node
171
173
172 def ancestor(self, a, b):
174 def ancestor(self, a, b):
173 def expand(list, map):
175 def expand(list, map):
174 a = []
176 a = []
175 while list:
177 while list:
176 n = list.pop(0)
178 n = list.pop(0)
177 map[n] = 1
179 map[n] = 1
178 yield n
180 yield n
179 for p in self.parents(n):
181 for p in self.parents(n):
180 if p != nullid and p not in map:
182 if p != nullid and p not in map:
181 list.append(p)
183 list.append(p)
182 yield nullid
184 yield nullid
183
185
184 amap = {}
186 amap = {}
185 bmap = {}
187 bmap = {}
186 ag = expand([a], amap)
188 ag = expand([a], amap)
187 bg = expand([b], bmap)
189 bg = expand([b], bmap)
188 adone = bdone = 0
190 adone = bdone = 0
189
191
190 while not adone or not bdone:
192 while not adone or not bdone:
191 if not adone:
193 if not adone:
192 an = ag.next()
194 an = ag.next()
193 if an == nullid:
195 if an == nullid:
194 adone = 1
196 adone = 1
195 elif an in bmap:
197 elif an in bmap:
196 return an
198 return an
197 if not bdone:
199 if not bdone:
198 bn = bg.next()
200 bn = bg.next()
199 if bn == nullid:
201 if bn == nullid:
200 bdone = 1
202 bdone = 1
201 elif bn in amap:
203 elif bn in amap:
202 return bn
204 return bn
203
205
204 return nullid
206 return nullid
205
207
206 def mergedag(self, other, transaction, linkseq, accumulate = None):
208 def mergedag(self, other, transaction, linkseq, accumulate = None):
207 """combine the nodes from other's DAG into ours"""
209 """combine the nodes from other's DAG into ours"""
208 old = self.tip()
210 old = self.tip()
209 i = self.count()
211 i = self.count()
210 l = []
212 l = []
211
213
212 # merge the other revision log into our DAG
214 # merge the other revision log into our DAG
213 for r in range(other.count()):
215 for r in range(other.count()):
214 id = other.node(r)
216 id = other.node(r)
215 if id not in self.nodemap:
217 if id not in self.nodemap:
216 (xn, yn) = other.parents(id)
218 (xn, yn) = other.parents(id)
217 l.append((id, xn, yn))
219 l.append((id, xn, yn))
218 self.nodemap[id] = i
220 self.nodemap[id] = i
219 i += 1
221 i += 1
220
222
221 # merge node date for new nodes
223 # merge node date for new nodes
222 r = other.revisions([e[0] for e in l])
224 r = other.revisions([e[0] for e in l])
223 for e in l:
225 for e in l:
224 t = r.next()
226 t = r.next()
225 if accumulate: accumulate(t)
227 if accumulate: accumulate(t)
226 self.addrevision(t, transaction, linkseq.next(), e[1], e[2])
228 self.addrevision(t, transaction, linkseq.next(), e[1], e[2])
227
229
228 # return the unmerged heads for later resolving
230 # return the unmerged heads for later resolving
229 return (old, self.tip())
231 return (old, self.tip())
230
232
231 def group(self, linkmap):
233 def group(self, linkmap):
232 # given a list of changeset revs, return a set of deltas and
234 # given a list of changeset revs, return a set of deltas and
233 # metadata corresponding to nodes the first delta is
235 # metadata corresponding to nodes the first delta is
234 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
236 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
235 # have this parent as it has all history before these
237 # have this parent as it has all history before these
236 # changesets. parent is parent[0]
238 # changesets. parent is parent[0]
237
239
238 revs = []
240 revs = []
239 needed = {}
241 needed = {}
240
242
241 # find file nodes/revs that match changeset revs
243 # find file nodes/revs that match changeset revs
242 for i in xrange(0, self.count()):
244 for i in xrange(0, self.count()):
243 if self.index[i][3] in linkmap:
245 if self.index[i][3] in linkmap:
244 revs.append(i)
246 revs.append(i)
245 needed[i] = 1
247 needed[i] = 1
246
248
247 # if we don't have any revisions touched by these changesets, bail
249 # if we don't have any revisions touched by these changesets, bail
248 if not revs: return struct.pack(">l", 0)
250 if not revs: return struct.pack(">l", 0)
249
251
250 # add the parent of the first rev
252 # add the parent of the first rev
251 p = self.parents(self.node(revs[0]))[0]
253 p = self.parents(self.node(revs[0]))[0]
252 revs.insert(0, self.rev(p))
254 revs.insert(0, self.rev(p))
253
255
254 # for each delta that isn't contiguous in the log, we need to
256 # for each delta that isn't contiguous in the log, we need to
255 # reconstruct the base, reconstruct the result, and then
257 # reconstruct the base, reconstruct the result, and then
256 # calculate the delta. We also need to do this where we've
258 # calculate the delta. We also need to do this where we've
257 # stored a full version and not a delta
259 # stored a full version and not a delta
258 for i in xrange(0, len(revs) - 1):
260 for i in xrange(0, len(revs) - 1):
259 a, b = revs[i], revs[i + 1]
261 a, b = revs[i], revs[i + 1]
260 if a + 1 != b or self.base(b) == b:
262 if a + 1 != b or self.base(b) == b:
261 for j in xrange(self.base(a), a + 1):
263 for j in xrange(self.base(a), a + 1):
262 needed[j] = 1
264 needed[j] = 1
263 for j in xrange(self.base(b), b + 1):
265 for j in xrange(self.base(b), b + 1):
264 needed[j] = 1
266 needed[j] = 1
265
267
266 # calculate spans to retrieve from datafile
268 # calculate spans to retrieve from datafile
267 needed = needed.keys()
269 needed = needed.keys()
268 needed.sort()
270 needed.sort()
269 spans = []
271 spans = []
270 for n in needed:
272 for n in needed:
271 if n < 0: continue
273 if n < 0: continue
272 o = self.start(n)
274 o = self.start(n)
273 l = self.length(n)
275 l = self.length(n)
274 spans.append((o, l, [(n, l)]))
276 spans.append((o, l, [(n, l)]))
275
277
276 # merge spans
278 # merge spans
277 merge = [spans.pop(0)]
279 merge = [spans.pop(0)]
278 while spans:
280 while spans:
279 e = spans.pop(0)
281 e = spans.pop(0)
280 f = merge[-1]
282 f = merge[-1]
281 if e[0] == f[0] + f[1]:
283 if e[0] == f[0] + f[1]:
282 merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
284 merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
283 else:
285 else:
284 merge.append(e)
286 merge.append(e)
285
287
286 # read spans in, divide up chunks
288 # read spans in, divide up chunks
287 chunks = {}
289 chunks = {}
288 for span in merge:
290 for span in merge:
289 # we reopen the file for each span to make http happy for now
291 # we reopen the file for each span to make http happy for now
290 f = self.opener(self.datafile)
292 f = self.opener(self.datafile)
291 f.seek(span[0])
293 f.seek(span[0])
292 data = f.read(span[1])
294 data = f.read(span[1])
293
295
294 # divide up the span
296 # divide up the span
295 pos = 0
297 pos = 0
296 for r, l in span[2]:
298 for r, l in span[2]:
297 chunks[r] = data[pos: pos + l]
299 chunks[r] = data[pos: pos + l]
298 pos += l
300 pos += l
299
301
300 # helper to reconstruct intermediate versions
302 # helper to reconstruct intermediate versions
301 def construct(text, base, rev):
303 def construct(text, base, rev):
302 for r in range(base + 1, rev + 1):
304 for r in range(base + 1, rev + 1):
303 b = decompress(chunks[r])
305 b = decompress(chunks[r])
304 text = self.patch(text, b)
306 text = self.patch(text, b)
305 return text
307 return text
306
308
307 # build deltas
309 # build deltas
308 deltas = []
310 deltas = []
309 for d in range(0, len(revs) - 1):
311 for d in range(0, len(revs) - 1):
310 a, b = revs[d], revs[d + 1]
312 a, b = revs[d], revs[d + 1]
311 n = self.node(b)
313 n = self.node(b)
312
314
313 if a + 1 != b or self.base(b) == b:
315 if a + 1 != b or self.base(b) == b:
314 if a >= 0:
316 if a >= 0:
315 base = self.base(a)
317 base = self.base(a)
316 ta = decompress(chunks[self.base(a)])
318 ta = decompress(chunks[self.base(a)])
317 ta = construct(ta, base, a)
319 ta = construct(ta, base, a)
318 else:
320 else:
319 ta = ""
321 ta = ""
320
322
321 base = self.base(b)
323 base = self.base(b)
322 if a > base:
324 if a > base:
323 base = a
325 base = a
324 tb = ta
326 tb = ta
325 else:
327 else:
326 tb = decompress(chunks[self.base(b)])
328 tb = decompress(chunks[self.base(b)])
327 tb = construct(tb, base, b)
329 tb = construct(tb, base, b)
328 d = self.diff(ta, tb)
330 d = self.diff(ta, tb)
329 else:
331 else:
330 d = decompress(chunks[b])
332 d = decompress(chunks[b])
331
333
332 p = self.parents(n)
334 p = self.parents(n)
333 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
335 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
334 l = struct.pack(">l", len(meta) + len(d) + 4)
336 l = struct.pack(">l", len(meta) + len(d) + 4)
335 deltas.append(l + meta + d)
337 deltas.append(l + meta + d)
336
338
337 l = struct.pack(">l", sum(map(len, deltas)) + 4)
339 l = struct.pack(">l", sum(map(len, deltas)) + 4)
338 deltas.insert(0, l)
340 deltas.insert(0, l)
339 return "".join(deltas)
341 return "".join(deltas)
340
342
341 def addgroup(self, data, linkmapper, transaction):
343 def addgroup(self, data, linkmapper, transaction):
342 # given a set of deltas, add them to the revision log. the
344 # given a set of deltas, add them to the revision log. the
343 # first delta is against its parent, which should be in our
345 # first delta is against its parent, which should be in our
344 # log, the rest are against the previous delta.
346 # log, the rest are against the previous delta.
345
347
346 if len(data) <= 4: return
348 if len(data) <= 4: return
347
349
348 # retrieve the parent revision of the delta chain
350 # retrieve the parent revision of the delta chain
349 chain = data[28:48]
351 chain = data[28:48]
350 text = self.revision(chain)
352 text = self.revision(chain)
351
353
352 # track the base of the current delta log
354 # track the base of the current delta log
353 r = self.count()
355 r = self.count()
354 t = r - 1
356 t = r - 1
355
357
356 base = prev = -1
358 base = prev = -1
357 start = end = 0
359 start = end = 0
358 if r:
360 if r:
359 start = self.start(self.base(t))
361 start = self.start(self.base(t))
360 end = self.end(t)
362 end = self.end(t)
361 measure = self.length(self.base(t))
363 measure = self.length(self.base(t))
362 base = self.base(t)
364 base = self.base(t)
363 prev = self.tip()
365 prev = self.tip()
364
366
365 transaction.add(self.datafile, end)
367 transaction.add(self.datafile, end)
366 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
368 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
367 dfh = self.opener(self.datafile, "a")
369 dfh = self.opener(self.datafile, "a")
368 ifh = self.opener(self.indexfile, "a")
370 ifh = self.opener(self.indexfile, "a")
369
371
370 # loop through our set of deltas
372 # loop through our set of deltas
371 pos = 4
373 pos = 4
372 while pos < len(data):
374 while pos < len(data):
373 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
375 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
374 data[pos:pos+84])
376 data[pos:pos+84])
375 link = linkmapper(cs)
377 link = linkmapper(cs)
376 delta = data[pos + 84:pos + l]
378 delta = data[pos + 84:pos + l]
377 pos += l
379 pos += l
378
380
379 # full versions are inserted when the needed deltas become
381 # full versions are inserted when the needed deltas become
380 # comparable to the uncompressed text or when the previous
382 # comparable to the uncompressed text or when the previous
381 # version is not the one we have a delta against. We use
383 # version is not the one we have a delta against. We use
382 # the size of the previous full rev as a proxy for the
384 # the size of the previous full rev as a proxy for the
383 # current size.
385 # current size.
384
386
385 if chain == prev:
387 if chain == prev:
386 cdelta = compress(delta)
388 cdelta = compress(delta)
387
389
388 if chain != prev or (end - start + len(cdelta)) > measure * 2:
390 if chain != prev or (end - start + len(cdelta)) > measure * 2:
389 # flush our writes here so we can read it in revision
391 # flush our writes here so we can read it in revision
390 dfh.flush()
392 dfh.flush()
391 ifh.flush()
393 ifh.flush()
392 text = self.revision(self.node(t))
394 text = self.revision(self.node(t))
393 text = self.patch(text, delta)
395 text = self.patch(text, delta)
394 chk = self.addrevision(text, transaction, link, p1, p2)
396 chk = self.addrevision(text, transaction, link, p1, p2)
395 if chk != node:
397 if chk != node:
396 raise "consistency error adding group"
398 raise "consistency error adding group"
397 measure = len(text)
399 measure = len(text)
398 else:
400 else:
399 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
401 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
400 self.index.append(e)
402 self.index.append(e)
401 self.nodemap[node] = r
403 self.nodemap[node] = r
402 dfh.write(cdelta)
404 dfh.write(cdelta)
403 ifh.write(struct.pack(indexformat, *e))
405 ifh.write(struct.pack(indexformat, *e))
404
406
405 t, r = r, r + 1
407 t, r = r, r + 1
406 chain = prev
408 chain = prev
407 start = self.start(self.base(t))
409 start = self.start(self.base(t))
408 end = self.end(t)
410 end = self.end(t)
409
411
410 dfh.close()
412 dfh.close()
411 ifh.close()
413 ifh.close()
412 return node
414 return node
General Comments 0
You need to be logged in to leave comments. Login now