##// END OF EJS Templates
Changes to network protocol...
mpm@selenic.com -
r192:5d855335 default
parent child Browse files
Show More
@@ -1,589 +1,588 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # mercurial - a minimal scalable distributed SCM
3 # mercurial - a minimal scalable distributed SCM
4 # v0.5 "katje"
4 # v0.5 "katje"
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 # the psyco compiler makes commits a bit faster
11 # the psyco compiler makes commits a bit faster
12 # and makes changegroup merge about 20 times slower!
12 # and makes changegroup merge about 20 times slower!
13 # try:
13 # try:
14 # import psyco
14 # import psyco
15 # psyco.full()
15 # psyco.full()
16 # except:
16 # except:
17 # pass
17 # pass
18
18
19 import sys, os, time
19 import sys, os, time
20 from mercurial import hg, mdiff, fancyopts
20 from mercurial import hg, mdiff, fancyopts
21
21
def help():
    """Print the command summary for the hg script.

    Writes to the module-level `ui` object (created from the parsed
    global options), so quiet mode suppresses it.

    NOTE(review): column alignment of the command table below is
    reconstructed — confirm against the original file.
    """
    ui.status("""\
commands:

 add [files...]        add the given files in the next commit
 addremove             add all new files, delete all missing files
 annotate [files...]   show changeset number per file line
 branch <path>         create a branch of <path> in this directory
 checkout [changeset]  checkout the latest or given changeset
 commit                commit all changes to the repository
 diff [files...]       diff working directory (or selected files)
 dump <file> [rev]     dump the latest or given revision of a file
 dumpmanifest [rev]    dump the latest or given revision of the manifest
 history               show changeset history
 init                  create a new repository in this directory
 log <file>            show revision history of a single file
 merge <path>          merge changes from <path> into local repository
 recover               rollback an interrupted transaction
 remove [files...]     remove the given files in the next commit
 serve                 export the repository via HTTP
 status                show new, missing, and changed files in working dir
 tags                  show current changeset tags
 undo                  undo the last transaction
""")
46
46
def filterfiles(list, files):
    """Return the entries of `list` selected by the names in `files`.

    Each name in `files` selects an exact match from `list`, and also
    acts as a directory prefix: every entry under `<name><os.sep>` is
    kept as well.  Order is preserved: exact matches first (in `list`
    order), then prefix matches per filter name.

    NOTE: the first parameter shadows the builtin `list`; the name is
    kept for interface compatibility with existing callers.
    """
    l = [ x for x in list if x in files ]

    for f in files:
        # Skip empty filter names: the original `f[-1]` would raise
        # IndexError, and "" as a prefix would match everything.
        if not f:
            continue
        # Treat the name as a directory: ensure a trailing separator
        # before prefix-matching (unless the caller already supplied one).
        if f[-1] != os.sep:
            f += os.sep
        l += [ x for x in list if x.startswith(f) ]
    return l
54
54
def diff(files = None, node1 = None, node2 = None):
    """Write a unified diff of changed/added/deleted files to stdout.

    files: optional list of names/directory prefixes (see filterfiles)
           restricting which files are diffed.
    node1: base changeset node; defaults to repo.current when node2 is
           not given and node1 is unset.
    node2: optional target changeset node.  When omitted, the working
           directory is diffed against node1 instead.

    Relies on the module-level `repo` object and on Python 2 builtins
    (`file`, dict.has_key).
    """
    def date(c):
        # changelog entry c stores "<timestamp> <tz>" in c[2]
        return time.asctime(time.gmtime(float(c[2].split(' ')[0])))

    if node2:
        # rev-to-rev diff: new-side contents come from node2's manifest
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        (c, a, d) = repo.diffrevs(node1, node2)
        def read(f): return repo.file(f).read(mmap2[f])
        date2 = date(change)
    else:
        # rev-to-working-dir diff: new-side contents come from the
        # filesystem, stamped with the current time
        date2 = time.asctime()
        if not node1:
            node1 = repo.current
        (c, a, d) = repo.diffdir(repo.root, node1)
        a = [] # ignore unknown files in repo, by popular request
        def read(f): return file(os.path.join(repo.root, f)).read()

    # the old side always comes from node1's manifest
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = date(change)

    if files:
        # restrict changed/added/deleted lists to the requested names
        c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))

    for f in c:
        to = ""
        # file may be absent from the base manifest (e.g. newly tracked)
        if mmap.has_key(f):
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
    for f in a:
        to = "" # added file: no old side
        tn = read(f)
        sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
    for f in d:
        to = repo.file(f).read(mmap[f])
        tn = "" # deleted file: no new side
        sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
94
94
95 options = {}
95 options = {}
96 opts = [('v', 'verbose', None, 'verbose'),
96 opts = [('v', 'verbose', None, 'verbose'),
97 ('d', 'debug', None, 'debug'),
97 ('d', 'debug', None, 'debug'),
98 ('q', 'quiet', None, 'quiet'),
98 ('q', 'quiet', None, 'quiet'),
99 ('y', 'noninteractive', None, 'run non-interactively'),
99 ('y', 'noninteractive', None, 'run non-interactively'),
100 ]
100 ]
101
101
102 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
102 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
103 'hg [options] <command> [command options] [files]')
103 'hg [options] <command> [command options] [files]')
104
104
105 try:
105 try:
106 cmd = args[0]
106 cmd = args[0]
107 args = args[1:]
107 args = args[1:]
108 except:
108 except:
109 cmd = ""
109 cmd = ""
110
110
111 ui = hg.ui(options["verbose"], options["debug"], options["quiet"],
111 ui = hg.ui(options["verbose"], options["debug"], options["quiet"],
112 not options["noninteractive"])
112 not options["noninteractive"])
113
113
114 if cmd == "init":
114 if cmd == "init":
115 repo = hg.repository(ui, ".", create=1)
115 repo = hg.repository(ui, ".", create=1)
116 sys.exit(0)
116 sys.exit(0)
117 elif cmd == "branch" or cmd == "clone":
117 elif cmd == "branch" or cmd == "clone":
118 os.system("cp -al %s/.hg .hg" % args[0])
118 os.system("cp -al %s/.hg .hg" % args[0])
119 sys.exit(0)
119 sys.exit(0)
120 elif cmd == "help":
120 elif cmd == "help":
121 help()
121 help()
122 sys.exit(0)
122 sys.exit(0)
123 else:
123 else:
124 try:
124 try:
125 repo = hg.repository(ui=ui)
125 repo = hg.repository(ui=ui)
126 except IOError:
126 except IOError:
127 ui.warn("Unable to open repository\n")
127 ui.warn("Unable to open repository\n")
128 sys.exit(0)
128 sys.exit(0)
129
129
130 relpath = None
130 relpath = None
131 if os.getcwd() != repo.root:
131 if os.getcwd() != repo.root:
132 relpath = os.getcwd()[len(repo.root) + 1: ]
132 relpath = os.getcwd()[len(repo.root) + 1: ]
133
133
134 if cmd == "checkout" or cmd == "co":
134 if cmd == "checkout" or cmd == "co":
135 node = repo.changelog.tip()
135 node = repo.changelog.tip()
136 if args:
136 if args:
137 node = repo.lookup(args[0])
137 node = repo.lookup(args[0])
138 repo.checkout(node)
138 repo.checkout(node)
139
139
140 elif cmd == "add":
140 elif cmd == "add":
141 repo.add(args)
141 repo.add(args)
142
142
143 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
143 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
144 repo.remove(args)
144 repo.remove(args)
145
145
146 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
146 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
147 if 1:
147 if 1:
148 if len(args) > 0:
148 if len(args) > 0:
149 repo.commit(repo.current, args)
149 repo.commit(repo.current, args)
150 else:
150 else:
151 repo.commit(repo.current)
151 repo.commit(repo.current)
152
152
153 elif cmd == "import" or cmd == "patch":
153 elif cmd == "import" or cmd == "patch":
154 try:
154 try:
155 import psyco
155 import psyco
156 psyco.full()
156 psyco.full()
157 except:
157 except:
158 pass
158 pass
159
159
160 ioptions = {}
160 ioptions = {}
161 opts = [('p', 'strip', 1, 'path strip'),
161 opts = [('p', 'strip', 1, 'path strip'),
162 ('b', 'base', "", 'base path'),
162 ('b', 'base', "", 'base path'),
163 ('q', 'quiet', "", 'silence diff')
163 ('q', 'quiet', "", 'silence diff')
164 ]
164 ]
165
165
166 args = fancyopts.fancyopts(args, opts, ioptions,
166 args = fancyopts.fancyopts(args, opts, ioptions,
167 'hg import [options] <patch names>')
167 'hg import [options] <patch names>')
168 d = ioptions["base"]
168 d = ioptions["base"]
169 strip = ioptions["strip"]
169 strip = ioptions["strip"]
170 quiet = ioptions["quiet"] and "> /dev/null" or ""
170 quiet = ioptions["quiet"] and "> /dev/null" or ""
171
171
172 for patch in args:
172 for patch in args:
173 ui.status("applying %s\n" % patch)
173 ui.status("applying %s\n" % patch)
174 pf = os.path.join(d, patch)
174 pf = os.path.join(d, patch)
175
175
176 text = ""
176 text = ""
177 for l in file(pf):
177 for l in file(pf):
178 if l[:4] == "--- ": break
178 if l[:4] == "--- ": break
179 text += l
179 text += l
180
180
181 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
181 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
182 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
182 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
183 f.close()
183 f.close()
184
184
185 if files:
185 if files:
186 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
186 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
187 raise "patch failed!"
187 raise "patch failed!"
188 repo.commit(repo.current, files, text)
188 repo.commit(repo.current, files, text)
189
189
190 elif cmd == "status":
190 elif cmd == "status":
191 (c, a, d) = repo.diffdir(repo.root, repo.current)
191 (c, a, d) = repo.diffdir(repo.root, repo.current)
192 if relpath:
192 if relpath:
193 (c, a, d) = map(lambda x: filterfiles(x, [ relpath ]), (c, a, d))
193 (c, a, d) = map(lambda x: filterfiles(x, [ relpath ]), (c, a, d))
194
194
195 for f in c: print "C", f
195 for f in c: print "C", f
196 for f in a: print "?", f
196 for f in a: print "?", f
197 for f in d: print "R", f
197 for f in d: print "R", f
198
198
199 elif cmd == "diff":
199 elif cmd == "diff":
200 revs = []
200 revs = []
201
201
202 if args:
202 if args:
203 doptions = {}
203 doptions = {}
204 opts = [('r', 'revision', [], 'revision')]
204 opts = [('r', 'revision', [], 'revision')]
205 args = fancyopts.fancyopts(args, opts, doptions,
205 args = fancyopts.fancyopts(args, opts, doptions,
206 'hg diff [options] [files]')
206 'hg diff [options] [files]')
207 revs = map(lambda x: repo.lookup(x), doptions['revision'])
207 revs = map(lambda x: repo.lookup(x), doptions['revision'])
208
208
209 if len(revs) > 2:
209 if len(revs) > 2:
210 self.ui.warn("too many revisions to diff\n")
210 self.ui.warn("too many revisions to diff\n")
211 sys.exit(1)
211 sys.exit(1)
212
212
213 if relpath:
213 if relpath:
214 if not args: args = [ relpath ]
214 if not args: args = [ relpath ]
215 else: args = [ os.path.join(relpath, x) for x in args ]
215 else: args = [ os.path.join(relpath, x) for x in args ]
216
216
217 diff(args, *revs)
217 diff(args, *revs)
218
218
219 elif cmd == "annotate":
219 elif cmd == "annotate":
220 bcache = {}
220 bcache = {}
221
221
222 def getnode(rev):
222 def getnode(rev):
223 return hg.short(repo.changelog.node(rev))
223 return hg.short(repo.changelog.node(rev))
224
224
225 def getname(rev):
225 def getname(rev):
226 try:
226 try:
227 return bcache[rev]
227 return bcache[rev]
228 except KeyError:
228 except KeyError:
229 cl = repo.changelog.read(repo.changelog.node(rev))
229 cl = repo.changelog.read(repo.changelog.node(rev))
230 name = cl[1]
230 name = cl[1]
231 f = name.find('@')
231 f = name.find('@')
232 if f >= 0:
232 if f >= 0:
233 name = name[:f]
233 name = name[:f]
234 bcache[rev] = name
234 bcache[rev] = name
235 return name
235 return name
236
236
237 aoptions = {}
237 aoptions = {}
238 opts = [('r', 'revision', '', 'revision'),
238 opts = [('r', 'revision', '', 'revision'),
239 ('u', 'user', None, 'show user'),
239 ('u', 'user', None, 'show user'),
240 ('n', 'number', None, 'show revision number'),
240 ('n', 'number', None, 'show revision number'),
241 ('c', 'changeset', None, 'show changeset')]
241 ('c', 'changeset', None, 'show changeset')]
242
242
243 args = fancyopts.fancyopts(args, opts, aoptions,
243 args = fancyopts.fancyopts(args, opts, aoptions,
244 'hg annotate [-u] [-c] [-n] [-r id] [files]')
244 'hg annotate [-u] [-c] [-n] [-r id] [files]')
245
245
246 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
246 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
247 if not aoptions['user'] and not aoptions['changeset']:
247 if not aoptions['user'] and not aoptions['changeset']:
248 aoptions['number'] = 1
248 aoptions['number'] = 1
249
249
250 if args:
250 if args:
251 if relpath: args = [ os.path.join(relpath, x) for x in args ]
251 if relpath: args = [ os.path.join(relpath, x) for x in args ]
252 node = repo.current
252 node = repo.current
253 if aoptions['revision']:
253 if aoptions['revision']:
254 node = repo.changelog.lookup(aoptions['revision'])
254 node = repo.changelog.lookup(aoptions['revision'])
255 change = repo.changelog.read(node)
255 change = repo.changelog.read(node)
256 mmap = repo.manifest.read(change[0])
256 mmap = repo.manifest.read(change[0])
257 maxuserlen = 0
257 maxuserlen = 0
258 maxchangelen = 0
258 maxchangelen = 0
259 for f in args:
259 for f in args:
260 lines = repo.file(f).annotate(mmap[f])
260 lines = repo.file(f).annotate(mmap[f])
261 pieces = []
261 pieces = []
262
262
263 for o, f in opmap:
263 for o, f in opmap:
264 if aoptions[o]:
264 if aoptions[o]:
265 l = [ f(n) for n,t in lines ]
265 l = [ f(n) for n,t in lines ]
266 m = max(map(len, l))
266 m = max(map(len, l))
267 pieces.append([ "%*s" % (m, x) for x in l])
267 pieces.append([ "%*s" % (m, x) for x in l])
268
268
269 for p,l in zip(zip(*pieces), lines):
269 for p,l in zip(zip(*pieces), lines):
270 sys.stdout.write(" ".join(p) + ": " + l[1])
270 sys.stdout.write(" ".join(p) + ": " + l[1])
271
271
272 elif cmd == "export":
272 elif cmd == "export":
273 node = repo.lookup(args[0])
273 node = repo.lookup(args[0])
274 prev, other = repo.changelog.parents(node)
274 prev, other = repo.changelog.parents(node)
275 change = repo.changelog.read(node)
275 change = repo.changelog.read(node)
276 print "# HG changeset patch"
276 print "# HG changeset patch"
277 print "# User %s" % change[1]
277 print "# User %s" % change[1]
278 print "# Node ID %s" % hg.hex(node)
278 print "# Node ID %s" % hg.hex(node)
279 print "# Parent %s" % hg.hex(prev)
279 print "# Parent %s" % hg.hex(prev)
280 print
280 print
281 if other != hg.nullid:
281 if other != hg.nullid:
282 print "# Parent %s" % hg.hex(other)
282 print "# Parent %s" % hg.hex(other)
283 print change[4]
283 print change[4]
284
284
285 diff(None, prev, node)
285 diff(None, prev, node)
286
286
287 elif cmd == "debugchangegroup":
287 elif cmd == "debugchangegroup":
288 newer = repo.newer(map(repo.lookup, args))
288 newer = repo.newer(map(repo.lookup, args))
289 for chunk in repo.changegroup(newer):
289 for chunk in repo.changegroup(newer):
290 sys.stdout.write(chunk)
290 sys.stdout.write(chunk)
291
291
292 elif cmd == "debugaddchangegroup":
292 elif cmd == "debugaddchangegroup":
293 data = sys.stdin.read()
293 data = sys.stdin.read()
294 repo.addchangegroup(data)
294 repo.addchangegroup(data)
295
295
296 elif cmd == "addremove":
296 elif cmd == "addremove":
297 (c, a, d) = repo.diffdir(repo.root, repo.current)
297 (c, a, d) = repo.diffdir(repo.root, repo.current)
298 repo.add(a)
298 repo.add(a)
299 repo.remove(d)
299 repo.remove(d)
300
300
301 elif cmd == "history":
301 elif cmd == "history":
302 for i in range(repo.changelog.count()):
302 for i in range(repo.changelog.count()):
303 n = repo.changelog.node(i)
303 n = repo.changelog.node(i)
304 changes = repo.changelog.read(n)
304 changes = repo.changelog.read(n)
305 (p1, p2) = repo.changelog.parents(n)
305 (p1, p2) = repo.changelog.parents(n)
306 (h, h1, h2) = map(hg.hex, (n, p1, p2))
306 (h, h1, h2) = map(hg.hex, (n, p1, p2))
307 (i1, i2) = map(repo.changelog.rev, (p1, p2))
307 (i1, i2) = map(repo.changelog.rev, (p1, p2))
308 print "rev: %4d:%s" % (i, h)
308 print "rev: %4d:%s" % (i, h)
309 print "parents: %4d:%s" % (i1, h1)
309 print "parents: %4d:%s" % (i1, h1)
310 if i2: print " %4d:%s" % (i2, h2)
310 if i2: print " %4d:%s" % (i2, h2)
311 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
311 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
312 hg.hex(changes[0]))
312 hg.hex(changes[0]))
313 print "user:", changes[1]
313 print "user:", changes[1]
314 print "date:", time.asctime(
314 print "date:", time.asctime(
315 time.localtime(float(changes[2].split(' ')[0])))
315 time.localtime(float(changes[2].split(' ')[0])))
316 if ui.verbose: print "files:", " ".join(changes[3])
316 if ui.verbose: print "files:", " ".join(changes[3])
317 print "description:"
317 print "description:"
318 print changes[4]
318 print changes[4]
319
319
320 elif cmd == "tip":
320 elif cmd == "tip":
321 n = repo.changelog.tip()
321 n = repo.changelog.tip()
322 t = repo.changelog.rev(n)
322 t = repo.changelog.rev(n)
323 ui.status("%d:%s\n" % (t, hg.hex(n)))
323 ui.status("%d:%s\n" % (t, hg.hex(n)))
324
324
325 elif cmd == "log":
325 elif cmd == "log":
326
326
327 if len(args) == 1:
327 if len(args) == 1:
328 if relpath:
328 if relpath:
329 args[0] = os.path.join(relpath, args[0])
329 args[0] = os.path.join(relpath, args[0])
330
330
331 r = repo.file(args[0])
331 r = repo.file(args[0])
332 for i in range(r.count()):
332 for i in range(r.count()):
333 n = r.node(i)
333 n = r.node(i)
334 (p1, p2) = r.parents(n)
334 (p1, p2) = r.parents(n)
335 (h, h1, h2) = map(hg.hex, (n, p1, p2))
335 (h, h1, h2) = map(hg.hex, (n, p1, p2))
336 (i1, i2) = map(r.rev, (p1, p2))
336 (i1, i2) = map(r.rev, (p1, p2))
337 cr = r.linkrev(n)
337 cr = r.linkrev(n)
338 cn = hg.hex(repo.changelog.node(cr))
338 cn = hg.hex(repo.changelog.node(cr))
339 print "rev: %4d:%s" % (i, h)
339 print "rev: %4d:%s" % (i, h)
340 print "changeset: %4d:%s" % (cr, cn)
340 print "changeset: %4d:%s" % (cr, cn)
341 print "parents: %4d:%s" % (i1, h1)
341 print "parents: %4d:%s" % (i1, h1)
342 if i2: print " %4d:%s" % (i2, h2)
342 if i2: print " %4d:%s" % (i2, h2)
343 changes = repo.changelog.read(repo.changelog.node(cr))
343 changes = repo.changelog.read(repo.changelog.node(cr))
344 print "user: %s" % changes[1]
344 print "user: %s" % changes[1]
345 print "date: %s" % time.asctime(
345 print "date: %s" % time.asctime(
346 time.localtime(float(changes[2].split(' ')[0])))
346 time.localtime(float(changes[2].split(' ')[0])))
347 print "description:"
347 print "description:"
348 print changes[4]
348 print changes[4]
349 print
349 print
350 elif len(args) > 1:
350 elif len(args) > 1:
351 print "too many args"
351 print "too many args"
352 else:
352 else:
353 print "missing filename"
353 print "missing filename"
354
354
355 elif cmd == "dump":
355 elif cmd == "dump":
356 if args:
356 if args:
357 r = repo.file(args[0])
357 r = repo.file(args[0])
358 n = r.tip()
358 n = r.tip()
359 if len(args) > 1: n = r.lookup(args[1])
359 if len(args) > 1: n = r.lookup(args[1])
360 sys.stdout.write(r.read(n))
360 sys.stdout.write(r.read(n))
361 else:
361 else:
362 print "missing filename"
362 print "missing filename"
363
363
364 elif cmd == "dumpmanifest":
364 elif cmd == "dumpmanifest":
365 n = repo.manifest.tip()
365 n = repo.manifest.tip()
366 if len(args) > 0:
366 if len(args) > 0:
367 n = repo.manifest.lookup(args[0])
367 n = repo.manifest.lookup(args[0])
368 m = repo.manifest.read(n)
368 m = repo.manifest.read(n)
369 files = m.keys()
369 files = m.keys()
370 files.sort()
370 files.sort()
371
371
372 for f in files:
372 for f in files:
373 print hg.hex(m[f]), f
373 print hg.hex(m[f]), f
374
374
375 elif cmd == "debugindex":
375 elif cmd == "debugindex":
376 if ".hg" not in args[0]:
376 if ".hg" not in args[0]:
377 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
377 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
378
378
379 r = hg.revlog(open, args[0], "")
379 r = hg.revlog(open, args[0], "")
380 print " rev offset length base linkrev"+\
380 print " rev offset length base linkrev"+\
381 " p1 p2 nodeid"
381 " p1 p2 nodeid"
382 for i in range(r.count()):
382 for i in range(r.count()):
383 e = r.index[i]
383 e = r.index[i]
384 print "% 6d % 9d % 7d % 6d % 7d %s.. %s.. %s.." % (
384 print "% 6d % 9d % 7d % 6d % 7d %s.. %s.. %s.." % (
385 i, e[0], e[1], e[2], e[3],
385 i, e[0], e[1], e[2], e[3],
386 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
386 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
387
387
388 elif cmd == "debugindexdot":
388 elif cmd == "debugindexdot":
389 if ".hg" not in args[0]:
389 if ".hg" not in args[0]:
390 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
390 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
391
391
392 r = hg.revlog(open, args[0], "")
392 r = hg.revlog(open, args[0], "")
393 print "digraph G {"
393 print "digraph G {"
394 for i in range(r.count()):
394 for i in range(r.count()):
395 e = r.index[i]
395 e = r.index[i]
396 print "\t%d -> %d" % (r.rev(e[4]), i)
396 print "\t%d -> %d" % (r.rev(e[4]), i)
397 if e[5] != hg.nullid:
397 if e[5] != hg.nullid:
398 print "\t%d -> %d" % (r.rev(e[5]), i)
398 print "\t%d -> %d" % (r.rev(e[5]), i)
399 print "}"
399 print "}"
400
400
401 elif cmd == "merge":
401 elif cmd == "merge":
402 (c, a, d) = repo.diffdir(repo.root, repo.current)
402 (c, a, d) = repo.diffdir(repo.root, repo.current)
403 if c:
403 if c:
404 ui.warn("aborting (outstanding changes in working directory)\n")
404 ui.warn("aborting (outstanding changes in working directory)\n")
405 sys.exit(1)
405 sys.exit(1)
406
406
407 if args:
407 if args:
408 paths = {}
408 paths = {}
409 try:
409 try:
410 pf = os.path.join(os.environ["HOME"], ".hgpaths")
410 pf = os.path.join(os.environ["HOME"], ".hgpaths")
411 for l in file(pf):
411 for l in file(pf):
412 name, path = l.split()
412 name, path = l.split()
413 paths[name] = path
413 paths[name] = path
414 except:
414 except:
415 pass
415 pass
416
416
417 if args[0] in paths: args[0] = paths[args[0]]
417 if args[0] in paths: args[0] = paths[args[0]]
418
418
419 other = hg.repository(ui, args[0])
419 other = hg.repository(ui, args[0])
420 ui.status("requesting changegroup\n")
421 cg = repo.getchangegroup(other)
420 cg = repo.getchangegroup(other)
422 repo.addchangegroup(cg)
421 repo.addchangegroup(cg)
423 else:
422 else:
424 print "missing source repository"
423 print "missing source repository"
425
424
426 elif cmd == "tags":
425 elif cmd == "tags":
427 repo.lookup(0) # prime the cache
426 repo.lookup(0) # prime the cache
428 i = repo.tags.items()
427 i = repo.tags.items()
429 i.sort()
428 i.sort()
430 for k, n in i:
429 for k, n in i:
431 try:
430 try:
432 r = repo.changelog.rev(n)
431 r = repo.changelog.rev(n)
433 except KeyError:
432 except KeyError:
434 r = "?"
433 r = "?"
435 print "%-30s %5d:%s" % (k, repo.changelog.rev(n), hg.hex(n))
434 print "%-30s %5d:%s" % (k, repo.changelog.rev(n), hg.hex(n))
436
435
437 elif cmd == "recover":
436 elif cmd == "recover":
438 repo.recover()
437 repo.recover()
439
438
440 elif cmd == "undo":
439 elif cmd == "undo":
441 repo.recover("undo")
440 repo.recover("undo")
442
441
443 elif cmd == "verify":
442 elif cmd == "verify":
444 filelinkrevs = {}
443 filelinkrevs = {}
445 filenodes = {}
444 filenodes = {}
446 manifestchangeset = {}
445 manifestchangeset = {}
447 changesets = revisions = files = 0
446 changesets = revisions = files = 0
448 errors = 0
447 errors = 0
449
448
450 ui.status("checking changesets\n")
449 ui.status("checking changesets\n")
451 for i in range(repo.changelog.count()):
450 for i in range(repo.changelog.count()):
452 changesets += 1
451 changesets += 1
453 n = repo.changelog.node(i)
452 n = repo.changelog.node(i)
454 for p in repo.changelog.parents(n):
453 for p in repo.changelog.parents(n):
455 if p not in repo.changelog.nodemap:
454 if p not in repo.changelog.nodemap:
456 ui.warn("changeset %s has unknown parent %s\n" %
455 ui.warn("changeset %s has unknown parent %s\n" %
457 (hg.short(n), hg.short(p)))
456 (hg.short(n), hg.short(p)))
458 errors += 1
457 errors += 1
459 try:
458 try:
460 changes = repo.changelog.read(n)
459 changes = repo.changelog.read(n)
461 except Exception, inst:
460 except Exception, inst:
462 ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
461 ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
463 errors += 1
462 errors += 1
464
463
465 manifestchangeset[changes[0]] = n
464 manifestchangeset[changes[0]] = n
466 for f in changes[3]:
465 for f in changes[3]:
467 revisions += 1
466 revisions += 1
468 filelinkrevs.setdefault(f, []).append(i)
467 filelinkrevs.setdefault(f, []).append(i)
469
468
470 ui.status("checking manifests\n")
469 ui.status("checking manifests\n")
471 for i in range(repo.manifest.count()):
470 for i in range(repo.manifest.count()):
472 n = repo.manifest.node(i)
471 n = repo.manifest.node(i)
473 for p in repo.manifest.parents(n):
472 for p in repo.manifest.parents(n):
474 if p not in repo.manifest.nodemap:
473 if p not in repo.manifest.nodemap:
475 ui.warn("manifest %s has unknown parent %s\n" %
474 ui.warn("manifest %s has unknown parent %s\n" %
476 (hg.short(n), hg.short(p)))
475 (hg.short(n), hg.short(p)))
477 errors += 1
476 errors += 1
478 ca = repo.changelog.node(repo.manifest.linkrev(n))
477 ca = repo.changelog.node(repo.manifest.linkrev(n))
479 cc = manifestchangeset[n]
478 cc = manifestchangeset[n]
480 if ca != cc:
479 if ca != cc:
481 ui.warn("manifest %s points to %s, not %s\n" %
480 ui.warn("manifest %s points to %s, not %s\n" %
482 (hg.hex(n), hg.hex(ca), hg.hex(cc)))
481 (hg.hex(n), hg.hex(ca), hg.hex(cc)))
483 errors += 1
482 errors += 1
484
483
485 try:
484 try:
486 delta = mdiff.patchtext(repo.manifest.delta(n))
485 delta = mdiff.patchtext(repo.manifest.delta(n))
487 except KeyboardInterrupt:
486 except KeyboardInterrupt:
488 print "aborted"
487 print "aborted"
489 sys.exit(0)
488 sys.exit(0)
490 except Exception, inst:
489 except Exception, inst:
491 ui.warn("unpacking manifest %s: %s\n" % (hg.short(n), inst))
490 ui.warn("unpacking manifest %s: %s\n" % (hg.short(n), inst))
492 errors += 1
491 errors += 1
493
492
494 ff = [ l.split('\0') for l in delta.splitlines() ]
493 ff = [ l.split('\0') for l in delta.splitlines() ]
495 for f, fn in ff:
494 for f, fn in ff:
496 filenodes.setdefault(f, {})[hg.bin(fn)] = 1
495 filenodes.setdefault(f, {})[hg.bin(fn)] = 1
497
496
498 ui.status("crosschecking files in changesets and manifests\n")
497 ui.status("crosschecking files in changesets and manifests\n")
499 for f in filenodes:
498 for f in filenodes:
500 if f not in filelinkrevs:
499 if f not in filelinkrevs:
501 ui.warn("file %s in manifest but not in changesets\n" % f)
500 ui.warn("file %s in manifest but not in changesets\n" % f)
502 errors += 1
501 errors += 1
503
502
504 for f in filelinkrevs:
503 for f in filelinkrevs:
505 if f not in filenodes:
504 if f not in filenodes:
506 ui.warn("file %s in changeset but not in manifest\n" % f)
505 ui.warn("file %s in changeset but not in manifest\n" % f)
507 errors += 1
506 errors += 1
508
507
509 ui.status("checking files\n")
508 ui.status("checking files\n")
510 ff = filenodes.keys()
509 ff = filenodes.keys()
511 ff.sort()
510 ff.sort()
512 for f in ff:
511 for f in ff:
513 if f == "/dev/null": continue
512 if f == "/dev/null": continue
514 files += 1
513 files += 1
515 fl = repo.file(f)
514 fl = repo.file(f)
516 nodes = { hg.nullid: 1 }
515 nodes = { hg.nullid: 1 }
517 for i in range(fl.count()):
516 for i in range(fl.count()):
518 n = fl.node(i)
517 n = fl.node(i)
519
518
520 if n not in filenodes[f]:
519 if n not in filenodes[f]:
521 ui.warn("%s: %d:%s not in manifests\n" % (f, i, hg.short(n)))
520 ui.warn("%s: %d:%s not in manifests\n" % (f, i, hg.short(n)))
522 print len(filenodes[f].keys()), fl.count(), f
521 print len(filenodes[f].keys()), fl.count(), f
523 errors += 1
522 errors += 1
524 else:
523 else:
525 del filenodes[f][n]
524 del filenodes[f][n]
526
525
527 flr = fl.linkrev(n)
526 flr = fl.linkrev(n)
528 if flr not in filelinkrevs[f]:
527 if flr not in filelinkrevs[f]:
529 ui.warn("%s:%s points to unexpected changeset rev %d\n"
528 ui.warn("%s:%s points to unexpected changeset rev %d\n"
530 % (f, hg.short(n), fl.linkrev(n)))
529 % (f, hg.short(n), fl.linkrev(n)))
531 errors += 1
530 errors += 1
532 else:
531 else:
533 filelinkrevs[f].remove(flr)
532 filelinkrevs[f].remove(flr)
534
533
535 # verify contents
534 # verify contents
536 try:
535 try:
537 t = fl.read(n)
536 t = fl.read(n)
538 except Exception, inst:
537 except Exception, inst:
539 ui.warn("unpacking file %s %s: %s\n" % (f, hg.short(n), inst))
538 ui.warn("unpacking file %s %s: %s\n" % (f, hg.short(n), inst))
540 errors += 1
539 errors += 1
541
540
542 # verify parents
541 # verify parents
543 (p1, p2) = fl.parents(n)
542 (p1, p2) = fl.parents(n)
544 if p1 not in nodes:
543 if p1 not in nodes:
545 ui.warn("file %s:%s unknown parent 1 %s" %
544 ui.warn("file %s:%s unknown parent 1 %s" %
546 (f, hg.short(n), hg.short(p1)))
545 (f, hg.short(n), hg.short(p1)))
547 errors += 1
546 errors += 1
548 if p2 not in nodes:
547 if p2 not in nodes:
549 ui.warn("file %s:%s unknown parent 2 %s" %
548 ui.warn("file %s:%s unknown parent 2 %s" %
550 (f, hg.short(n), hg.short(p1)))
549 (f, hg.short(n), hg.short(p1)))
551 errors += 1
550 errors += 1
552 nodes[n] = 1
551 nodes[n] = 1
553
552
554 # cross-check
553 # cross-check
555 for flr in filelinkrevs[f]:
554 for flr in filelinkrevs[f]:
556 ui.warn("changeset rev %d not in %s\n" % (flr, f))
555 ui.warn("changeset rev %d not in %s\n" % (flr, f))
557 errors += 1
556 errors += 1
558
557
559 for node in filenodes[f]:
558 for node in filenodes[f]:
560 ui.warn("node %s in manifests not in %s\n" % (hg.hex(n), f))
559 ui.warn("node %s in manifests not in %s\n" % (hg.hex(n), f))
561 errors += 1
560 errors += 1
562
561
563 ui.status("%d files, %d changesets, %d total revisions\n" %
562 ui.status("%d files, %d changesets, %d total revisions\n" %
564 (files, changesets, revisions))
563 (files, changesets, revisions))
565
564
566 if errors:
565 if errors:
567 ui.warn("%d integrity errors encountered!\n" % errors)
566 ui.warn("%d integrity errors encountered!\n" % errors)
568 sys.exit(1)
567 sys.exit(1)
569
568
570 elif cmd == "serve":
569 elif cmd == "serve":
571 from mercurial import hgweb
570 from mercurial import hgweb
572
571
573 soptions = {}
572 soptions = {}
574 opts = [('p', 'port', 8000, 'listen port'),
573 opts = [('p', 'port', 8000, 'listen port'),
575 ('a', 'address', '', 'interface address'),
574 ('a', 'address', '', 'interface address'),
576 ('n', 'name', os.getcwd(), 'repository name'),
575 ('n', 'name', os.getcwd(), 'repository name'),
577 ('t', 'templates', "", 'template map')
576 ('t', 'templates', "", 'template map')
578 ]
577 ]
579
578
580 args = fancyopts.fancyopts(args, opts, soptions,
579 args = fancyopts.fancyopts(args, opts, soptions,
581 'hg serve [options]')
580 'hg serve [options]')
582
581
583 hgweb.server(repo.root, soptions["name"], soptions["templates"],
582 hgweb.server(repo.root, soptions["name"], soptions["templates"],
584 soptions["address"], soptions["port"])
583 soptions["address"], soptions["port"])
585
584
586 else:
585 else:
587 if cmd: ui.warn("unknown command\n\n")
586 if cmd: ui.warn("unknown command\n\n")
588 help()
587 help()
589 sys.exit(1)
588 sys.exit(1)
@@ -1,945 +1,952 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, sha, socket, os, time, re, urllib2
8 import sys, struct, sha, socket, os, time, re, urllib2
9 import urllib
9 import urllib
10 from mercurial import byterange, lock
10 from mercurial import byterange, lock
11 from mercurial.transaction import *
11 from mercurial.transaction import *
12 from mercurial.revlog import *
12 from mercurial.revlog import *
13 from difflib import SequenceMatcher
13 from difflib import SequenceMatcher
14
14
15 class filelog(revlog):
15 class filelog(revlog):
16 def __init__(self, opener, path):
16 def __init__(self, opener, path):
17 revlog.__init__(self, opener,
17 revlog.__init__(self, opener,
18 os.path.join("data", path + ".i"),
18 os.path.join("data", path + ".i"),
19 os.path.join("data", path + ".d"))
19 os.path.join("data", path + ".d"))
20
20
21 def read(self, node):
21 def read(self, node):
22 return self.revision(node)
22 return self.revision(node)
23 def add(self, text, transaction, link, p1=None, p2=None):
23 def add(self, text, transaction, link, p1=None, p2=None):
24 return self.addrevision(text, transaction, link, p1, p2)
24 return self.addrevision(text, transaction, link, p1, p2)
25
25
26 def annotate(self, node):
26 def annotate(self, node):
27 revs = []
27 revs = []
28 while node != nullid:
28 while node != nullid:
29 revs.append(node)
29 revs.append(node)
30 node = self.parents(node)[0]
30 node = self.parents(node)[0]
31 revs.reverse()
31 revs.reverse()
32 prev = []
32 prev = []
33 annotate = []
33 annotate = []
34
34
35 for node in revs:
35 for node in revs:
36 curr = self.read(node).splitlines(1)
36 curr = self.read(node).splitlines(1)
37 linkrev = self.linkrev(node)
37 linkrev = self.linkrev(node)
38 sm = SequenceMatcher(None, prev, curr)
38 sm = SequenceMatcher(None, prev, curr)
39 new = []
39 new = []
40 for o, m, n, s, t in sm.get_opcodes():
40 for o, m, n, s, t in sm.get_opcodes():
41 if o == 'equal':
41 if o == 'equal':
42 new += annotate[m:n]
42 new += annotate[m:n]
43 else:
43 else:
44 new += [(linkrev, l) for l in curr[s:t]]
44 new += [(linkrev, l) for l in curr[s:t]]
45 annotate, prev = new, curr
45 annotate, prev = new, curr
46 return annotate
46 return annotate
47
47
48 class manifest(revlog):
48 class manifest(revlog):
49 def __init__(self, opener):
49 def __init__(self, opener):
50 self.mapcache = None
50 self.mapcache = None
51 self.listcache = None
51 self.listcache = None
52 self.addlist = None
52 self.addlist = None
53 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
53 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
54
54
55 def read(self, node):
55 def read(self, node):
56 if self.mapcache and self.mapcache[0] == node:
56 if self.mapcache and self.mapcache[0] == node:
57 return self.mapcache[1].copy()
57 return self.mapcache[1].copy()
58 text = self.revision(node)
58 text = self.revision(node)
59 map = {}
59 map = {}
60 self.listcache = (text, text.splitlines(1))
60 self.listcache = (text, text.splitlines(1))
61 for l in self.listcache[1]:
61 for l in self.listcache[1]:
62 (f, n) = l.split('\0')
62 (f, n) = l.split('\0')
63 map[f] = bin(n[:40])
63 map[f] = bin(n[:40])
64 self.mapcache = (node, map)
64 self.mapcache = (node, map)
65 return map
65 return map
66
66
67 def diff(self, a, b):
67 def diff(self, a, b):
68 # this is sneaky, as we're not actually using a and b
68 # this is sneaky, as we're not actually using a and b
69 if self.listcache and self.addlist and self.listcache[0] == a:
69 if self.listcache and self.addlist and self.listcache[0] == a:
70 d = mdiff.diff(self.listcache[1], self.addlist, 1)
70 d = mdiff.diff(self.listcache[1], self.addlist, 1)
71 if mdiff.patch(a, d) != b:
71 if mdiff.patch(a, d) != b:
72 sys.stderr.write("*** sortdiff failed, falling back ***\n")
72 sys.stderr.write("*** sortdiff failed, falling back ***\n")
73 return mdiff.textdiff(a, b)
73 return mdiff.textdiff(a, b)
74 return d
74 return d
75 else:
75 else:
76 return mdiff.textdiff(a, b)
76 return mdiff.textdiff(a, b)
77
77
78 def add(self, map, transaction, link, p1=None, p2=None):
78 def add(self, map, transaction, link, p1=None, p2=None):
79 files = map.keys()
79 files = map.keys()
80 files.sort()
80 files.sort()
81
81
82 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
82 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
83 text = "".join(self.addlist)
83 text = "".join(self.addlist)
84
84
85 n = self.addrevision(text, transaction, link, p1, p2)
85 n = self.addrevision(text, transaction, link, p1, p2)
86 self.mapcache = (n, map)
86 self.mapcache = (n, map)
87 self.listcache = (text, self.addlist)
87 self.listcache = (text, self.addlist)
88 self.addlist = None
88 self.addlist = None
89
89
90 return n
90 return n
91
91
92 class changelog(revlog):
92 class changelog(revlog):
93 def __init__(self, opener):
93 def __init__(self, opener):
94 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
94 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
95
95
96 def extract(self, text):
96 def extract(self, text):
97 if not text:
97 if not text:
98 return (nullid, "", "0", [], "")
98 return (nullid, "", "0", [], "")
99 last = text.index("\n\n")
99 last = text.index("\n\n")
100 desc = text[last + 2:]
100 desc = text[last + 2:]
101 l = text[:last].splitlines()
101 l = text[:last].splitlines()
102 manifest = bin(l[0])
102 manifest = bin(l[0])
103 user = l[1]
103 user = l[1]
104 date = l[2]
104 date = l[2]
105 files = l[3:]
105 files = l[3:]
106 return (manifest, user, date, files, desc)
106 return (manifest, user, date, files, desc)
107
107
108 def read(self, node):
108 def read(self, node):
109 return self.extract(self.revision(node))
109 return self.extract(self.revision(node))
110
110
111 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
111 def add(self, manifest, list, desc, transaction, p1=None, p2=None):
112 user = (os.environ.get("HGUSER") or
112 user = (os.environ.get("HGUSER") or
113 os.environ.get("EMAIL") or
113 os.environ.get("EMAIL") or
114 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
114 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
115 date = "%d %d" % (time.time(), time.timezone)
115 date = "%d %d" % (time.time(), time.timezone)
116 list.sort()
116 list.sort()
117 l = [hex(manifest), user, date] + list + ["", desc]
117 l = [hex(manifest), user, date] + list + ["", desc]
118 text = "\n".join(l)
118 text = "\n".join(l)
119 return self.addrevision(text, transaction, self.count(), p1, p2)
119 return self.addrevision(text, transaction, self.count(), p1, p2)
120
120
121 class dircache:
121 class dircache:
122 def __init__(self, opener, ui):
122 def __init__(self, opener, ui):
123 self.opener = opener
123 self.opener = opener
124 self.dirty = 0
124 self.dirty = 0
125 self.ui = ui
125 self.ui = ui
126 self.map = None
126 self.map = None
127 def __del__(self):
127 def __del__(self):
128 if self.dirty: self.write()
128 if self.dirty: self.write()
129 def __getitem__(self, key):
129 def __getitem__(self, key):
130 try:
130 try:
131 return self.map[key]
131 return self.map[key]
132 except TypeError:
132 except TypeError:
133 self.read()
133 self.read()
134 return self[key]
134 return self[key]
135
135
136 def read(self):
136 def read(self):
137 if self.map is not None: return self.map
137 if self.map is not None: return self.map
138
138
139 self.map = {}
139 self.map = {}
140 try:
140 try:
141 st = self.opener("dircache").read()
141 st = self.opener("dircache").read()
142 except: return
142 except: return
143
143
144 pos = 0
144 pos = 0
145 while pos < len(st):
145 while pos < len(st):
146 e = struct.unpack(">llll", st[pos:pos+16])
146 e = struct.unpack(">llll", st[pos:pos+16])
147 l = e[3]
147 l = e[3]
148 pos += 16
148 pos += 16
149 f = st[pos:pos + l]
149 f = st[pos:pos + l]
150 self.map[f] = e[:3]
150 self.map[f] = e[:3]
151 pos += l
151 pos += l
152
152
153 def update(self, files):
153 def update(self, files):
154 if not files: return
154 if not files: return
155 self.read()
155 self.read()
156 self.dirty = 1
156 self.dirty = 1
157 for f in files:
157 for f in files:
158 try:
158 try:
159 s = os.stat(f)
159 s = os.stat(f)
160 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
160 self.map[f] = (s.st_mode, s.st_size, s.st_mtime)
161 except IOError:
161 except IOError:
162 self.remove(f)
162 self.remove(f)
163
163
164 def taint(self, files):
164 def taint(self, files):
165 if not files: return
165 if not files: return
166 self.read()
166 self.read()
167 self.dirty = 1
167 self.dirty = 1
168 for f in files:
168 for f in files:
169 self.map[f] = (0, -1, 0)
169 self.map[f] = (0, -1, 0)
170
170
171 def remove(self, files):
171 def remove(self, files):
172 if not files: return
172 if not files: return
173 self.read()
173 self.read()
174 self.dirty = 1
174 self.dirty = 1
175 for f in files:
175 for f in files:
176 try:
176 try:
177 del self.map[f]
177 del self.map[f]
178 except KeyError:
178 except KeyError:
179 self.ui.warn("Not in dircache: %s\n" % f)
179 self.ui.warn("Not in dircache: %s\n" % f)
180 pass
180 pass
181
181
182 def clear(self):
182 def clear(self):
183 self.map = {}
183 self.map = {}
184 self.dirty = 1
184 self.dirty = 1
185
185
186 def write(self):
186 def write(self):
187 st = self.opener("dircache", "w")
187 st = self.opener("dircache", "w")
188 for f, e in self.map.items():
188 for f, e in self.map.items():
189 e = struct.pack(">llll", e[0], e[1], e[2], len(f))
189 e = struct.pack(">llll", e[0], e[1], e[2], len(f))
190 st.write(e + f)
190 st.write(e + f)
191 self.dirty = 0
191 self.dirty = 0
192
192
193 def copy(self):
193 def copy(self):
194 self.read()
194 self.read()
195 return self.map.copy()
195 return self.map.copy()
196
196
197 # used to avoid circular references so destructors work
197 # used to avoid circular references so destructors work
198 def opener(base):
198 def opener(base):
199 p = base
199 p = base
200 def o(path, mode="r"):
200 def o(path, mode="r"):
201 if p[:7] == "http://":
201 if p[:7] == "http://":
202 f = os.path.join(p, urllib.quote(path))
202 f = os.path.join(p, urllib.quote(path))
203 return httprangereader(f)
203 return httprangereader(f)
204
204
205 f = os.path.join(p, path)
205 f = os.path.join(p, path)
206
206
207 if mode != "r":
207 if mode != "r":
208 try:
208 try:
209 s = os.stat(f)
209 s = os.stat(f)
210 except OSError:
210 except OSError:
211 d = os.path.dirname(f)
211 d = os.path.dirname(f)
212 if not os.path.isdir(d):
212 if not os.path.isdir(d):
213 os.makedirs(d)
213 os.makedirs(d)
214 else:
214 else:
215 if s.st_nlink > 1:
215 if s.st_nlink > 1:
216 file(f + ".tmp", "w").write(file(f).read())
216 file(f + ".tmp", "w").write(file(f).read())
217 os.rename(f+".tmp", f)
217 os.rename(f+".tmp", f)
218
218
219 return file(f, mode)
219 return file(f, mode)
220
220
221 return o
221 return o
222
222
223 class localrepository:
223 class localrepository:
224 def __init__(self, ui, path=None, create=0):
224 def __init__(self, ui, path=None, create=0):
225 self.remote = 0
225 self.remote = 0
226 if path and path[:7] == "http://":
226 if path and path[:7] == "http://":
227 self.remote = 1
227 self.remote = 1
228 self.path = path
228 self.path = path
229 else:
229 else:
230 if not path:
230 if not path:
231 p = os.getcwd()
231 p = os.getcwd()
232 while not os.path.isdir(os.path.join(p, ".hg")):
232 while not os.path.isdir(os.path.join(p, ".hg")):
233 p = os.path.dirname(p)
233 p = os.path.dirname(p)
234 if p == "/": raise "No repo found"
234 if p == "/": raise "No repo found"
235 path = p
235 path = p
236 self.path = os.path.join(path, ".hg")
236 self.path = os.path.join(path, ".hg")
237
237
238 self.root = path
238 self.root = path
239 self.ui = ui
239 self.ui = ui
240
240
241 if create:
241 if create:
242 os.mkdir(self.path)
242 os.mkdir(self.path)
243 os.mkdir(self.join("data"))
243 os.mkdir(self.join("data"))
244
244
245 self.opener = opener(self.path)
245 self.opener = opener(self.path)
246 self.manifest = manifest(self.opener)
246 self.manifest = manifest(self.opener)
247 self.changelog = changelog(self.opener)
247 self.changelog = changelog(self.opener)
248 self.ignorelist = None
248 self.ignorelist = None
249 self.tags = None
249 self.tags = None
250
250
251 if not self.remote:
251 if not self.remote:
252 self.dircache = dircache(self.opener, ui)
252 self.dircache = dircache(self.opener, ui)
253 try:
253 try:
254 self.current = bin(self.opener("current").read())
254 self.current = bin(self.opener("current").read())
255 except IOError:
255 except IOError:
256 self.current = None
256 self.current = None
257
257
258 def setcurrent(self, node):
258 def setcurrent(self, node):
259 self.current = node
259 self.current = node
260 self.opener("current", "w").write(hex(node))
260 self.opener("current", "w").write(hex(node))
261
261
262 def ignore(self, f):
262 def ignore(self, f):
263 if self.ignorelist is None:
263 if self.ignorelist is None:
264 self.ignorelist = []
264 self.ignorelist = []
265 try:
265 try:
266 l = open(os.path.join(self.root, ".hgignore"))
266 l = open(os.path.join(self.root, ".hgignore"))
267 for pat in l:
267 for pat in l:
268 if pat != "\n":
268 if pat != "\n":
269 self.ignorelist.append(re.compile(pat[:-1]))
269 self.ignorelist.append(re.compile(pat[:-1]))
270 except IOError: pass
270 except IOError: pass
271 for pat in self.ignorelist:
271 for pat in self.ignorelist:
272 if pat.search(f): return True
272 if pat.search(f): return True
273 return False
273 return False
274
274
275 def lookup(self, key):
275 def lookup(self, key):
276 if self.tags is None:
276 if self.tags is None:
277 self.tags = {}
277 self.tags = {}
278 try:
278 try:
279 fl = self.file(".hgtags")
279 fl = self.file(".hgtags")
280 for l in fl.revision(fl.tip()).splitlines():
280 for l in fl.revision(fl.tip()).splitlines():
281 if l:
281 if l:
282 n, k = l.split(" ")
282 n, k = l.split(" ")
283 self.tags[k] = bin(n)
283 self.tags[k] = bin(n)
284 except KeyError: pass
284 except KeyError: pass
285 try:
285 try:
286 return self.tags[key]
286 return self.tags[key]
287 except KeyError:
287 except KeyError:
288 return self.changelog.lookup(key)
288 return self.changelog.lookup(key)
289
289
290 def join(self, f):
290 def join(self, f):
291 return os.path.join(self.path, f)
291 return os.path.join(self.path, f)
292
292
293 def file(self, f):
293 def file(self, f):
294 if f[0] == '/': f = f[1:]
294 return filelog(self.opener, f)
295 return filelog(self.opener, f)
295
296
296 def transaction(self):
297 def transaction(self):
297 return transaction(self.opener, self.join("journal"),
298 return transaction(self.opener, self.join("journal"),
298 self.join("undo"))
299 self.join("undo"))
299
300
300 def recover(self, f = "journal"):
301 def recover(self, f = "journal"):
301 self.lock()
302 self.lock()
302 if os.path.exists(self.join(f)):
303 if os.path.exists(self.join(f)):
303 self.ui.status("attempting to rollback %s information\n" % f)
304 self.ui.status("attempting to rollback %s information\n" % f)
304 return rollback(self.opener, self.join(f))
305 return rollback(self.opener, self.join(f))
305 else:
306 else:
306 self.ui.warn("no %s information available\n" % f)
307 self.ui.warn("no %s information available\n" % f)
307
308
308 def lock(self, wait = 1):
309 def lock(self, wait = 1):
309 try:
310 try:
310 return lock.lock(self.join("lock"), 0)
311 return lock.lock(self.join("lock"), 0)
311 except lock.LockHeld, inst:
312 except lock.LockHeld, inst:
312 if wait:
313 if wait:
313 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
314 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
314 return lock.lock(self.join("lock"), wait)
315 return lock.lock(self.join("lock"), wait)
315 raise inst
316 raise inst
316
317
317 def commit(self, parent, update = None, text = ""):
318 def commit(self, parent, update = None, text = ""):
318 self.lock()
319 self.lock()
319 try:
320 try:
320 remove = [ l[:-1] for l in self.opener("to-remove") ]
321 remove = [ l[:-1] for l in self.opener("to-remove") ]
321 os.unlink(self.join("to-remove"))
322 os.unlink(self.join("to-remove"))
322
323
323 except IOError:
324 except IOError:
324 remove = []
325 remove = []
325
326
326 if update == None:
327 if update == None:
327 update = self.diffdir(self.root, parent)[0]
328 update = self.diffdir(self.root, parent)[0]
328
329
329 if not update:
330 if not update:
330 self.ui.status("nothing changed\n")
331 self.ui.status("nothing changed\n")
331 return
332 return
332
333
333 tr = self.transaction()
334 tr = self.transaction()
334
335
335 # check in files
336 # check in files
336 new = {}
337 new = {}
337 linkrev = self.changelog.count()
338 linkrev = self.changelog.count()
338 update.sort()
339 update.sort()
339 for f in update:
340 for f in update:
340 self.ui.note(f + "\n")
341 self.ui.note(f + "\n")
341 try:
342 try:
342 t = file(f).read()
343 t = file(f).read()
343 except IOError:
344 except IOError:
344 remove.append(f)
345 remove.append(f)
345 continue
346 continue
346 r = self.file(f)
347 r = self.file(f)
347 new[f] = r.add(t, tr, linkrev)
348 new[f] = r.add(t, tr, linkrev)
348
349
349 # update manifest
350 # update manifest
350 mmap = self.manifest.read(self.manifest.tip())
351 mmap = self.manifest.read(self.manifest.tip())
351 mmap.update(new)
352 mmap.update(new)
352 for f in remove:
353 for f in remove:
353 del mmap[f]
354 del mmap[f]
354 mnode = self.manifest.add(mmap, tr, linkrev)
355 mnode = self.manifest.add(mmap, tr, linkrev)
355
356
356 # add changeset
357 # add changeset
357 new = new.keys()
358 new = new.keys()
358 new.sort()
359 new.sort()
359
360
360 edittext = text + "\n" + "HG: manifest hash %s\n" % hex(mnode)
361 edittext = text + "\n" + "HG: manifest hash %s\n" % hex(mnode)
361 edittext += "".join(["HG: changed %s\n" % f for f in new])
362 edittext += "".join(["HG: changed %s\n" % f for f in new])
362 edittext += "".join(["HG: removed %s\n" % f for f in remove])
363 edittext += "".join(["HG: removed %s\n" % f for f in remove])
363 edittext = self.ui.edit(edittext)
364 edittext = self.ui.edit(edittext)
364
365
365 n = self.changelog.add(mnode, new, edittext, tr)
366 n = self.changelog.add(mnode, new, edittext, tr)
366 tr.close()
367 tr.close()
367
368
368 self.setcurrent(n)
369 self.setcurrent(n)
369 self.dircache.update(new)
370 self.dircache.update(new)
370 self.dircache.remove(remove)
371 self.dircache.remove(remove)
371
372
372 def checkout(self, node):
373 def checkout(self, node):
373 # checkout is really dumb at the moment
374 # checkout is really dumb at the moment
374 # it ought to basically merge
375 # it ought to basically merge
375 change = self.changelog.read(node)
376 change = self.changelog.read(node)
376 l = self.manifest.read(change[0]).items()
377 l = self.manifest.read(change[0]).items()
377 l.sort()
378 l.sort()
378
379
379 for f,n in l:
380 for f,n in l:
380 if f[0] == "/": continue
381 if f[0] == "/": continue
381 self.ui.note(f, "\n")
382 self.ui.note(f, "\n")
382 t = self.file(f).revision(n)
383 t = self.file(f).revision(n)
383 try:
384 try:
384 file(f, "w").write(t)
385 file(f, "w").write(t)
385 except IOError:
386 except IOError:
386 os.makedirs(os.path.dirname(f))
387 os.makedirs(os.path.dirname(f))
387 file(f, "w").write(t)
388 file(f, "w").write(t)
388
389
389 self.setcurrent(node)
390 self.setcurrent(node)
390 self.dircache.clear()
391 self.dircache.clear()
391 self.dircache.update([f for f,n in l])
392 self.dircache.update([f for f,n in l])
392
393
393 def diffdir(self, path, changeset):
394 def diffdir(self, path, changeset):
394 changed = []
395 changed = []
395 mf = {}
396 mf = {}
396 added = []
397 added = []
397
398
398 if changeset:
399 if changeset:
399 change = self.changelog.read(changeset)
400 change = self.changelog.read(changeset)
400 mf = self.manifest.read(change[0])
401 mf = self.manifest.read(change[0])
401
402
402 if changeset == self.current:
403 if changeset == self.current:
403 dc = self.dircache.copy()
404 dc = self.dircache.copy()
404 else:
405 else:
405 dc = dict.fromkeys(mf)
406 dc = dict.fromkeys(mf)
406
407
407 def fcmp(fn):
408 def fcmp(fn):
408 t1 = file(os.path.join(self.root, fn)).read()
409 t1 = file(os.path.join(self.root, fn)).read()
409 t2 = self.file(fn).revision(mf[fn])
410 t2 = self.file(fn).revision(mf[fn])
410 return cmp(t1, t2)
411 return cmp(t1, t2)
411
412
412 for dir, subdirs, files in os.walk(self.root):
413 for dir, subdirs, files in os.walk(self.root):
413 d = dir[len(self.root)+1:]
414 d = dir[len(self.root)+1:]
414 if ".hg" in subdirs: subdirs.remove(".hg")
415 if ".hg" in subdirs: subdirs.remove(".hg")
415
416
416 for f in files:
417 for f in files:
417 fn = os.path.join(d, f)
418 fn = os.path.join(d, f)
418 try: s = os.stat(os.path.join(self.root, fn))
419 try: s = os.stat(os.path.join(self.root, fn))
419 except: continue
420 except: continue
420 if fn in dc:
421 if fn in dc:
421 c = dc[fn]
422 c = dc[fn]
422 del dc[fn]
423 del dc[fn]
423 if not c:
424 if not c:
424 if fcmp(fn):
425 if fcmp(fn):
425 changed.append(fn)
426 changed.append(fn)
426 elif c[1] != s.st_size:
427 elif c[1] != s.st_size:
427 changed.append(fn)
428 changed.append(fn)
428 elif c[0] != s.st_mode or c[2] != s.st_mtime:
429 elif c[0] != s.st_mode or c[2] != s.st_mtime:
429 if fcmp(fn):
430 if fcmp(fn):
430 changed.append(fn)
431 changed.append(fn)
431 else:
432 else:
432 if self.ignore(fn): continue
433 if self.ignore(fn): continue
433 added.append(fn)
434 added.append(fn)
434
435
435 deleted = dc.keys()
436 deleted = dc.keys()
436 deleted.sort()
437 deleted.sort()
437
438
438 return (changed, added, deleted)
439 return (changed, added, deleted)
439
440
440 def diffrevs(self, node1, node2):
441 def diffrevs(self, node1, node2):
441 changed, added = [], []
442 changed, added = [], []
442
443
443 change = self.changelog.read(node1)
444 change = self.changelog.read(node1)
444 mf1 = self.manifest.read(change[0])
445 mf1 = self.manifest.read(change[0])
445 change = self.changelog.read(node2)
446 change = self.changelog.read(node2)
446 mf2 = self.manifest.read(change[0])
447 mf2 = self.manifest.read(change[0])
447
448
448 for fn in mf2:
449 for fn in mf2:
449 if mf1.has_key(fn):
450 if mf1.has_key(fn):
450 if mf1[fn] != mf2[fn]:
451 if mf1[fn] != mf2[fn]:
451 changed.append(fn)
452 changed.append(fn)
452 del mf1[fn]
453 del mf1[fn]
453 else:
454 else:
454 added.append(fn)
455 added.append(fn)
455
456
456 deleted = mf1.keys()
457 deleted = mf1.keys()
457 deleted.sort()
458 deleted.sort()
458
459
459 return (changed, added, deleted)
460 return (changed, added, deleted)
460
461
461 def add(self, list):
462 def add(self, list):
462 self.dircache.taint(list)
463 self.dircache.taint(list)
463
464
464 def remove(self, list):
465 def remove(self, list):
465 dl = self.opener("to-remove", "a")
466 dl = self.opener("to-remove", "a")
466 for f in list:
467 for f in list:
467 dl.write(f + "\n")
468 dl.write(f + "\n")
468
469
469 def branches(self, nodes):
470 def branches(self, nodes):
470 if not nodes: nodes = [self.changelog.tip()]
471 if not nodes: nodes = [self.changelog.tip()]
471 b = []
472 b = []
472 for n in nodes:
473 for n in nodes:
473 t = n
474 t = n
474 while n:
475 while n:
475 p = self.changelog.parents(n)
476 p = self.changelog.parents(n)
476 if p[1] != nullid or p[0] == nullid:
477 if p[1] != nullid or p[0] == nullid:
477 b.append((t, n, p[0], p[1]))
478 b.append((t, n, p[0], p[1]))
478 break
479 break
479 n = p[0]
480 n = p[0]
480 return b
481 return b
481
482
482 def between(self, pairs):
483 def between(self, pairs):
483 r = []
484 r = []
484
485
485 for top, bottom in pairs:
486 for top, bottom in pairs:
486 n, l, i = top, [], 0
487 n, l, i = top, [], 0
487 f = 1
488 f = 1
488
489
489 while n != bottom:
490 while n != bottom:
490 p = self.changelog.parents(n)[0]
491 p = self.changelog.parents(n)[0]
491 if i == f:
492 if i == f:
492 l.append(n)
493 l.append(n)
493 f = f * 2
494 f = f * 2
494 n = p
495 n = p
495 i += 1
496 i += 1
496
497
497 r.append(l)
498 r.append(l)
498
499
499 return r
500 return r
500
501
501 def newer(self, nodes):
502 def newer(self, nodes):
502 m = {}
503 m = {}
503 nl = []
504 nl = []
504 pm = {}
505 pm = {}
505 cl = self.changelog
506 cl = self.changelog
506 t = l = cl.count()
507 t = l = cl.count()
507
508
508 # find the lowest numbered node
509 # find the lowest numbered node
509 for n in nodes:
510 for n in nodes:
510 l = min(l, cl.rev(n))
511 l = min(l, cl.rev(n))
511 m[n] = 1
512 m[n] = 1
512
513
513 for i in xrange(l, t):
514 for i in xrange(l, t):
514 n = cl.node(i)
515 n = cl.node(i)
515 if n in m: # explicitly listed
516 if n in m: # explicitly listed
516 pm[n] = 1
517 pm[n] = 1
517 nl.append(n)
518 nl.append(n)
518 continue
519 continue
519 for p in cl.parents(n):
520 for p in cl.parents(n):
520 if p in pm: # parent listed
521 if p in pm: # parent listed
521 pm[n] = 1
522 pm[n] = 1
522 nl.append(n)
523 nl.append(n)
523 break
524 break
524
525
525 return nl
526 return nl
526
527
527 def getchangegroup(self, remote):
528 def getchangegroup(self, remote):
528 m = self.changelog.nodemap
529 m = self.changelog.nodemap
529 search = []
530 search = []
530 fetch = []
531 fetch = []
531 seen = {}
532 seen = {}
532 seenbranch = {}
533 seenbranch = {}
534
535 self.ui.status("searching for changes\n")
533 tip = remote.branches([])[0]
536 tip = remote.branches([])[0]
534 self.ui.debug("remote tip branch is %s:%s\n" %
537 self.ui.debug("remote tip branch is %s:%s\n" %
535 (short(tip[0]), short(tip[1])))
538 (short(tip[0]), short(tip[1])))
536
539
537 # if we have an empty repo, fetch everything
540 # if we have an empty repo, fetch everything
538 if self.changelog.tip() == nullid:
541 if self.changelog.tip() == nullid:
539 return remote.changegroup([nullid])
542 return remote.changegroup([nullid])
540
543
541 # otherwise, assume we're closer to the tip than the root
544 # otherwise, assume we're closer to the tip than the root
542 unknown = [tip]
545 unknown = [tip]
543
546
544 if tip[0] in m:
547 if tip[0] in m:
545 self.ui.note("nothing to do!\n")
548 self.ui.status("nothing to do!\n")
546 return None
549 return None
547
550
548 while unknown:
551 while unknown:
549 n = unknown.pop(0)
552 n = unknown.pop(0)
550 seen[n[0]] = 1
553 seen[n[0]] = 1
551
554
552 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
555 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
553 if n == nullid: break
556 if n == nullid: break
554 if n in seenbranch:
557 if n in seenbranch:
555 self.ui.debug("branch already found\n")
558 self.ui.debug("branch already found\n")
556 continue
559 continue
557 if n[1] and n[1] in m: # do we know the base?
560 if n[1] and n[1] in m: # do we know the base?
558 self.ui.debug("found incomplete branch %s:%s\n"
561 self.ui.debug("found incomplete branch %s:%s\n"
559 % (short(n[0]), short(n[1])))
562 % (short(n[0]), short(n[1])))
560 search.append(n) # schedule branch range for scanning
563 search.append(n) # schedule branch range for scanning
561 seenbranch[n] = 1
564 seenbranch[n] = 1
562 else:
565 else:
563 if n[2] in m and n[3] in m:
566 if n[2] in m and n[3] in m:
564 if n[1] not in fetch:
567 if n[1] not in fetch:
565 self.ui.debug("found new changeset %s\n" %
568 self.ui.debug("found new changeset %s\n" %
566 short(n[1]))
569 short(n[1]))
567 fetch.append(n[1]) # earliest unknown
570 fetch.append(n[1]) # earliest unknown
568 continue
571 continue
569
572
570 r = []
573 r = []
571 for a in n[2:4]:
574 for a in n[2:4]:
572 if a not in seen: r.append(a)
575 if a not in seen: r.append(a)
573
576
574 if r:
577 if r:
575 self.ui.debug("requesting %s\n" %
578 self.ui.debug("requesting %s\n" %
576 " ".join(map(short, r)))
579 " ".join(map(short, r)))
577 for b in remote.branches(r):
580 for b in remote.branches(r):
578 self.ui.debug("received %s:%s\n" %
581 self.ui.debug("received %s:%s\n" %
579 (short(b[0]), short(b[1])))
582 (short(b[0]), short(b[1])))
580 if b[0] not in m and b[0] not in seen:
583 if b[0] not in m and b[0] not in seen:
581 unknown.append(b)
584 unknown.append(b)
582
585
583 while search:
586 while search:
584 n = search.pop(0)
587 n = search.pop(0)
585 l = remote.between([(n[0], n[1])])[0]
588 l = remote.between([(n[0], n[1])])[0]
586 p = n[0]
589 p = n[0]
587 f = 1
590 f = 1
588 for i in l + [n[1]]:
591 for i in l + [n[1]]:
589 if i in m:
592 if i in m:
590 if f <= 2:
593 if f <= 2:
591 self.ui.debug("found new branch changeset %s\n" %
594 self.ui.debug("found new branch changeset %s\n" %
592 short(p))
595 short(p))
593 fetch.append(p)
596 fetch.append(p)
594 else:
597 else:
595 self.ui.debug("narrowed branch search to %s:%s\n"
598 self.ui.debug("narrowed branch search to %s:%s\n"
596 % (short(p), short(i)))
599 % (short(p), short(i)))
597 search.append((p, i))
600 search.append((p, i))
598 break
601 break
599 p, f = i, f * 2
602 p, f = i, f * 2
600
603
601 for f in fetch:
604 for f in fetch:
602 if f in m:
605 if f in m:
603 raise "already have", short(f[:4])
606 raise "already have", short(f[:4])
604
607
605 self.ui.note("adding new changesets starting at " +
608 self.ui.note("adding new changesets starting at " +
606 " ".join([short(f) for f in fetch]) + "\n")
609 " ".join([short(f) for f in fetch]) + "\n")
607
610
608 return remote.changegroup(fetch)
611 return remote.changegroup(fetch)
609
612
610 def changegroup(self, basenodes):
613 def changegroup(self, basenodes):
611 nodes = self.newer(basenodes)
614 nodes = self.newer(basenodes)
612
615
613 # construct the link map
616 # construct the link map
614 linkmap = {}
617 linkmap = {}
615 for n in nodes:
618 for n in nodes:
616 linkmap[self.changelog.rev(n)] = n
619 linkmap[self.changelog.rev(n)] = n
617
620
618 # construct a list of all changed files
621 # construct a list of all changed files
619 changed = {}
622 changed = {}
620 for n in nodes:
623 for n in nodes:
621 c = self.changelog.read(n)
624 c = self.changelog.read(n)
622 for f in c[3]:
625 for f in c[3]:
623 changed[f] = 1
626 changed[f] = 1
624 changed = changed.keys()
627 changed = changed.keys()
625 changed.sort()
628 changed.sort()
626
629
627 # the changegroup is changesets + manifests + all file revs
630 # the changegroup is changesets + manifests + all file revs
628 revs = [ self.changelog.rev(n) for n in nodes ]
631 revs = [ self.changelog.rev(n) for n in nodes ]
629
632
630 yield self.changelog.group(linkmap)
633 for y in self.changelog.group(linkmap): yield y
631 yield self.manifest.group(linkmap)
634 for y in self.manifest.group(linkmap): yield y
632
633 for f in changed:
635 for f in changed:
636 yield struct.pack(">l", len(f) + 4) + f
634 g = self.file(f).group(linkmap)
637 g = self.file(f).group(linkmap)
635 if not g: raise "couldn't find change to %s" % f
638 for y in g:
636 l = struct.pack(">l", len(f))
639 yield y
637 yield "".join([l, f, g])
638
640
639 def addchangegroup(self, generator):
641 def addchangegroup(self, generator):
640 changesets = files = revisions = 0
642 changesets = files = revisions = 0
641
643
642 self.lock()
644 self.lock()
643 class genread:
645 class genread:
644 def __init__(self, generator):
646 def __init__(self, generator):
645 self.g = generator
647 self.g = generator
646 self.buf = ""
648 self.buf = ""
647 def read(self, l):
649 def read(self, l):
648 while l > len(self.buf):
650 while l > len(self.buf):
649 try:
651 try:
650 self.buf += self.g.next()
652 self.buf += self.g.next()
651 except StopIteration:
653 except StopIteration:
652 break
654 break
653 d, self.buf = self.buf[:l], self.buf[l:]
655 d, self.buf = self.buf[:l], self.buf[l:]
654 return d
656 return d
655
657
656 if not generator: return
658 if not generator: return
657 source = genread(generator)
659 source = genread(generator)
658
660
659 def getchunk(add = 0):
661 def getchunk():
660 d = source.read(4)
662 d = source.read(4)
661 if not d: return ""
663 if not d: return ""
662 l = struct.unpack(">l", d)[0]
664 l = struct.unpack(">l", d)[0]
663 return source.read(l - 4 + add)
665 if l <= 4: return ""
666 return source.read(l - 4)
667
668 def getgroup():
669 while 1:
670 c = getchunk()
671 if not c: break
672 yield c
664
673
665 tr = self.transaction()
674 tr = self.transaction()
666 simple = True
675 simple = True
667 need = {}
676 need = {}
668
677
669 self.ui.status("adding changesets\n")
678 self.ui.status("adding changesets\n")
670 # pull off the changeset group
679 # pull off the changeset group
671 def report(x):
680 def report(x):
672 self.ui.debug("add changeset %s\n" % short(x))
681 self.ui.debug("add changeset %s\n" % short(x))
673 return self.changelog.count()
682 return self.changelog.count()
674
683
675 csg = getchunk()
676 co = self.changelog.tip()
684 co = self.changelog.tip()
677 cn = self.changelog.addgroup(csg, report, tr)
685 cn = self.changelog.addgroup(getgroup(), report, tr)
678
686
679 revisions = self.changelog.rev(cn) - self.changelog.rev(co)
687 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
680 changesets = revisions
681
688
682 self.ui.status("adding manifests\n")
689 self.ui.status("adding manifests\n")
683 # pull off the manifest group
690 # pull off the manifest group
684 mfg = getchunk()
685 mm = self.manifest.tip()
691 mm = self.manifest.tip()
686 mo = self.manifest.addgroup(mfg, lambda x: self.changelog.rev(x), tr)
692 mo = self.manifest.addgroup(getgroup(),
687
693 lambda x: self.changelog.rev(x), tr)
688 revisions += self.manifest.rev(mo) - self.manifest.rev(mm)
689
694
690 # do we need a resolve?
695 # do we need a resolve?
691 if self.changelog.ancestor(co, cn) != co:
696 if self.changelog.ancestor(co, cn) != co:
692 simple = False
697 simple = False
693 resolverev = self.changelog.count()
698 resolverev = self.changelog.count()
694
699
695 # resolve the manifest to determine which files
700 # resolve the manifest to determine which files
696 # we care about merging
701 # we care about merging
697 self.ui.status("resolving manifests\n")
702 self.ui.status("resolving manifests\n")
698 ma = self.manifest.ancestor(mm, mo)
703 ma = self.manifest.ancestor(mm, mo)
699 omap = self.manifest.read(mo) # other
704 omap = self.manifest.read(mo) # other
700 amap = self.manifest.read(ma) # ancestor
705 amap = self.manifest.read(ma) # ancestor
701 mmap = self.manifest.read(mm) # mine
706 mmap = self.manifest.read(mm) # mine
702 nmap = {}
707 nmap = {}
703
708
704 self.ui.debug(" ancestor %s local %s remote %s\n" %
709 self.ui.debug(" ancestor %s local %s remote %s\n" %
705 (short(ma), short(mm), short(mo)))
710 (short(ma), short(mm), short(mo)))
706
711
707 for f, mid in mmap.iteritems():
712 for f, mid in mmap.iteritems():
708 if f in omap:
713 if f in omap:
709 if mid != omap[f]:
714 if mid != omap[f]:
710 self.ui.debug(" %s versions differ, do resolve\n" % f)
715 self.ui.debug(" %s versions differ, do resolve\n" % f)
711 need[f] = mid # use merged version or local version
716 need[f] = mid # use merged version or local version
712 else:
717 else:
713 nmap[f] = mid # keep ours
718 nmap[f] = mid # keep ours
714 del omap[f]
719 del omap[f]
715 elif f in amap:
720 elif f in amap:
716 if mid != amap[f]:
721 if mid != amap[f]:
717 r = self.ui.prompt(
722 r = self.ui.prompt(
718 (" local changed %s which remote deleted\n" % f) +
723 (" local changed %s which remote deleted\n" % f) +
719 "(k)eep or (d)elete?", "[kd]", "k")
724 "(k)eep or (d)elete?", "[kd]", "k")
720 if r == "k": nmap[f] = mid
725 if r == "k": nmap[f] = mid
721 else:
726 else:
722 self.ui.debug("other deleted %s\n" % f)
727 self.ui.debug("other deleted %s\n" % f)
723 pass # other deleted it
728 pass # other deleted it
724 else:
729 else:
725 self.ui.debug("local created %s\n" %f)
730 self.ui.debug("local created %s\n" %f)
726 nmap[f] = mid # we created it
731 nmap[f] = mid # we created it
727
732
728 del mmap
733 del mmap
729
734
730 for f, oid in omap.iteritems():
735 for f, oid in omap.iteritems():
731 if f in amap:
736 if f in amap:
732 if oid != amap[f]:
737 if oid != amap[f]:
733 r = self.ui.prompt(
738 r = self.ui.prompt(
734 ("remote changed %s which local deleted\n" % f) +
739 ("remote changed %s which local deleted\n" % f) +
735 "(k)eep or (d)elete?", "[kd]", "k")
740 "(k)eep or (d)elete?", "[kd]", "k")
736 if r == "k": nmap[f] = oid
741 if r == "k": nmap[f] = oid
737 else:
742 else:
738 pass # probably safe
743 pass # probably safe
739 else:
744 else:
740 self.ui.debug("remote created %s, do resolve\n" % f)
745 self.ui.debug("remote created %s, do resolve\n" % f)
741 need[f] = oid
746 need[f] = oid
742
747
743 del omap
748 del omap
744 del amap
749 del amap
745
750
746 new = need.keys()
751 new = need.keys()
747 new.sort()
752 new.sort()
748
753
749 # process the files
754 # process the files
750 self.ui.status("adding files\n")
755 self.ui.status("adding files\n")
751 while 1:
756 while 1:
752 f = getchunk(4)
757 f = getchunk()
753 if not f: break
758 if not f: break
754 fg = getchunk()
755 self.ui.debug("adding %s revisions\n" % f)
759 self.ui.debug("adding %s revisions\n" % f)
756 fl = self.file(f)
760 fl = self.file(f)
757 o = fl.tip()
761 o = fl.tip()
758 n = fl.addgroup(fg, lambda x: self.changelog.rev(x), tr)
762 n = fl.addgroup(getgroup(), lambda x: self.changelog.rev(x), tr)
759 revisions += fl.rev(n) - fl.rev(o)
763 revisions += fl.rev(n) - fl.rev(o)
760 files += 1
764 files += 1
761 if f in need:
765 if f in need:
762 del need[f]
766 del need[f]
763 # manifest resolve determined we need to merge the tips
767 # manifest resolve determined we need to merge the tips
764 nmap[f] = self.merge3(fl, f, o, n, tr, resolverev)
768 nmap[f] = self.merge3(fl, f, o, n, tr, resolverev)
765
769
766 if need:
770 if need:
767 # we need to do trivial merges on local files
771 # we need to do trivial merges on local files
768 for f in new:
772 for f in new:
769 if f not in need: continue
773 if f not in need: continue
770 fl = self.file(f)
774 fl = self.file(f)
771 nmap[f] = self.merge3(fl, f, need[f], fl.tip(), tr, resolverev)
775 nmap[f] = self.merge3(fl, f, need[f], fl.tip(), tr, resolverev)
772 revisions += 1
776 revisions += 1
773
777
774 # For simple merges, we don't need to resolve manifests or changesets
778 # For simple merges, we don't need to resolve manifests or changesets
775 if simple:
779 if simple:
776 self.ui.debug("simple merge, skipping resolve\n")
780 self.ui.debug("simple merge, skipping resolve\n")
777 self.ui.status(("added %d changesets, %d files," +
781 self.ui.status(("modified %d files, added %d changesets" +
778 " and %d new revisions\n")
782 " and %d new revisions\n")
779 % (changesets, files, revisions))
783 % (files, changesets, revisions))
780 tr.close()
784 tr.close()
781 return
785 return
782
786
783 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
787 node = self.manifest.add(nmap, tr, resolverev, mm, mo)
784 revisions += 1
788 revisions += 1
785
789
786 # Now all files and manifests are merged, we add the changed files
790 # Now all files and manifests are merged, we add the changed files
787 # and manifest id to the changelog
791 # and manifest id to the changelog
788 self.ui.status("committing merge changeset\n")
792 self.ui.status("committing merge changeset\n")
789 if co == cn: cn = -1
793 if co == cn: cn = -1
790
794
791 edittext = "\nHG: merge resolve\n" + \
795 edittext = "\nHG: merge resolve\n" + \
792 "HG: manifest hash %s\n" % hex(node) + \
796 "HG: manifest hash %s\n" % hex(node) + \
793 "".join(["HG: changed %s\n" % f for f in new])
797 "".join(["HG: changed %s\n" % f for f in new])
794 edittext = self.ui.edit(edittext)
798 edittext = self.ui.edit(edittext)
795 n = self.changelog.add(node, new, edittext, tr, co, cn)
799 n = self.changelog.add(node, new, edittext, tr, co, cn)
796 revisions += 1
800 revisions += 1
797
801
798 self.ui.status("added %d changesets, %d files, and %d new revisions\n"
802 self.ui.status("added %d changesets, %d files, and %d new revisions\n"
799 % (changesets, files, revisions))
803 % (changesets, files, revisions))
800
804
801 tr.close()
805 tr.close()
802
806
803 def merge3(self, fl, fn, my, other, transaction, link):
807 def merge3(self, fl, fn, my, other, transaction, link):
804 """perform a 3-way merge and append the result"""
808 """perform a 3-way merge and append the result"""
805
809
806 def temp(prefix, node):
810 def temp(prefix, node):
807 pre = "%s~%s." % (os.path.basename(fn), prefix)
811 pre = "%s~%s." % (os.path.basename(fn), prefix)
808 (fd, name) = tempfile.mkstemp("", pre)
812 (fd, name) = tempfile.mkstemp("", pre)
809 f = os.fdopen(fd, "w")
813 f = os.fdopen(fd, "w")
810 f.write(fl.revision(node))
814 f.write(fl.revision(node))
811 f.close()
815 f.close()
812 return name
816 return name
813
817
814 base = fl.ancestor(my, other)
818 base = fl.ancestor(my, other)
815 self.ui.note("resolving %s\n" % fn)
819 self.ui.note("resolving %s\n" % fn)
816 self.ui.debug("local %s remote %s ancestor %s\n" %
820 self.ui.debug("local %s remote %s ancestor %s\n" %
817 (short(my), short(other), short(base)))
821 (short(my), short(other), short(base)))
818
822
819 if my == base:
823 if my == base:
820 text = fl.revision(other)
824 text = fl.revision(other)
821 else:
825 else:
822 a = temp("local", my)
826 a = temp("local", my)
823 b = temp("remote", other)
827 b = temp("remote", other)
824 c = temp("parent", base)
828 c = temp("parent", base)
825
829
826 cmd = os.environ["HGMERGE"]
830 cmd = os.environ["HGMERGE"]
827 self.ui.debug("invoking merge with %s\n" % cmd)
831 self.ui.debug("invoking merge with %s\n" % cmd)
828 r = os.system("%s %s %s %s %s" % (cmd, a, b, c, fn))
832 r = os.system("%s %s %s %s %s" % (cmd, a, b, c, fn))
829 if r:
833 if r:
830 raise "Merge failed!"
834 raise "Merge failed!"
831
835
832 text = open(a).read()
836 text = open(a).read()
833 os.unlink(a)
837 os.unlink(a)
834 os.unlink(b)
838 os.unlink(b)
835 os.unlink(c)
839 os.unlink(c)
836
840
837 return fl.add(text, transaction, link, my, other)
841 return fl.add(text, transaction, link, my, other)
838
842
839 class remoterepository:
843 class remoterepository:
840 def __init__(self, ui, path):
844 def __init__(self, ui, path):
841 self.url = path
845 self.url = path
842 self.ui = ui
846 self.ui = ui
843
847
844 def do_cmd(self, cmd, **args):
848 def do_cmd(self, cmd, **args):
845 self.ui.debug("sending %s command\n" % cmd)
849 self.ui.debug("sending %s command\n" % cmd)
846 q = {"cmd": cmd}
850 q = {"cmd": cmd}
847 q.update(args)
851 q.update(args)
848 qs = urllib.urlencode(q)
852 qs = urllib.urlencode(q)
849 cu = "%s?%s" % (self.url, qs)
853 cu = "%s?%s" % (self.url, qs)
850 return urllib.urlopen(cu)
854 return urllib.urlopen(cu)
851
855
852 def branches(self, nodes):
856 def branches(self, nodes):
853 n = " ".join(map(hex, nodes))
857 n = " ".join(map(hex, nodes))
854 d = self.do_cmd("branches", nodes=n).read()
858 d = self.do_cmd("branches", nodes=n).read()
855 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
859 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
856 return br
860 return br
857
861
858 def between(self, pairs):
862 def between(self, pairs):
859 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
863 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
860 d = self.do_cmd("between", pairs=n).read()
864 d = self.do_cmd("between", pairs=n).read()
861 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
865 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
862 return p
866 return p
863
867
864 def changegroup(self, nodes):
868 def changegroup(self, nodes):
865 n = " ".join(map(hex, nodes))
869 n = " ".join(map(hex, nodes))
866 zd = zlib.decompressobj()
870 zd = zlib.decompressobj()
867 f = self.do_cmd("changegroup", roots=n)
871 f = self.do_cmd("changegroup", roots=n)
872 bytes = 0
868 while 1:
873 while 1:
869 d = f.read(4096)
874 d = f.read(4096)
875 bytes += len(d)
870 if not d:
876 if not d:
871 yield zd.flush()
877 yield zd.flush()
872 break
878 break
873 yield zd.decompress(d)
879 yield zd.decompress(d)
880 self.ui.note("%d bytes of data transfered\n" % bytes)
874
881
875 def repository(ui, path=None, create=0):
882 def repository(ui, path=None, create=0):
876 if path and path[:7] == "http://":
883 if path and path[:7] == "http://":
877 return remoterepository(ui, path)
884 return remoterepository(ui, path)
878 if path and path[:5] == "hg://":
885 if path and path[:5] == "hg://":
879 return remoterepository(ui, path.replace("hg://", "http://"))
886 return remoterepository(ui, path.replace("hg://", "http://"))
880 if path and path[:11] == "old-http://":
887 if path and path[:11] == "old-http://":
881 return localrepository(ui, path.replace("old-http://", "http://"))
888 return localrepository(ui, path.replace("old-http://", "http://"))
882 else:
889 else:
883 return localrepository(ui, path, create)
890 return localrepository(ui, path, create)
884
891
885 class ui:
892 class ui:
886 def __init__(self, verbose=False, debug=False, quiet=False,
893 def __init__(self, verbose=False, debug=False, quiet=False,
887 interactive=True):
894 interactive=True):
888 self.quiet = quiet and not verbose and not debug
895 self.quiet = quiet and not verbose and not debug
889 self.verbose = verbose or debug
896 self.verbose = verbose or debug
890 self.debugflag = debug
897 self.debugflag = debug
891 self.interactive = interactive
898 self.interactive = interactive
892 def write(self, *args):
899 def write(self, *args):
893 for a in args:
900 for a in args:
894 sys.stdout.write(str(a))
901 sys.stdout.write(str(a))
895 def readline(self):
902 def readline(self):
896 return sys.stdin.readline()[:-1]
903 return sys.stdin.readline()[:-1]
897 def prompt(self, msg, pat, default = "y"):
904 def prompt(self, msg, pat, default = "y"):
898 if not self.interactive: return default
905 if not self.interactive: return default
899 while 1:
906 while 1:
900 self.write(msg, " ")
907 self.write(msg, " ")
901 r = self.readline()
908 r = self.readline()
902 if re.match(pat, r):
909 if re.match(pat, r):
903 return r
910 return r
904 else:
911 else:
905 self.write("unrecognized response\n")
912 self.write("unrecognized response\n")
906 def status(self, *msg):
913 def status(self, *msg):
907 if not self.quiet: self.write(*msg)
914 if not self.quiet: self.write(*msg)
908 def warn(self, msg):
915 def warn(self, msg):
909 self.write(*msg)
916 self.write(*msg)
910 def note(self, *msg):
917 def note(self, *msg):
911 if self.verbose: self.write(*msg)
918 if self.verbose: self.write(*msg)
912 def debug(self, *msg):
919 def debug(self, *msg):
913 if self.debugflag: self.write(*msg)
920 if self.debugflag: self.write(*msg)
914 def edit(self, text):
921 def edit(self, text):
915 (fd, name) = tempfile.mkstemp("hg")
922 (fd, name) = tempfile.mkstemp("hg")
916 f = os.fdopen(fd, "w")
923 f = os.fdopen(fd, "w")
917 f.write(text)
924 f.write(text)
918 f.close()
925 f.close()
919
926
920 editor = os.environ.get("HGEDITOR") or os.environ.get("EDITOR", "vi")
927 editor = os.environ.get("HGEDITOR") or os.environ.get("EDITOR", "vi")
921 r = os.system("%s %s" % (editor, name))
928 r = os.system("%s %s" % (editor, name))
922
929
923 if r:
930 if r:
924 raise "Edit failed!"
931 raise "Edit failed!"
925
932
926 t = open(name).read()
933 t = open(name).read()
927 t = re.sub("(?m)^HG:.*\n", "", t)
934 t = re.sub("(?m)^HG:.*\n", "", t)
928
935
929 return t
936 return t
930
937
931 class httprangereader:
938 class httprangereader:
932 def __init__(self, url):
939 def __init__(self, url):
933 self.url = url
940 self.url = url
934 self.pos = 0
941 self.pos = 0
935 def seek(self, pos):
942 def seek(self, pos):
936 self.pos = pos
943 self.pos = pos
937 def read(self, bytes=None):
944 def read(self, bytes=None):
938 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
945 opener = urllib2.build_opener(byterange.HTTPRangeHandler())
939 urllib2.install_opener(opener)
946 urllib2.install_opener(opener)
940 req = urllib2.Request(self.url)
947 req = urllib2.Request(self.url)
941 end = ''
948 end = ''
942 if bytes: end = self.pos + bytes
949 if bytes: end = self.pos + bytes
943 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
950 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
944 f = urllib2.urlopen(req)
951 f = urllib2.urlopen(req)
945 return f.read()
952 return f.read()
@@ -1,497 +1,496 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # This provides efficient delta storage with O(1) retrieve and append
3 # This provides efficient delta storage with O(1) retrieve and append
4 # and O(changes) merge between branches
4 # and O(changes) merge between branches
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 import zlib, struct, sha, os, tempfile, binascii, heapq
11 import zlib, struct, sha, os, tempfile, binascii, heapq
12 from mercurial import mdiff
12 from mercurial import mdiff
13
13
14 def hex(node): return binascii.hexlify(node)
14 def hex(node): return binascii.hexlify(node)
15 def bin(node): return binascii.unhexlify(node)
15 def bin(node): return binascii.unhexlify(node)
16 def short(node): return hex(node[:4])
16 def short(node): return hex(node[:4])
17
17
18 def compress(text):
18 def compress(text):
19 if not text: return text
19 if not text: return text
20 if len(text) < 44:
20 if len(text) < 44:
21 if text[0] == '\0': return text
21 if text[0] == '\0': return text
22 return 'u' + text
22 return 'u' + text
23 bin = zlib.compress(text)
23 bin = zlib.compress(text)
24 if len(bin) > len(text):
24 if len(bin) > len(text):
25 if text[0] == '\0': return text
25 if text[0] == '\0': return text
26 return 'u' + text
26 return 'u' + text
27 return bin
27 return bin
28
28
29 def decompress(bin):
29 def decompress(bin):
30 if not bin: return bin
30 if not bin: return bin
31 t = bin[0]
31 t = bin[0]
32 if t == '\0': return bin
32 if t == '\0': return bin
33 if t == 'x': return zlib.decompress(bin)
33 if t == 'x': return zlib.decompress(bin)
34 if t == 'u': return bin[1:]
34 if t == 'u': return bin[1:]
35 raise "unknown compression type %s" % t
35 raise "unknown compression type %s" % t
36
36
37 def hash(text, p1, p2):
37 def hash(text, p1, p2):
38 l = [p1, p2]
38 l = [p1, p2]
39 l.sort()
39 l.sort()
40 return sha.sha(l[0] + l[1] + text).digest()
40 return sha.sha(l[0] + l[1] + text).digest()
41
41
42 nullid = "\0" * 20
42 nullid = "\0" * 20
43 indexformat = ">4l20s20s20s"
43 indexformat = ">4l20s20s20s"
44
44
45 class lazyparser:
45 class lazyparser:
46 def __init__(self, data):
46 def __init__(self, data):
47 self.data = data
47 self.data = data
48 self.s = struct.calcsize(indexformat)
48 self.s = struct.calcsize(indexformat)
49 self.l = len(data)/self.s
49 self.l = len(data)/self.s
50 self.index = [None] * self.l
50 self.index = [None] * self.l
51 self.map = {nullid: -1}
51 self.map = {nullid: -1}
52
52
53 def load(self, pos):
53 def load(self, pos):
54 block = pos / 1000
54 block = pos / 1000
55 i = block * 1000
55 i = block * 1000
56 end = min(self.l, i + 1000)
56 end = min(self.l, i + 1000)
57 while i < end:
57 while i < end:
58 d = self.data[i * self.s: (i + 1) * self.s]
58 d = self.data[i * self.s: (i + 1) * self.s]
59 e = struct.unpack(indexformat, d)
59 e = struct.unpack(indexformat, d)
60 self.index[i] = e
60 self.index[i] = e
61 self.map[e[6]] = i
61 self.map[e[6]] = i
62 i += 1
62 i += 1
63
63
64 class lazyindex:
64 class lazyindex:
65 def __init__(self, parser):
65 def __init__(self, parser):
66 self.p = parser
66 self.p = parser
67 def __len__(self):
67 def __len__(self):
68 return len(self.p.index)
68 return len(self.p.index)
69 def load(self, pos):
69 def load(self, pos):
70 self.p.load(pos)
70 self.p.load(pos)
71 return self.p.index[pos]
71 return self.p.index[pos]
72 def __getitem__(self, pos):
72 def __getitem__(self, pos):
73 return self.p.index[pos] or self.load(pos)
73 return self.p.index[pos] or self.load(pos)
74 def append(self, e):
74 def append(self, e):
75 self.p.index.append(e)
75 self.p.index.append(e)
76
76
77 class lazymap:
77 class lazymap:
78 def __init__(self, parser):
78 def __init__(self, parser):
79 self.p = parser
79 self.p = parser
80 def load(self, key):
80 def load(self, key):
81 n = self.p.data.find(key)
81 n = self.p.data.find(key)
82 if n < 0: raise KeyError("node " + hex(key))
82 if n < 0: raise KeyError("node " + hex(key))
83 pos = n / self.p.s
83 pos = n / self.p.s
84 self.p.load(pos)
84 self.p.load(pos)
85 def __contains__(self, key):
85 def __contains__(self, key):
86 try:
86 try:
87 self[key]
87 self[key]
88 return True
88 return True
89 except KeyError:
89 except KeyError:
90 return False
90 return False
91 def __iter__(self):
91 def __iter__(self):
92 for i in xrange(self.p.l):
92 for i in xrange(self.p.l):
93 try:
93 try:
94 yield self.p.index[i][6]
94 yield self.p.index[i][6]
95 except:
95 except:
96 self.p.load(i)
96 self.p.load(i)
97 yield self.p.index[i][6]
97 yield self.p.index[i][6]
98 def __getitem__(self, key):
98 def __getitem__(self, key):
99 try:
99 try:
100 return self.p.map[key]
100 return self.p.map[key]
101 except KeyError:
101 except KeyError:
102 try:
102 try:
103 self.load(key)
103 self.load(key)
104 return self.p.map[key]
104 return self.p.map[key]
105 except KeyError:
105 except KeyError:
106 raise KeyError("node " + hex(key))
106 raise KeyError("node " + hex(key))
107 def __setitem__(self, key, val):
107 def __setitem__(self, key, val):
108 self.p.map[key] = val
108 self.p.map[key] = val
109
109
110 class revlog:
110 class revlog:
111 def __init__(self, opener, indexfile, datafile):
111 def __init__(self, opener, indexfile, datafile):
112 self.indexfile = indexfile
112 self.indexfile = indexfile
113 self.datafile = datafile
113 self.datafile = datafile
114 self.opener = opener
114 self.opener = opener
115 self.cache = None
115 self.cache = None
116
116
117 try:
117 try:
118 i = self.opener(self.indexfile).read()
118 i = self.opener(self.indexfile).read()
119 except IOError:
119 except IOError:
120 i = ""
120 i = ""
121
121
122 if len(i) > 10000:
122 if len(i) > 10000:
123 # big index, let's parse it on demand
123 # big index, let's parse it on demand
124 parser = lazyparser(i)
124 parser = lazyparser(i)
125 self.index = lazyindex(parser)
125 self.index = lazyindex(parser)
126 self.nodemap = lazymap(parser)
126 self.nodemap = lazymap(parser)
127 else:
127 else:
128 s = struct.calcsize(indexformat)
128 s = struct.calcsize(indexformat)
129 l = len(i) / s
129 l = len(i) / s
130 self.index = [None] * l
130 self.index = [None] * l
131 m = [None] * l
131 m = [None] * l
132
132
133 n = 0
133 n = 0
134 for f in xrange(0, len(i), s):
134 for f in xrange(0, len(i), s):
135 # offset, size, base, linkrev, p1, p2, nodeid
135 # offset, size, base, linkrev, p1, p2, nodeid
136 e = struct.unpack(indexformat, i[f:f + s])
136 e = struct.unpack(indexformat, i[f:f + s])
137 m[n] = (e[6], n)
137 m[n] = (e[6], n)
138 self.index[n] = e
138 self.index[n] = e
139 n += 1
139 n += 1
140
140
141 self.nodemap = dict(m)
141 self.nodemap = dict(m)
142 self.nodemap[nullid] = -1
142 self.nodemap[nullid] = -1
143
143
144
144
145 def tip(self): return self.node(len(self.index) - 1)
145 def tip(self): return self.node(len(self.index) - 1)
146 def count(self): return len(self.index)
146 def count(self): return len(self.index)
147 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
147 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
148 def rev(self, node): return self.nodemap[node]
148 def rev(self, node): return self.nodemap[node]
149 def linkrev(self, node): return self.index[self.nodemap[node]][3]
149 def linkrev(self, node): return self.index[self.nodemap[node]][3]
150 def parents(self, node):
150 def parents(self, node):
151 if node == nullid: return (nullid, nullid)
151 if node == nullid: return (nullid, nullid)
152 return self.index[self.nodemap[node]][4:6]
152 return self.index[self.nodemap[node]][4:6]
153
153
154 def start(self, rev): return self.index[rev][0]
154 def start(self, rev): return self.index[rev][0]
155 def length(self, rev): return self.index[rev][1]
155 def length(self, rev): return self.index[rev][1]
156 def end(self, rev): return self.start(rev) + self.length(rev)
156 def end(self, rev): return self.start(rev) + self.length(rev)
157 def base(self, rev): return self.index[rev][2]
157 def base(self, rev): return self.index[rev][2]
158
158
159 def lookup(self, id):
159 def lookup(self, id):
160 try:
160 try:
161 rev = int(id)
161 rev = int(id)
162 return self.node(rev)
162 return self.node(rev)
163 except ValueError:
163 except ValueError:
164 c = []
164 c = []
165 for n in self.nodemap:
165 for n in self.nodemap:
166 if id in hex(n):
166 if id in hex(n):
167 c.append(n)
167 c.append(n)
168 if len(c) > 1: raise KeyError("Ambiguous identifier")
168 if len(c) > 1: raise KeyError("Ambiguous identifier")
169 if len(c) < 1: raise KeyError("No match found")
169 if len(c) < 1: raise KeyError("No match found")
170 return c[0]
170 return c[0]
171
171
172 return None
172 return None
173
173
174 def diff(self, a, b):
174 def diff(self, a, b):
175 return mdiff.textdiff(a, b)
175 return mdiff.textdiff(a, b)
176
176
177 def patches(self, t, pl):
177 def patches(self, t, pl):
178 return mdiff.patches(t, pl)
178 return mdiff.patches(t, pl)
179
179
180 def delta(self, node):
180 def delta(self, node):
181 r = self.rev(node)
181 r = self.rev(node)
182 b = self.base(r)
182 b = self.base(r)
183 if r == b:
183 if r == b:
184 return self.diff(self.revision(self.node(r - 1)),
184 return self.diff(self.revision(self.node(r - 1)),
185 self.revision(node))
185 self.revision(node))
186 else:
186 else:
187 f = self.opener(self.datafile)
187 f = self.opener(self.datafile)
188 f.seek(self.start(r))
188 f.seek(self.start(r))
189 data = f.read(self.length(r))
189 data = f.read(self.length(r))
190 return decompress(data)
190 return decompress(data)
191
191
192 def revision(self, node):
192 def revision(self, node):
193 if node == nullid: return ""
193 if node == nullid: return ""
194 if self.cache and self.cache[0] == node: return self.cache[2]
194 if self.cache and self.cache[0] == node: return self.cache[2]
195
195
196 text = None
196 text = None
197 rev = self.rev(node)
197 rev = self.rev(node)
198 start, length, base, link, p1, p2, node = self.index[rev]
198 start, length, base, link, p1, p2, node = self.index[rev]
199 end = start + length
199 end = start + length
200 if base != rev: start = self.start(base)
200 if base != rev: start = self.start(base)
201
201
202 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
202 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
203 base = self.cache[1]
203 base = self.cache[1]
204 start = self.start(base + 1)
204 start = self.start(base + 1)
205 text = self.cache[2]
205 text = self.cache[2]
206 last = 0
206 last = 0
207
207
208 f = self.opener(self.datafile)
208 f = self.opener(self.datafile)
209 f.seek(start)
209 f.seek(start)
210 data = f.read(end - start)
210 data = f.read(end - start)
211
211
212 if not text:
212 if not text:
213 last = self.length(base)
213 last = self.length(base)
214 text = decompress(data[:last])
214 text = decompress(data[:last])
215
215
216 bins = []
216 bins = []
217 for r in xrange(base + 1, rev + 1):
217 for r in xrange(base + 1, rev + 1):
218 s = self.length(r)
218 s = self.length(r)
219 bins.append(decompress(data[last:last + s]))
219 bins.append(decompress(data[last:last + s]))
220 last = last + s
220 last = last + s
221
221
222 text = mdiff.patches(text, bins)
222 text = mdiff.patches(text, bins)
223
223
224 if node != hash(text, p1, p2):
224 if node != hash(text, p1, p2):
225 raise IOError("integrity check failed on %s:%d"
225 raise IOError("integrity check failed on %s:%d"
226 % (self.datafile, rev))
226 % (self.datafile, rev))
227
227
228 self.cache = (node, rev, text)
228 self.cache = (node, rev, text)
229 return text
229 return text
230
230
231 def addrevision(self, text, transaction, link, p1=None, p2=None):
231 def addrevision(self, text, transaction, link, p1=None, p2=None):
232 if text is None: text = ""
232 if text is None: text = ""
233 if p1 is None: p1 = self.tip()
233 if p1 is None: p1 = self.tip()
234 if p2 is None: p2 = nullid
234 if p2 is None: p2 = nullid
235
235
236 node = hash(text, p1, p2)
236 node = hash(text, p1, p2)
237
237
238 n = self.count()
238 n = self.count()
239 t = n - 1
239 t = n - 1
240
240
241 if n:
241 if n:
242 base = self.base(t)
242 base = self.base(t)
243 start = self.start(base)
243 start = self.start(base)
244 end = self.end(t)
244 end = self.end(t)
245 prev = self.revision(self.tip())
245 prev = self.revision(self.tip())
246 d = self.diff(prev, text)
246 d = self.diff(prev, text)
247 if self.patches(prev, [d]) != text:
248 raise AssertionError("diff failed")
249 data = compress(d)
247 data = compress(d)
250 dist = end - start + len(data)
248 dist = end - start + len(data)
251
249
252 # full versions are inserted when the needed deltas
250 # full versions are inserted when the needed deltas
253 # become comparable to the uncompressed text
251 # become comparable to the uncompressed text
254 if not n or dist > len(text) * 2:
252 if not n or dist > len(text) * 2:
255 data = compress(text)
253 data = compress(text)
256 base = n
254 base = n
257 else:
255 else:
258 base = self.base(t)
256 base = self.base(t)
259
257
260 offset = 0
258 offset = 0
261 if t >= 0:
259 if t >= 0:
262 offset = self.end(t)
260 offset = self.end(t)
263
261
264 e = (offset, len(data), base, link, p1, p2, node)
262 e = (offset, len(data), base, link, p1, p2, node)
265
263
266 self.index.append(e)
264 self.index.append(e)
267 self.nodemap[node] = n
265 self.nodemap[node] = n
268 entry = struct.pack(indexformat, *e)
266 entry = struct.pack(indexformat, *e)
269
267
270 transaction.add(self.datafile, e[0])
268 transaction.add(self.datafile, e[0])
271 self.opener(self.datafile, "a").write(data)
269 self.opener(self.datafile, "a").write(data)
272 transaction.add(self.indexfile, n * len(entry))
270 transaction.add(self.indexfile, n * len(entry))
273 self.opener(self.indexfile, "a").write(entry)
271 self.opener(self.indexfile, "a").write(entry)
274
272
275 self.cache = (node, n, text)
273 self.cache = (node, n, text)
276 return node
274 return node
277
275
278 def ancestor(self, a, b):
276 def ancestor(self, a, b):
279 # calculate the distance of every node from root
277 # calculate the distance of every node from root
280 dist = {nullid: 0}
278 dist = {nullid: 0}
281 for i in xrange(self.count()):
279 for i in xrange(self.count()):
282 n = self.node(i)
280 n = self.node(i)
283 p1, p2 = self.parents(n)
281 p1, p2 = self.parents(n)
284 dist[n] = max(dist[p1], dist[p2]) + 1
282 dist[n] = max(dist[p1], dist[p2]) + 1
285
283
286 # traverse ancestors in order of decreasing distance from root
284 # traverse ancestors in order of decreasing distance from root
287 def ancestors(node):
285 def ancestors(node):
288 # we store negative distances because heap returns smallest member
286 # we store negative distances because heap returns smallest member
289 h = [(-dist[node], node)]
287 h = [(-dist[node], node)]
290 seen = {}
288 seen = {}
291 earliest = self.count()
289 earliest = self.count()
292 while h:
290 while h:
293 d, n = heapq.heappop(h)
291 d, n = heapq.heappop(h)
294 r = self.rev(n)
292 r = self.rev(n)
295 if n not in seen:
293 if n not in seen:
296 seen[n] = 1
294 seen[n] = 1
297 yield (-d, n)
295 yield (-d, n)
298 for p in self.parents(n):
296 for p in self.parents(n):
299 heapq.heappush(h, (-dist[p], p))
297 heapq.heappush(h, (-dist[p], p))
300
298
301 x = ancestors(a)
299 x = ancestors(a)
302 y = ancestors(b)
300 y = ancestors(b)
303 lx = x.next()
301 lx = x.next()
304 ly = y.next()
302 ly = y.next()
305
303
306 # increment each ancestor list until it is closer to root than
304 # increment each ancestor list until it is closer to root than
307 # the other, or they match
305 # the other, or they match
308 while 1:
306 while 1:
309 if lx == ly:
307 if lx == ly:
310 return lx[1]
308 return lx[1]
311 elif lx < ly:
309 elif lx < ly:
312 ly = y.next()
310 ly = y.next()
313 elif lx > ly:
311 elif lx > ly:
314 lx = x.next()
312 lx = x.next()
315
313
316 def group(self, linkmap):
314 def group(self, linkmap):
317 # given a list of changeset revs, return a set of deltas and
315 # given a list of changeset revs, return a set of deltas and
318 # metadata corresponding to nodes. the first delta is
316 # metadata corresponding to nodes. the first delta is
319 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
317 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
320 # have this parent as it has all history before these
318 # have this parent as it has all history before these
321 # changesets. parent is parent[0]
319 # changesets. parent is parent[0]
322
320
323 revs = []
321 revs = []
324 needed = {}
322 needed = {}
325
323
326 # find file nodes/revs that match changeset revs
324 # find file nodes/revs that match changeset revs
327 for i in xrange(0, self.count()):
325 for i in xrange(0, self.count()):
328 if self.index[i][3] in linkmap:
326 if self.index[i][3] in linkmap:
329 revs.append(i)
327 revs.append(i)
330 needed[i] = 1
328 needed[i] = 1
331
329
332 # if we don't have any revisions touched by these changesets, bail
330 # if we don't have any revisions touched by these changesets, bail
333 if not revs: return struct.pack(">l", 0)
331 if not revs:
332 yield struct.pack(">l", 0)
333 return
334
334
335 # add the parent of the first rev
335 # add the parent of the first rev
336 p = self.parents(self.node(revs[0]))[0]
336 p = self.parents(self.node(revs[0]))[0]
337 revs.insert(0, self.rev(p))
337 revs.insert(0, self.rev(p))
338
338
339 # for each delta that isn't contiguous in the log, we need to
339 # for each delta that isn't contiguous in the log, we need to
340 # reconstruct the base, reconstruct the result, and then
340 # reconstruct the base, reconstruct the result, and then
341 # calculate the delta. We also need to do this where we've
341 # calculate the delta. We also need to do this where we've
342 # stored a full version and not a delta
342 # stored a full version and not a delta
343 for i in xrange(0, len(revs) - 1):
343 for i in xrange(0, len(revs) - 1):
344 a, b = revs[i], revs[i + 1]
344 a, b = revs[i], revs[i + 1]
345 if a + 1 != b or self.base(b) == b:
345 if a + 1 != b or self.base(b) == b:
346 for j in xrange(self.base(a), a + 1):
346 for j in xrange(self.base(a), a + 1):
347 needed[j] = 1
347 needed[j] = 1
348 for j in xrange(self.base(b), b + 1):
348 for j in xrange(self.base(b), b + 1):
349 needed[j] = 1
349 needed[j] = 1
350
350
351 # calculate spans to retrieve from datafile
351 # calculate spans to retrieve from datafile
352 needed = needed.keys()
352 needed = needed.keys()
353 needed.sort()
353 needed.sort()
354 spans = []
354 spans = []
355 oo = -1
356 ol = 0
355 for n in needed:
357 for n in needed:
356 if n < 0: continue
358 if n < 0: continue
357 o = self.start(n)
359 o = self.start(n)
358 l = self.length(n)
360 l = self.length(n)
359 spans.append((o, l, [(n, l)]))
361 if oo + ol == o: # can we merge with the previous?
360
362 nl = spans[-1][2]
361 # merge spans
363 nl.append((n, l))
362 merge = [spans.pop(0)]
364 ol += l
363 while spans:
365 spans[-1] = (oo, ol, nl)
364 e = spans.pop(0)
365 f = merge[-1]
366 if e[0] == f[0] + f[1]:
367 merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
368 else:
366 else:
369 merge.append(e)
367 oo = o
368 ol = l
369 spans.append((oo, ol, [(n, l)]))
370
370
371 # read spans in, divide up chunks
371 # read spans in, divide up chunks
372 chunks = {}
372 chunks = {}
373 for span in merge:
373 for span in spans:
374 # we reopen the file for each span to make http happy for now
374 # we reopen the file for each span to make http happy for now
375 f = self.opener(self.datafile)
375 f = self.opener(self.datafile)
376 f.seek(span[0])
376 f.seek(span[0])
377 data = f.read(span[1])
377 data = f.read(span[1])
378
378
379 # divide up the span
379 # divide up the span
380 pos = 0
380 pos = 0
381 for r, l in span[2]:
381 for r, l in span[2]:
382 chunks[r] = data[pos: pos + l]
382 chunks[r] = decompress(data[pos: pos + l])
383 pos += l
383 pos += l
384
384
385 # helper to reconstruct intermediate versions
385 # helper to reconstruct intermediate versions
386 def construct(text, base, rev):
386 def construct(text, base, rev):
387 bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
387 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
388 return mdiff.patches(text, bins)
388 return mdiff.patches(text, bins)
389
389
390 # build deltas
390 # build deltas
391 deltas = []
391 deltas = []
392 for d in xrange(0, len(revs) - 1):
392 for d in xrange(0, len(revs) - 1):
393 a, b = revs[d], revs[d + 1]
393 a, b = revs[d], revs[d + 1]
394 n = self.node(b)
394 n = self.node(b)
395
395
396 # do we need to construct a new delta?
396 if a + 1 != b or self.base(b) == b:
397 if a + 1 != b or self.base(b) == b:
397 if a >= 0:
398 if a >= 0:
398 base = self.base(a)
399 base = self.base(a)
399 ta = decompress(chunks[self.base(a)])
400 ta = chunks[self.base(a)]
400 ta = construct(ta, base, a)
401 ta = construct(ta, base, a)
401 else:
402 else:
402 ta = ""
403 ta = ""
403
404
404 base = self.base(b)
405 base = self.base(b)
405 if a > base:
406 if a > base:
406 base = a
407 base = a
407 tb = ta
408 tb = ta
408 else:
409 else:
409 tb = decompress(chunks[self.base(b)])
410 tb = chunks[self.base(b)]
410 tb = construct(tb, base, b)
411 tb = construct(tb, base, b)
411 d = self.diff(ta, tb)
412 d = self.diff(ta, tb)
412 else:
413 else:
413 d = decompress(chunks[b])
414 d = chunks[b]
414
415
415 p = self.parents(n)
416 p = self.parents(n)
416 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
417 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
417 l = struct.pack(">l", len(meta) + len(d) + 4)
418 l = struct.pack(">l", len(meta) + len(d) + 4)
418 deltas.append(l + meta + d)
419 yield l
420 yield meta
421 yield d
419
422
420 l = struct.pack(">l", sum(map(len, deltas)) + 4)
423 yield struct.pack(">l", 0)
421 deltas.insert(0, l)
422 return "".join(deltas)
423
424
424 def addgroup(self, data, linkmapper, transaction):
425 def addgroup(self, revs, linkmapper, transaction):
425 # given a set of deltas, add them to the revision log. the
426 # given a set of deltas, add them to the revision log. the
426 # first delta is against its parent, which should be in our
427 # first delta is against its parent, which should be in our
427 # log, the rest are against the previous delta.
428 # log, the rest are against the previous delta.
428
429
429 if not data: return self.tip()
430
431 # retrieve the parent revision of the delta chain
432 chain = data[24:44]
433 if not chain in self.nodemap:
434 raise "unknown base %s" % short(chain[:4])
435
436 # track the base of the current delta log
430 # track the base of the current delta log
437 r = self.count()
431 r = self.count()
438 t = r - 1
432 t = r - 1
433 node = nullid
439
434
440 base = prev = -1
435 base = prev = -1
441 start = end = 0
436 start = end = 0
442 if r:
437 if r:
443 start = self.start(self.base(t))
438 start = self.start(self.base(t))
444 end = self.end(t)
439 end = self.end(t)
445 measure = self.length(self.base(t))
440 measure = self.length(self.base(t))
446 base = self.base(t)
441 base = self.base(t)
447 prev = self.tip()
442 prev = self.tip()
448
443
449 transaction.add(self.datafile, end)
444 transaction.add(self.datafile, end)
450 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
445 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
451 dfh = self.opener(self.datafile, "a")
446 dfh = self.opener(self.datafile, "a")
452 ifh = self.opener(self.indexfile, "a")
447 ifh = self.opener(self.indexfile, "a")
453
448
454 # loop through our set of deltas
449 # loop through our set of deltas
455 pos = 0
450 chain = None
456 while pos < len(data):
451 for chunk in revs:
457 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
452 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
458 data[pos:pos+84])
459 link = linkmapper(cs)
453 link = linkmapper(cs)
460 if node in self.nodemap:
454 if node in self.nodemap:
461 raise "already have %s" % hex(node[:4])
455 raise "already have %s" % hex(node[:4])
462 delta = data[pos + 84:pos + l]
456 delta = chunk[80:]
463 pos += l
457
458 if not chain:
459 # retrieve the parent revision of the delta chain
460 chain = p1
461 if not chain in self.nodemap:
462 raise "unknown base %s" % short(chain[:4])
464
463
465 # full versions are inserted when the needed deltas become
464 # full versions are inserted when the needed deltas become
466 # comparable to the uncompressed text or when the previous
465 # comparable to the uncompressed text or when the previous
467 # version is not the one we have a delta against. We use
466 # version is not the one we have a delta against. We use
468 # the size of the previous full rev as a proxy for the
467 # the size of the previous full rev as a proxy for the
469 # current size.
468 # current size.
470
469
471 if chain == prev:
470 if chain == prev:
472 cdelta = compress(delta)
471 cdelta = compress(delta)
473
472
474 if chain != prev or (end - start + len(cdelta)) > measure * 2:
473 if chain != prev or (end - start + len(cdelta)) > measure * 2:
475 # flush our writes here so we can read it in revision
474 # flush our writes here so we can read it in revision
476 dfh.flush()
475 dfh.flush()
477 ifh.flush()
476 ifh.flush()
478 text = self.revision(chain)
477 text = self.revision(chain)
479 text = self.patches(text, [delta])
478 text = self.patches(text, [delta])
480 chk = self.addrevision(text, transaction, link, p1, p2)
479 chk = self.addrevision(text, transaction, link, p1, p2)
481 if chk != node:
480 if chk != node:
482 raise "consistency error adding group"
481 raise "consistency error adding group"
483 measure = len(text)
482 measure = len(text)
484 else:
483 else:
485 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
484 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
486 self.index.append(e)
485 self.index.append(e)
487 self.nodemap[node] = r
486 self.nodemap[node] = r
488 dfh.write(cdelta)
487 dfh.write(cdelta)
489 ifh.write(struct.pack(indexformat, *e))
488 ifh.write(struct.pack(indexformat, *e))
490
489
491 t, r, chain, prev = r, r + 1, node, node
490 t, r, chain, prev = r, r + 1, node, node
492 start = self.start(self.base(t))
491 start = self.start(self.base(t))
493 end = self.end(t)
492 end = self.end(t)
494
493
495 dfh.close()
494 dfh.close()
496 ifh.close()
495 ifh.close()
497 return node
496 return node
General Comments 0
You need to be logged in to leave comments. Login now