##// END OF EJS Templates
fix bad assumption about uniqueness of file versions...
mpm@selenic.com -
r224:ccbcc4d7 default
parent child Browse files
Show More
@@ -1,542 +1,538 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # mercurial - a minimal scalable distributed SCM
3 # mercurial - a minimal scalable distributed SCM
4 # v0.5b "katje"
4 # v0.5b "katje"
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 # the psyco compiler makes commits a bit faster
11 # the psyco compiler makes commits a bit faster
12 # and makes changegroup merge about 20 times slower!
12 # and makes changegroup merge about 20 times slower!
13 # try:
13 # try:
14 # import psyco
14 # import psyco
15 # psyco.full()
15 # psyco.full()
16 # except:
16 # except:
17 # pass
17 # pass
18
18
19 import sys, os, time
19 import sys, os, time
20 from mercurial import hg, mdiff, fancyopts, ui, commands
20 from mercurial import hg, mdiff, fancyopts, ui, commands
21
21
22 def help():
22 def help():
23 ui.status("""\
23 ui.status("""\
24 commands:
24 commands:
25
25
26 add [files...] add the given files in the next commit
26 add [files...] add the given files in the next commit
27 addremove add all new files, delete all missing files
27 addremove add all new files, delete all missing files
28 annotate [files...] show changeset number per file line
28 annotate [files...] show changeset number per file line
29 branch <path> create a branch of <path> in this directory
29 branch <path> create a branch of <path> in this directory
30 checkout [changeset] checkout the latest or given changeset
30 checkout [changeset] checkout the latest or given changeset
31 commit commit all changes to the repository
31 commit commit all changes to the repository
32 diff [files...] diff working directory (or selected files)
32 diff [files...] diff working directory (or selected files)
33 dump <file> [rev] dump the latest or given revision of a file
33 dump <file> [rev] dump the latest or given revision of a file
34 dumpmanifest [rev] dump the latest or given revision of the manifest
34 dumpmanifest [rev] dump the latest or given revision of the manifest
35 export <rev> dump the changeset header and diffs for a revision
35 export <rev> dump the changeset header and diffs for a revision
36 history show changeset history
36 history show changeset history
37 init create a new repository in this directory
37 init create a new repository in this directory
38 log <file> show revision history of a single file
38 log <file> show revision history of a single file
39 merge <path> merge changes from <path> into local repository
39 merge <path> merge changes from <path> into local repository
40 recover rollback an interrupted transaction
40 recover rollback an interrupted transaction
41 remove [files...] remove the given files in the next commit
41 remove [files...] remove the given files in the next commit
42 serve export the repository via HTTP
42 serve export the repository via HTTP
43 status show new, missing, and changed files in working dir
43 status show new, missing, and changed files in working dir
44 tags show current changeset tags
44 tags show current changeset tags
45 undo undo the last transaction
45 undo undo the last transaction
46 """)
46 """)
47
47
48 def filterfiles(list, files):
48 def filterfiles(list, files):
49 l = [ x for x in list if x in files ]
49 l = [ x for x in list if x in files ]
50
50
51 for f in files:
51 for f in files:
52 if f[-1] != os.sep: f += os.sep
52 if f[-1] != os.sep: f += os.sep
53 l += [ x for x in list if x.startswith(f) ]
53 l += [ x for x in list if x.startswith(f) ]
54 return l
54 return l
55
55
56 def diff(files = None, node1 = None, node2 = None):
56 def diff(files = None, node1 = None, node2 = None):
57 def date(c):
57 def date(c):
58 return time.asctime(time.gmtime(float(c[2].split(' ')[0])))
58 return time.asctime(time.gmtime(float(c[2].split(' ')[0])))
59
59
60 if node2:
60 if node2:
61 change = repo.changelog.read(node2)
61 change = repo.changelog.read(node2)
62 mmap2 = repo.manifest.read(change[0])
62 mmap2 = repo.manifest.read(change[0])
63 (c, a, d) = repo.diffrevs(node1, node2)
63 (c, a, d) = repo.diffrevs(node1, node2)
64 def read(f): return repo.file(f).read(mmap2[f])
64 def read(f): return repo.file(f).read(mmap2[f])
65 date2 = date(change)
65 date2 = date(change)
66 else:
66 else:
67 date2 = time.asctime()
67 date2 = time.asctime()
68 if not node1:
68 if not node1:
69 node1 = repo.current
69 node1 = repo.current
70 (c, a, d, u) = repo.diffdir(repo.root, node1)
70 (c, a, d, u) = repo.diffdir(repo.root, node1)
71 a = [] # ignore unknown files in repo, by popular request
71 a = [] # ignore unknown files in repo, by popular request
72 def read(f): return file(os.path.join(repo.root, f)).read()
72 def read(f): return file(os.path.join(repo.root, f)).read()
73
73
74 change = repo.changelog.read(node1)
74 change = repo.changelog.read(node1)
75 mmap = repo.manifest.read(change[0])
75 mmap = repo.manifest.read(change[0])
76 date1 = date(change)
76 date1 = date(change)
77
77
78 if files:
78 if files:
79 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
79 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
80
80
81 for f in c:
81 for f in c:
82 to = repo.file(f).read(mmap[f])
82 to = repo.file(f).read(mmap[f])
83 tn = read(f)
83 tn = read(f)
84 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
84 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
85 for f in a:
85 for f in a:
86 to = ""
86 to = ""
87 tn = read(f)
87 tn = read(f)
88 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
88 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
89 for f in d:
89 for f in d:
90 to = repo.file(f).read(mmap[f])
90 to = repo.file(f).read(mmap[f])
91 tn = ""
91 tn = ""
92 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
92 sys.stdout.write(mdiff.unidiff(to, date1, tn, date2, f))
93
93
94
94
95 try:
95 try:
96 sys.exit(commands.dispatch(sys.argv[1:]))
96 sys.exit(commands.dispatch(sys.argv[1:]))
97 except commands.UnknownCommand:
97 except commands.UnknownCommand:
98 # fall through
98 # fall through
99 pass
99 pass
100
100
101 options = {}
101 options = {}
102 opts = [('v', 'verbose', None, 'verbose'),
102 opts = [('v', 'verbose', None, 'verbose'),
103 ('d', 'debug', None, 'debug'),
103 ('d', 'debug', None, 'debug'),
104 ('q', 'quiet', None, 'quiet'),
104 ('q', 'quiet', None, 'quiet'),
105 ('y', 'noninteractive', None, 'run non-interactively'),
105 ('y', 'noninteractive', None, 'run non-interactively'),
106 ]
106 ]
107
107
108 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
108 args = fancyopts.fancyopts(sys.argv[1:], opts, options,
109 'hg [options] <command> [command options] [files]')
109 'hg [options] <command> [command options] [files]')
110
110
111 try:
111 try:
112 cmd = args[0]
112 cmd = args[0]
113 args = args[1:]
113 args = args[1:]
114 except:
114 except:
115 cmd = "help"
115 cmd = "help"
116
116
117 ui = ui.ui(options["verbose"], options["debug"], options["quiet"],
117 ui = ui.ui(options["verbose"], options["debug"], options["quiet"],
118 not options["noninteractive"])
118 not options["noninteractive"])
119
119
120 try:
120 try:
121 repo = hg.repository(ui=ui)
121 repo = hg.repository(ui=ui)
122 except IOError:
122 except IOError:
123 ui.warn("Unable to open repository\n")
123 ui.warn("Unable to open repository\n")
124 sys.exit(0)
124 sys.exit(0)
125
125
126 relpath = None
126 relpath = None
127 if os.getcwd() != repo.root:
127 if os.getcwd() != repo.root:
128 relpath = os.getcwd()[len(repo.root) + 1: ]
128 relpath = os.getcwd()[len(repo.root) + 1: ]
129
129
130 elif cmd == "add":
130 elif cmd == "add":
131 repo.add(args)
131 repo.add(args)
132
132
133 elif cmd == "forget":
133 elif cmd == "forget":
134 repo.forget(args)
134 repo.forget(args)
135
135
136 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
136 elif cmd == "remove" or cmd == "rm" or cmd == "del" or cmd == "delete":
137 repo.remove(args)
137 repo.remove(args)
138
138
139 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
139 elif cmd == "commit" or cmd == "checkin" or cmd == "ci":
140 if 1:
140 if 1:
141 if len(args) > 0:
141 if len(args) > 0:
142 repo.commit(repo.current, args)
142 repo.commit(repo.current, args)
143 else:
143 else:
144 repo.commit(repo.current)
144 repo.commit(repo.current)
145 elif cmd == "rawcommit":
145 elif cmd == "rawcommit":
146 "raw commit interface"
146 "raw commit interface"
147 rc = {}
147 rc = {}
148 opts = [('p', 'parent', [], 'parent'),
148 opts = [('p', 'parent', [], 'parent'),
149 ('d', 'date', "", 'data'),
149 ('d', 'date', "", 'data'),
150 ('u', 'user', "", 'user'),
150 ('u', 'user', "", 'user'),
151 ('F', 'files', "", 'file list'),
151 ('F', 'files', "", 'file list'),
152 ('t', 'text', "", 'commit text'),
152 ('t', 'text', "", 'commit text'),
153 ('l', 'logfile', "", 'commit text file')
153 ('l', 'logfile', "", 'commit text file')
154 ]
154 ]
155 args = fancyopts.fancyopts(args, opts, rc,
155 args = fancyopts.fancyopts(args, opts, rc,
156 "hg rawcommit [options] files")
156 "hg rawcommit [options] files")
157 text = rc['text']
157 text = rc['text']
158 if not text and rc['logfile']:
158 if not text and rc['logfile']:
159 try: text = open(rc['logfile']).read()
159 try: text = open(rc['logfile']).read()
160 except IOError: pass
160 except IOError: pass
161 if not text and not rc['logfile']:
161 if not text and not rc['logfile']:
162 print "missing commit text"
162 print "missing commit text"
163 sys.exit(0)
163 sys.exit(0)
164 if rc['files']:
164 if rc['files']:
165 files = open(rc['files']).read().splitlines()
165 files = open(rc['files']).read().splitlines()
166 else:
166 else:
167 files = args
167 files = args
168
168
169 repo.rawcommit(files, text, rc['user'], rc['date'], *rc['parent'])
169 repo.rawcommit(files, text, rc['user'], rc['date'], *rc['parent'])
170
170
171
171
172 elif cmd == "import" or cmd == "patch":
172 elif cmd == "import" or cmd == "patch":
173 try:
173 try:
174 import psyco
174 import psyco
175 psyco.full()
175 psyco.full()
176 except:
176 except:
177 pass
177 pass
178
178
179 ioptions = {}
179 ioptions = {}
180 opts = [('p', 'strip', 1, 'path strip'),
180 opts = [('p', 'strip', 1, 'path strip'),
181 ('b', 'base', "", 'base path'),
181 ('b', 'base', "", 'base path'),
182 ('q', 'quiet', "", 'silence diff')
182 ('q', 'quiet', "", 'silence diff')
183 ]
183 ]
184
184
185 args = fancyopts.fancyopts(args, opts, ioptions,
185 args = fancyopts.fancyopts(args, opts, ioptions,
186 'hg import [options] <patch names>')
186 'hg import [options] <patch names>')
187 d = ioptions["base"]
187 d = ioptions["base"]
188 strip = ioptions["strip"]
188 strip = ioptions["strip"]
189 quiet = ioptions["quiet"] and "> /dev/null" or ""
189 quiet = ioptions["quiet"] and "> /dev/null" or ""
190
190
191 for patch in args:
191 for patch in args:
192 ui.status("applying %s\n" % patch)
192 ui.status("applying %s\n" % patch)
193 pf = os.path.join(d, patch)
193 pf = os.path.join(d, patch)
194
194
195 text = ""
195 text = ""
196 for l in file(pf):
196 for l in file(pf):
197 if l[:4] == "--- ": break
197 if l[:4] == "--- ": break
198 text += l
198 text += l
199
199
200 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
200 f = os.popen("lsdiff --strip %d %s" % (strip, pf))
201 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
201 files = filter(None, map(lambda x: x.rstrip(), f.read().splitlines()))
202 f.close()
202 f.close()
203
203
204 if files:
204 if files:
205 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
205 if os.system("patch -p%d < %s %s" % (strip, pf, quiet)):
206 raise "patch failed!"
206 raise "patch failed!"
207 repo.commit(repo.current, files, text)
207 repo.commit(repo.current, files, text)
208
208
209 elif cmd == "diff":
209 elif cmd == "diff":
210 revs = []
210 revs = []
211
211
212 if args:
212 if args:
213 doptions = {}
213 doptions = {}
214 opts = [('r', 'revision', [], 'revision')]
214 opts = [('r', 'revision', [], 'revision')]
215 args = fancyopts.fancyopts(args, opts, doptions,
215 args = fancyopts.fancyopts(args, opts, doptions,
216 'hg diff [options] [files]')
216 'hg diff [options] [files]')
217 revs = map(lambda x: repo.lookup(x), doptions['revision'])
217 revs = map(lambda x: repo.lookup(x), doptions['revision'])
218
218
219 if len(revs) > 2:
219 if len(revs) > 2:
220 self.ui.warn("too many revisions to diff\n")
220 self.ui.warn("too many revisions to diff\n")
221 sys.exit(1)
221 sys.exit(1)
222
222
223 if relpath:
223 if relpath:
224 if not args: args = [ relpath ]
224 if not args: args = [ relpath ]
225 else: args = [ os.path.join(relpath, x) for x in args ]
225 else: args = [ os.path.join(relpath, x) for x in args ]
226
226
227 diff(args, *revs)
227 diff(args, *revs)
228
228
229 elif cmd == "export":
229 elif cmd == "export":
230 node = repo.lookup(args[0])
230 node = repo.lookup(args[0])
231 prev, other = repo.changelog.parents(node)
231 prev, other = repo.changelog.parents(node)
232 change = repo.changelog.read(node)
232 change = repo.changelog.read(node)
233 print "# HG changeset patch"
233 print "# HG changeset patch"
234 print "# User %s" % change[1]
234 print "# User %s" % change[1]
235 print "# Node ID %s" % hg.hex(node)
235 print "# Node ID %s" % hg.hex(node)
236 print "# Parent %s" % hg.hex(prev)
236 print "# Parent %s" % hg.hex(prev)
237 print
237 print
238 if other != hg.nullid:
238 if other != hg.nullid:
239 print "# Parent %s" % hg.hex(other)
239 print "# Parent %s" % hg.hex(other)
240 print change[4]
240 print change[4]
241
241
242 diff(None, prev, node)
242 diff(None, prev, node)
243
243
244 elif cmd == "debugchangegroup":
244 elif cmd == "debugchangegroup":
245 newer = repo.newer(map(repo.lookup, args))
245 newer = repo.newer(map(repo.lookup, args))
246 for chunk in repo.changegroup(newer):
246 for chunk in repo.changegroup(newer):
247 sys.stdout.write(chunk)
247 sys.stdout.write(chunk)
248
248
249 elif cmd == "debugaddchangegroup":
249 elif cmd == "debugaddchangegroup":
250 data = sys.stdin.read()
250 data = sys.stdin.read()
251 repo.addchangegroup(data)
251 repo.addchangegroup(data)
252
252
253 elif cmd == "addremove":
253 elif cmd == "addremove":
254 (c, a, d, u) = repo.diffdir(repo.root, repo.current)
254 (c, a, d, u) = repo.diffdir(repo.root, repo.current)
255 repo.add(a)
255 repo.add(a)
256 repo.remove(d)
256 repo.remove(d)
257
257
258 elif cmd == "history":
258 elif cmd == "history":
259 for i in range(repo.changelog.count()):
259 for i in range(repo.changelog.count()):
260 n = repo.changelog.node(i)
260 n = repo.changelog.node(i)
261 changes = repo.changelog.read(n)
261 changes = repo.changelog.read(n)
262 (p1, p2) = repo.changelog.parents(n)
262 (p1, p2) = repo.changelog.parents(n)
263 (h, h1, h2) = map(hg.hex, (n, p1, p2))
263 (h, h1, h2) = map(hg.hex, (n, p1, p2))
264 (i1, i2) = map(repo.changelog.rev, (p1, p2))
264 (i1, i2) = map(repo.changelog.rev, (p1, p2))
265 print "rev: %4d:%s" % (i, h)
265 print "rev: %4d:%s" % (i, h)
266 print "parents: %4d:%s" % (i1, h1)
266 print "parents: %4d:%s" % (i1, h1)
267 if i2: print " %4d:%s" % (i2, h2)
267 if i2: print " %4d:%s" % (i2, h2)
268 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
268 print "manifest: %4d:%s" % (repo.manifest.rev(changes[0]),
269 hg.hex(changes[0]))
269 hg.hex(changes[0]))
270 print "user:", changes[1]
270 print "user:", changes[1]
271 print "date:", time.asctime(
271 print "date:", time.asctime(
272 time.localtime(float(changes[2].split(' ')[0])))
272 time.localtime(float(changes[2].split(' ')[0])))
273 if ui.verbose: print "files:", " ".join(changes[3])
273 if ui.verbose: print "files:", " ".join(changes[3])
274 print "description:"
274 print "description:"
275 print changes[4]
275 print changes[4]
276
276
277 elif cmd == "tip":
277 elif cmd == "tip":
278 n = repo.changelog.tip()
278 n = repo.changelog.tip()
279 t = repo.changelog.rev(n)
279 t = repo.changelog.rev(n)
280 ui.status("%d:%s\n" % (t, hg.hex(n)))
280 ui.status("%d:%s\n" % (t, hg.hex(n)))
281
281
282 elif cmd == "log":
282 elif cmd == "log":
283
283
284 if len(args) == 1:
284 if len(args) == 1:
285 if relpath:
285 if relpath:
286 args[0] = os.path.join(relpath, args[0])
286 args[0] = os.path.join(relpath, args[0])
287
287
288 r = repo.file(args[0])
288 r = repo.file(args[0])
289 for i in range(r.count()):
289 for i in range(r.count()):
290 n = r.node(i)
290 n = r.node(i)
291 (p1, p2) = r.parents(n)
291 (p1, p2) = r.parents(n)
292 (h, h1, h2) = map(hg.hex, (n, p1, p2))
292 (h, h1, h2) = map(hg.hex, (n, p1, p2))
293 (i1, i2) = map(r.rev, (p1, p2))
293 (i1, i2) = map(r.rev, (p1, p2))
294 cr = r.linkrev(n)
294 cr = r.linkrev(n)
295 cn = hg.hex(repo.changelog.node(cr))
295 cn = hg.hex(repo.changelog.node(cr))
296 print "rev: %4d:%s" % (i, h)
296 print "rev: %4d:%s" % (i, h)
297 print "changeset: %4d:%s" % (cr, cn)
297 print "changeset: %4d:%s" % (cr, cn)
298 print "parents: %4d:%s" % (i1, h1)
298 print "parents: %4d:%s" % (i1, h1)
299 if i2: print " %4d:%s" % (i2, h2)
299 if i2: print " %4d:%s" % (i2, h2)
300 changes = repo.changelog.read(repo.changelog.node(cr))
300 changes = repo.changelog.read(repo.changelog.node(cr))
301 print "user: %s" % changes[1]
301 print "user: %s" % changes[1]
302 print "date: %s" % time.asctime(
302 print "date: %s" % time.asctime(
303 time.localtime(float(changes[2].split(' ')[0])))
303 time.localtime(float(changes[2].split(' ')[0])))
304 print "description:"
304 print "description:"
305 print changes[4]
305 print changes[4]
306 print
306 print
307 elif len(args) > 1:
307 elif len(args) > 1:
308 print "too many args"
308 print "too many args"
309 else:
309 else:
310 print "missing filename"
310 print "missing filename"
311
311
312 elif cmd == "dump":
312 elif cmd == "dump":
313 if args:
313 if args:
314 r = repo.file(args[0])
314 r = repo.file(args[0])
315 n = r.tip()
315 n = r.tip()
316 if len(args) > 1: n = r.lookup(args[1])
316 if len(args) > 1: n = r.lookup(args[1])
317 sys.stdout.write(r.read(n))
317 sys.stdout.write(r.read(n))
318 else:
318 else:
319 print "missing filename"
319 print "missing filename"
320
320
321 elif cmd == "dumpmanifest":
321 elif cmd == "dumpmanifest":
322 n = repo.manifest.tip()
322 n = repo.manifest.tip()
323 if len(args) > 0:
323 if len(args) > 0:
324 n = repo.manifest.lookup(args[0])
324 n = repo.manifest.lookup(args[0])
325 m = repo.manifest.read(n)
325 m = repo.manifest.read(n)
326 files = m.keys()
326 files = m.keys()
327 files.sort()
327 files.sort()
328
328
329 for f in files:
329 for f in files:
330 print hg.hex(m[f]), f
330 print hg.hex(m[f]), f
331
331
332 elif cmd == "debugindex":
332 elif cmd == "debugindex":
333 if ".hg" not in args[0]:
333 if ".hg" not in args[0]:
334 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
334 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
335
335
336 r = hg.revlog(open, args[0], "")
336 r = hg.revlog(open, args[0], "")
337 print " rev offset length base linkrev"+\
337 print " rev offset length base linkrev"+\
338 " p1 p2 nodeid"
338 " p1 p2 nodeid"
339 for i in range(r.count()):
339 for i in range(r.count()):
340 e = r.index[i]
340 e = r.index[i]
341 print "% 6d % 9d % 7d % 6d % 7d %s.. %s.. %s.." % (
341 print "% 6d % 9d % 7d % 6d % 7d %s.. %s.. %s.." % (
342 i, e[0], e[1], e[2], e[3],
342 i, e[0], e[1], e[2], e[3],
343 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
343 hg.hex(e[4][:5]), hg.hex(e[5][:5]), hg.hex(e[6][:5]))
344
344
345 elif cmd == "debugindexdot":
345 elif cmd == "debugindexdot":
346 if ".hg" not in args[0]:
346 if ".hg" not in args[0]:
347 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
347 args[0] = ".hg/data/" + repo.file(args[0]).encodepath(args[0]) + "i"
348
348
349 r = hg.revlog(open, args[0], "")
349 r = hg.revlog(open, args[0], "")
350 print "digraph G {"
350 print "digraph G {"
351 for i in range(r.count()):
351 for i in range(r.count()):
352 e = r.index[i]
352 e = r.index[i]
353 print "\t%d -> %d" % (r.rev(e[4]), i)
353 print "\t%d -> %d" % (r.rev(e[4]), i)
354 if e[5] != hg.nullid:
354 if e[5] != hg.nullid:
355 print "\t%d -> %d" % (r.rev(e[5]), i)
355 print "\t%d -> %d" % (r.rev(e[5]), i)
356 print "}"
356 print "}"
357
357
358 elif cmd == "merge":
358 elif cmd == "merge":
359 (c, a, d, u) = repo.diffdir(repo.root, repo.current)
359 (c, a, d, u) = repo.diffdir(repo.root, repo.current)
360 if c or a or d:
360 if c or a or d:
361 ui.warn("aborting (outstanding changes in working directory)\n")
361 ui.warn("aborting (outstanding changes in working directory)\n")
362 sys.exit(1)
362 sys.exit(1)
363
363
364 if args:
364 if args:
365 paths = {}
365 paths = {}
366 try:
366 try:
367 pf = os.path.join(os.environ["HOME"], ".hgpaths")
367 pf = os.path.join(os.environ["HOME"], ".hgpaths")
368 for l in file(pf):
368 for l in file(pf):
369 name, path = l.split()
369 name, path = l.split()
370 paths[name] = path
370 paths[name] = path
371 except:
371 except:
372 pass
372 pass
373
373
374 if args[0] in paths: args[0] = paths[args[0]]
374 if args[0] in paths: args[0] = paths[args[0]]
375
375
376 other = hg.repository(ui, args[0])
376 other = hg.repository(ui, args[0])
377 cg = repo.getchangegroup(other)
377 cg = repo.getchangegroup(other)
378 repo.addchangegroup(cg)
378 repo.addchangegroup(cg)
379 else:
379 else:
380 print "missing source repository"
380 print "missing source repository"
381
381
382 elif cmd == "tags":
382 elif cmd == "tags":
383 repo.lookup(0) # prime the cache
383 repo.lookup(0) # prime the cache
384 i = repo.tags.items()
384 i = repo.tags.items()
385 i.sort()
385 i.sort()
386 for k, n in i:
386 for k, n in i:
387 try:
387 try:
388 r = repo.changelog.rev(n)
388 r = repo.changelog.rev(n)
389 except KeyError:
389 except KeyError:
390 r = "?"
390 r = "?"
391 print "%-30s %5d:%s" % (k, repo.changelog.rev(n), hg.hex(n))
391 print "%-30s %5d:%s" % (k, repo.changelog.rev(n), hg.hex(n))
392
392
393 elif cmd == "recover":
393 elif cmd == "recover":
394 repo.recover()
394 repo.recover()
395
395
396 elif cmd == "verify":
396 elif cmd == "verify":
397 filelinkrevs = {}
397 filelinkrevs = {}
398 filenodes = {}
398 filenodes = {}
399 manifestchangeset = {}
399 manifestchangeset = {}
400 changesets = revisions = files = 0
400 changesets = revisions = files = 0
401 errors = 0
401 errors = 0
402
402
403 ui.status("checking changesets\n")
403 ui.status("checking changesets\n")
404 for i in range(repo.changelog.count()):
404 for i in range(repo.changelog.count()):
405 changesets += 1
405 changesets += 1
406 n = repo.changelog.node(i)
406 n = repo.changelog.node(i)
407 for p in repo.changelog.parents(n):
407 for p in repo.changelog.parents(n):
408 if p not in repo.changelog.nodemap:
408 if p not in repo.changelog.nodemap:
409 ui.warn("changeset %s has unknown parent %s\n" %
409 ui.warn("changeset %s has unknown parent %s\n" %
410 (hg.short(n), hg.short(p)))
410 (hg.short(n), hg.short(p)))
411 errors += 1
411 errors += 1
412 try:
412 try:
413 changes = repo.changelog.read(n)
413 changes = repo.changelog.read(n)
414 except Exception, inst:
414 except Exception, inst:
415 ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
415 ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
416 errors += 1
416 errors += 1
417
417
418 manifestchangeset[changes[0]] = n
418 manifestchangeset[changes[0]] = n
419 for f in changes[3]:
419 for f in changes[3]:
420 revisions += 1
421 filelinkrevs.setdefault(f, []).append(i)
420 filelinkrevs.setdefault(f, []).append(i)
422
421
423 ui.status("checking manifests\n")
422 ui.status("checking manifests\n")
424 for i in range(repo.manifest.count()):
423 for i in range(repo.manifest.count()):
425 n = repo.manifest.node(i)
424 n = repo.manifest.node(i)
426 for p in repo.manifest.parents(n):
425 for p in repo.manifest.parents(n):
427 if p not in repo.manifest.nodemap:
426 if p not in repo.manifest.nodemap:
428 ui.warn("manifest %s has unknown parent %s\n" %
427 ui.warn("manifest %s has unknown parent %s\n" %
429 (hg.short(n), hg.short(p)))
428 (hg.short(n), hg.short(p)))
430 errors += 1
429 errors += 1
431 ca = repo.changelog.node(repo.manifest.linkrev(n))
430 ca = repo.changelog.node(repo.manifest.linkrev(n))
432 cc = manifestchangeset[n]
431 cc = manifestchangeset[n]
433 if ca != cc:
432 if ca != cc:
434 ui.warn("manifest %s points to %s, not %s\n" %
433 ui.warn("manifest %s points to %s, not %s\n" %
435 (hg.hex(n), hg.hex(ca), hg.hex(cc)))
434 (hg.hex(n), hg.hex(ca), hg.hex(cc)))
436 errors += 1
435 errors += 1
437
436
438 try:
437 try:
439 delta = mdiff.patchtext(repo.manifest.delta(n))
438 delta = mdiff.patchtext(repo.manifest.delta(n))
440 except KeyboardInterrupt:
439 except KeyboardInterrupt:
441 print "aborted"
440 print "aborted"
442 sys.exit(0)
441 sys.exit(0)
443 except Exception, inst:
442 except Exception, inst:
444 ui.warn("unpacking manifest %s: %s\n" % (hg.short(n), inst))
443 ui.warn("unpacking manifest %s: %s\n" % (hg.short(n), inst))
445 errors += 1
444 errors += 1
446
445
447 ff = [ l.split('\0') for l in delta.splitlines() ]
446 ff = [ l.split('\0') for l in delta.splitlines() ]
448 for f, fn in ff:
447 for f, fn in ff:
449 filenodes.setdefault(f, {})[hg.bin(fn)] = 1
448 filenodes.setdefault(f, {})[hg.bin(fn)] = 1
450
449
451 ui.status("crosschecking files in changesets and manifests\n")
450 ui.status("crosschecking files in changesets and manifests\n")
452 for f in filenodes:
451 for f in filenodes:
453 if f not in filelinkrevs:
452 if f not in filelinkrevs:
454 ui.warn("file %s in manifest but not in changesets\n" % f)
453 ui.warn("file %s in manifest but not in changesets\n" % f)
455 errors += 1
454 errors += 1
456
455
457 for f in filelinkrevs:
456 for f in filelinkrevs:
458 if f not in filenodes:
457 if f not in filenodes:
459 ui.warn("file %s in changeset but not in manifest\n" % f)
458 ui.warn("file %s in changeset but not in manifest\n" % f)
460 errors += 1
459 errors += 1
461
460
462 ui.status("checking files\n")
461 ui.status("checking files\n")
463 ff = filenodes.keys()
462 ff = filenodes.keys()
464 ff.sort()
463 ff.sort()
465 for f in ff:
464 for f in ff:
466 if f == "/dev/null": continue
465 if f == "/dev/null": continue
467 files += 1
466 files += 1
468 fl = repo.file(f)
467 fl = repo.file(f)
469 nodes = { hg.nullid: 1 }
468 nodes = { hg.nullid: 1 }
470 for i in range(fl.count()):
469 for i in range(fl.count()):
470 revisions += 1
471 n = fl.node(i)
471 n = fl.node(i)
472
472
473 if n not in filenodes[f]:
473 if n not in filenodes[f]:
474 ui.warn("%s: %d:%s not in manifests\n" % (f, i, hg.short(n)))
474 ui.warn("%s: %d:%s not in manifests\n" % (f, i, hg.short(n)))
475 print len(filenodes[f].keys()), fl.count(), f
475 print len(filenodes[f].keys()), fl.count(), f
476 errors += 1
476 errors += 1
477 else:
477 else:
478 del filenodes[f][n]
478 del filenodes[f][n]
479
479
480 flr = fl.linkrev(n)
480 flr = fl.linkrev(n)
481 if flr not in filelinkrevs[f]:
481 if flr not in filelinkrevs[f]:
482 ui.warn("%s:%s points to unexpected changeset rev %d\n"
482 ui.warn("%s:%s points to unexpected changeset rev %d\n"
483 % (f, hg.short(n), fl.linkrev(n)))
483 % (f, hg.short(n), fl.linkrev(n)))
484 errors += 1
484 errors += 1
485 else:
485 else:
486 filelinkrevs[f].remove(flr)
486 filelinkrevs[f].remove(flr)
487
487
488 # verify contents
488 # verify contents
489 try:
489 try:
490 t = fl.read(n)
490 t = fl.read(n)
491 except Exception, inst:
491 except Exception, inst:
492 ui.warn("unpacking file %s %s: %s\n" % (f, hg.short(n), inst))
492 ui.warn("unpacking file %s %s: %s\n" % (f, hg.short(n), inst))
493 errors += 1
493 errors += 1
494
494
495 # verify parents
495 # verify parents
496 (p1, p2) = fl.parents(n)
496 (p1, p2) = fl.parents(n)
497 if p1 not in nodes:
497 if p1 not in nodes:
498 ui.warn("file %s:%s unknown parent 1 %s" %
498 ui.warn("file %s:%s unknown parent 1 %s" %
499 (f, hg.short(n), hg.short(p1)))
499 (f, hg.short(n), hg.short(p1)))
500 errors += 1
500 errors += 1
501 if p2 not in nodes:
501 if p2 not in nodes:
502 ui.warn("file %s:%s unknown parent 2 %s" %
502 ui.warn("file %s:%s unknown parent 2 %s" %
503 (f, hg.short(n), hg.short(p1)))
503 (f, hg.short(n), hg.short(p1)))
504 errors += 1
504 errors += 1
505 nodes[n] = 1
505 nodes[n] = 1
506
506
507 # cross-check
507 # cross-check
508 for flr in filelinkrevs[f]:
509 ui.warn("changeset rev %d not in %s\n" % (flr, f))
510 errors += 1
511
512 for node in filenodes[f]:
508 for node in filenodes[f]:
513 ui.warn("node %s in manifests not in %s\n" % (hg.hex(n), f))
509 ui.warn("node %s in manifests not in %s\n" % (hg.hex(n), f))
514 errors += 1
510 errors += 1
515
511
516 ui.status("%d files, %d changesets, %d total revisions\n" %
512 ui.status("%d files, %d changesets, %d total revisions\n" %
517 (files, changesets, revisions))
513 (files, changesets, revisions))
518
514
519 if errors:
515 if errors:
520 ui.warn("%d integrity errors encountered!\n" % errors)
516 ui.warn("%d integrity errors encountered!\n" % errors)
521 sys.exit(1)
517 sys.exit(1)
522
518
523 elif cmd == "serve":
519 elif cmd == "serve":
524 from mercurial import hgweb
520 from mercurial import hgweb
525
521
526 soptions = {}
522 soptions = {}
527 opts = [('p', 'port', 8000, 'listen port'),
523 opts = [('p', 'port', 8000, 'listen port'),
528 ('a', 'address', '', 'interface address'),
524 ('a', 'address', '', 'interface address'),
529 ('n', 'name', os.getcwd(), 'repository name'),
525 ('n', 'name', os.getcwd(), 'repository name'),
530 ('t', 'templates', "", 'template map')
526 ('t', 'templates', "", 'template map')
531 ]
527 ]
532
528
533 args = fancyopts.fancyopts(args, opts, soptions,
529 args = fancyopts.fancyopts(args, opts, soptions,
534 'hg serve [options]')
530 'hg serve [options]')
535
531
536 hgweb.server(repo.root, soptions["name"], soptions["templates"],
532 hgweb.server(repo.root, soptions["name"], soptions["templates"],
537 soptions["address"], soptions["port"])
533 soptions["address"], soptions["port"])
538
534
539 else:
535 else:
540 if cmd: ui.warn("unknown command\n\n")
536 if cmd: ui.warn("unknown command\n\n")
541 help()
537 help()
542 sys.exit(1)
538 sys.exit(1)
@@ -1,1125 +1,1125 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, sha, socket, os, time, re, urllib2, tempfile
8 import sys, struct, sha, socket, os, time, re, urllib2, tempfile
9 import urllib
9 import urllib
10 from mercurial import byterange, lock
10 from mercurial import byterange, lock
11 from mercurial.transaction import *
11 from mercurial.transaction import *
12 from mercurial.revlog import *
12 from mercurial.revlog import *
13 from difflib import SequenceMatcher
13 from difflib import SequenceMatcher
14
14
15 class filelog(revlog):
15 class filelog(revlog):
16 def __init__(self, opener, path):
16 def __init__(self, opener, path):
17 revlog.__init__(self, opener,
17 revlog.__init__(self, opener,
18 os.path.join("data", path + ".i"),
18 os.path.join("data", path + ".i"),
19 os.path.join("data", path + ".d"))
19 os.path.join("data", path + ".d"))
20
20
21 def read(self, node):
21 def read(self, node):
22 return self.revision(node)
22 return self.revision(node)
23 def add(self, text, transaction, link, p1=None, p2=None):
23 def add(self, text, transaction, link, p1=None, p2=None):
24 return self.addrevision(text, transaction, link, p1, p2)
24 return self.addrevision(text, transaction, link, p1, p2)
25
25
26 def annotate(self, node):
26 def annotate(self, node):
27
27
28 def decorate(text, rev):
28 def decorate(text, rev):
29 return [(rev, l) for l in text.splitlines(1)]
29 return [(rev, l) for l in text.splitlines(1)]
30
30
31 def strip(annotation):
31 def strip(annotation):
32 return [e[1] for e in annotation]
32 return [e[1] for e in annotation]
33
33
34 def pair(parent, child):
34 def pair(parent, child):
35 new = []
35 new = []
36 sm = SequenceMatcher(None, strip(parent), strip(child))
36 sm = SequenceMatcher(None, strip(parent), strip(child))
37 for o, m, n, s, t in sm.get_opcodes():
37 for o, m, n, s, t in sm.get_opcodes():
38 if o == 'equal':
38 if o == 'equal':
39 new += parent[m:n]
39 new += parent[m:n]
40 else:
40 else:
41 new += child[s:t]
41 new += child[s:t]
42 return new
42 return new
43
43
44 # find all ancestors
44 # find all ancestors
45 needed = {node:1}
45 needed = {node:1}
46 visit = [node]
46 visit = [node]
47 while visit:
47 while visit:
48 n = visit.pop(0)
48 n = visit.pop(0)
49 for p in self.parents(n):
49 for p in self.parents(n):
50 if p not in needed:
50 if p not in needed:
51 needed[p] = 1
51 needed[p] = 1
52 visit.append(p)
52 visit.append(p)
53 else:
53 else:
54 # count how many times we'll use this
54 # count how many times we'll use this
55 needed[p] += 1
55 needed[p] += 1
56
56
57 # sort by revision which is a topological order
57 # sort by revision which is a topological order
58 visit = needed.keys()
58 visit = needed.keys()
59 visit = [ (self.rev(n), n) for n in visit ]
59 visit = [ (self.rev(n), n) for n in visit ]
60 visit.sort()
60 visit.sort()
61 visit = [ p[1] for p in visit ]
61 visit = [ p[1] for p in visit ]
62 hist = {}
62 hist = {}
63
63
64 for n in visit:
64 for n in visit:
65 curr = decorate(self.read(n), self.linkrev(n))
65 curr = decorate(self.read(n), self.linkrev(n))
66 for p in self.parents(n):
66 for p in self.parents(n):
67 if p != nullid:
67 if p != nullid:
68 curr = pair(hist[p], curr)
68 curr = pair(hist[p], curr)
69 # trim the history of unneeded revs
69 # trim the history of unneeded revs
70 needed[p] -= 1
70 needed[p] -= 1
71 if not needed[p]:
71 if not needed[p]:
72 del hist[p]
72 del hist[p]
73 hist[n] = curr
73 hist[n] = curr
74
74
75 return hist[n]
75 return hist[n]
76
76
77 class manifest(revlog):
77 class manifest(revlog):
78 def __init__(self, opener):
78 def __init__(self, opener):
79 self.mapcache = None
79 self.mapcache = None
80 self.listcache = None
80 self.listcache = None
81 self.addlist = None
81 self.addlist = None
82 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
82 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
83
83
84 def read(self, node):
84 def read(self, node):
85 if self.mapcache and self.mapcache[0] == node:
85 if self.mapcache and self.mapcache[0] == node:
86 return self.mapcache[1].copy()
86 return self.mapcache[1].copy()
87 text = self.revision(node)
87 text = self.revision(node)
88 map = {}
88 map = {}
89 self.listcache = (text, text.splitlines(1))
89 self.listcache = (text, text.splitlines(1))
90 for l in self.listcache[1]:
90 for l in self.listcache[1]:
91 (f, n) = l.split('\0')
91 (f, n) = l.split('\0')
92 map[f] = bin(n[:40])
92 map[f] = bin(n[:40])
93 self.mapcache = (node, map)
93 self.mapcache = (node, map)
94 return map
94 return map
95
95
96 def diff(self, a, b):
96 def diff(self, a, b):
97 # this is sneaky, as we're not actually using a and b
97 # this is sneaky, as we're not actually using a and b
98 if self.listcache and self.addlist and self.listcache[0] == a:
98 if self.listcache and self.addlist and self.listcache[0] == a:
99 d = mdiff.diff(self.listcache[1], self.addlist, 1)
99 d = mdiff.diff(self.listcache[1], self.addlist, 1)
100 if mdiff.patch(a, d) != b:
100 if mdiff.patch(a, d) != b:
101 sys.stderr.write("*** sortdiff failed, falling back ***\n")
101 sys.stderr.write("*** sortdiff failed, falling back ***\n")
102 return mdiff.textdiff(a, b)
102 return mdiff.textdiff(a, b)
103 return d
103 return d
104 else:
104 else:
105 return mdiff.textdiff(a, b)
105 return mdiff.textdiff(a, b)
106
106
107 def add(self, map, transaction, link, p1=None, p2=None):
107 def add(self, map, transaction, link, p1=None, p2=None):
108 files = map.keys()
108 files = map.keys()
109 files.sort()
109 files.sort()
110
110
111 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
111 self.addlist = ["%s\000%s\n" % (f, hex(map[f])) for f in files]
112 text = "".join(self.addlist)
112 text = "".join(self.addlist)
113
113
114 n = self.addrevision(text, transaction, link, p1, p2)
114 n = self.addrevision(text, transaction, link, p1, p2)
115 self.mapcache = (n, map)
115 self.mapcache = (n, map)
116 self.listcache = (text, self.addlist)
116 self.listcache = (text, self.addlist)
117 self.addlist = None
117 self.addlist = None
118
118
119 return n
119 return n
120
120
121 class changelog(revlog):
121 class changelog(revlog):
122 def __init__(self, opener):
122 def __init__(self, opener):
123 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
123 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
124
124
125 def extract(self, text):
125 def extract(self, text):
126 if not text:
126 if not text:
127 return (nullid, "", "0", [], "")
127 return (nullid, "", "0", [], "")
128 last = text.index("\n\n")
128 last = text.index("\n\n")
129 desc = text[last + 2:]
129 desc = text[last + 2:]
130 l = text[:last].splitlines()
130 l = text[:last].splitlines()
131 manifest = bin(l[0])
131 manifest = bin(l[0])
132 user = l[1]
132 user = l[1]
133 date = l[2]
133 date = l[2]
134 files = l[3:]
134 files = l[3:]
135 return (manifest, user, date, files, desc)
135 return (manifest, user, date, files, desc)
136
136
137 def read(self, node):
137 def read(self, node):
138 return self.extract(self.revision(node))
138 return self.extract(self.revision(node))
139
139
140 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
140 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
141 user=None, date=None):
141 user=None, date=None):
142 user = (user or
142 user = (user or
143 os.environ.get("HGUSER") or
143 os.environ.get("HGUSER") or
144 os.environ.get("EMAIL") or
144 os.environ.get("EMAIL") or
145 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
145 os.environ.get("LOGNAME", "unknown") + '@' + socket.getfqdn())
146 date = date or "%d %d" % (time.time(), time.timezone)
146 date = date or "%d %d" % (time.time(), time.timezone)
147 list.sort()
147 list.sort()
148 l = [hex(manifest), user, date] + list + ["", desc]
148 l = [hex(manifest), user, date] + list + ["", desc]
149 text = "\n".join(l)
149 text = "\n".join(l)
150 return self.addrevision(text, transaction, self.count(), p1, p2)
150 return self.addrevision(text, transaction, self.count(), p1, p2)
151
151
152 class dirstate:
152 class dirstate:
153 def __init__(self, opener, ui):
153 def __init__(self, opener, ui):
154 self.opener = opener
154 self.opener = opener
155 self.dirty = 0
155 self.dirty = 0
156 self.ui = ui
156 self.ui = ui
157 self.map = None
157 self.map = None
158
158
159 def __del__(self):
159 def __del__(self):
160 if self.dirty:
160 if self.dirty:
161 self.write()
161 self.write()
162
162
163 def __getitem__(self, key):
163 def __getitem__(self, key):
164 try:
164 try:
165 return self.map[key]
165 return self.map[key]
166 except TypeError:
166 except TypeError:
167 self.read()
167 self.read()
168 return self[key]
168 return self[key]
169
169
170 def __contains__(self, key):
170 def __contains__(self, key):
171 if not self.map: self.read()
171 if not self.map: self.read()
172 return key in self.map
172 return key in self.map
173
173
174 def state(self, key):
174 def state(self, key):
175 try:
175 try:
176 return self[key][0]
176 return self[key][0]
177 except KeyError:
177 except KeyError:
178 return "?"
178 return "?"
179
179
180 def read(self):
180 def read(self):
181 if self.map is not None: return self.map
181 if self.map is not None: return self.map
182
182
183 self.map = {}
183 self.map = {}
184 try:
184 try:
185 st = self.opener("dirstate").read()
185 st = self.opener("dirstate").read()
186 except: return
186 except: return
187
187
188 pos = 0
188 pos = 0
189 while pos < len(st):
189 while pos < len(st):
190 e = struct.unpack(">cllll", st[pos:pos+17])
190 e = struct.unpack(">cllll", st[pos:pos+17])
191 l = e[4]
191 l = e[4]
192 pos += 17
192 pos += 17
193 f = st[pos:pos + l]
193 f = st[pos:pos + l]
194 self.map[f] = e[:4]
194 self.map[f] = e[:4]
195 pos += l
195 pos += l
196
196
197 def update(self, files, state):
197 def update(self, files, state):
198 ''' current states:
198 ''' current states:
199 n normal
199 n normal
200 i invalid
200 i invalid
201 r marked for removal
201 r marked for removal
202 a marked for addition'''
202 a marked for addition'''
203
203
204 if not files: return
204 if not files: return
205 self.read()
205 self.read()
206 self.dirty = 1
206 self.dirty = 1
207 for f in files:
207 for f in files:
208 if state == "r":
208 if state == "r":
209 self.map[f] = ('r', 0, 0, 0)
209 self.map[f] = ('r', 0, 0, 0)
210 else:
210 else:
211 try:
211 try:
212 s = os.stat(f)
212 s = os.stat(f)
213 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
213 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
214 except OSError:
214 except OSError:
215 if state != "i": raise
215 if state != "i": raise
216 self.map[f] = ('r', 0, 0, 0)
216 self.map[f] = ('r', 0, 0, 0)
217
217
218 def forget(self, files):
218 def forget(self, files):
219 if not files: return
219 if not files: return
220 self.read()
220 self.read()
221 self.dirty = 1
221 self.dirty = 1
222 for f in files:
222 for f in files:
223 try:
223 try:
224 del self.map[f]
224 del self.map[f]
225 except KeyError:
225 except KeyError:
226 self.ui.warn("not in dirstate: %s!\n" % f)
226 self.ui.warn("not in dirstate: %s!\n" % f)
227 pass
227 pass
228
228
229 def clear(self):
229 def clear(self):
230 self.map = {}
230 self.map = {}
231 self.dirty = 1
231 self.dirty = 1
232
232
233 def write(self):
233 def write(self):
234 st = self.opener("dirstate", "w")
234 st = self.opener("dirstate", "w")
235 for f, e in self.map.items():
235 for f, e in self.map.items():
236 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
236 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
237 st.write(e + f)
237 st.write(e + f)
238 self.dirty = 0
238 self.dirty = 0
239
239
240 def copy(self):
240 def copy(self):
241 self.read()
241 self.read()
242 return self.map.copy()
242 return self.map.copy()
243
243
244 # used to avoid circular references so destructors work
244 # used to avoid circular references so destructors work
245 def opener(base):
245 def opener(base):
246 p = base
246 p = base
247 def o(path, mode="r"):
247 def o(path, mode="r"):
248 if p[:7] == "http://":
248 if p[:7] == "http://":
249 f = os.path.join(p, urllib.quote(path))
249 f = os.path.join(p, urllib.quote(path))
250 return httprangereader(f)
250 return httprangereader(f)
251
251
252 f = os.path.join(p, path)
252 f = os.path.join(p, path)
253
253
254 if mode != "r":
254 if mode != "r":
255 try:
255 try:
256 s = os.stat(f)
256 s = os.stat(f)
257 except OSError:
257 except OSError:
258 d = os.path.dirname(f)
258 d = os.path.dirname(f)
259 if not os.path.isdir(d):
259 if not os.path.isdir(d):
260 os.makedirs(d)
260 os.makedirs(d)
261 else:
261 else:
262 if s.st_nlink > 1:
262 if s.st_nlink > 1:
263 file(f + ".tmp", "w").write(file(f).read())
263 file(f + ".tmp", "w").write(file(f).read())
264 os.rename(f+".tmp", f)
264 os.rename(f+".tmp", f)
265
265
266 return file(f, mode)
266 return file(f, mode)
267
267
268 return o
268 return o
269
269
270 class localrepository:
270 class localrepository:
271 def __init__(self, ui, path=None, create=0):
271 def __init__(self, ui, path=None, create=0):
272 self.remote = 0
272 self.remote = 0
273 if path and path[:7] == "http://":
273 if path and path[:7] == "http://":
274 self.remote = 1
274 self.remote = 1
275 self.path = path
275 self.path = path
276 else:
276 else:
277 if not path:
277 if not path:
278 p = os.getcwd()
278 p = os.getcwd()
279 while not os.path.isdir(os.path.join(p, ".hg")):
279 while not os.path.isdir(os.path.join(p, ".hg")):
280 p = os.path.dirname(p)
280 p = os.path.dirname(p)
281 if p == "/": raise "No repo found"
281 if p == "/": raise "No repo found"
282 path = p
282 path = p
283 self.path = os.path.join(path, ".hg")
283 self.path = os.path.join(path, ".hg")
284
284
285 self.root = path
285 self.root = path
286 self.ui = ui
286 self.ui = ui
287
287
288 if create:
288 if create:
289 os.mkdir(self.path)
289 os.mkdir(self.path)
290 os.mkdir(self.join("data"))
290 os.mkdir(self.join("data"))
291
291
292 self.opener = opener(self.path)
292 self.opener = opener(self.path)
293 self.manifest = manifest(self.opener)
293 self.manifest = manifest(self.opener)
294 self.changelog = changelog(self.opener)
294 self.changelog = changelog(self.opener)
295 self.ignorelist = None
295 self.ignorelist = None
296 self.tags = None
296 self.tags = None
297
297
298 if not self.remote:
298 if not self.remote:
299 self.dirstate = dirstate(self.opener, ui)
299 self.dirstate = dirstate(self.opener, ui)
300 try:
300 try:
301 self.current = bin(self.opener("current").read())
301 self.current = bin(self.opener("current").read())
302 except IOError:
302 except IOError:
303 self.current = None
303 self.current = None
304
304
305 def setcurrent(self, node):
305 def setcurrent(self, node):
306 self.current = node
306 self.current = node
307 self.opener("current", "w").write(hex(node))
307 self.opener("current", "w").write(hex(node))
308
308
309 def ignore(self, f):
309 def ignore(self, f):
310 if self.ignorelist is None:
310 if self.ignorelist is None:
311 self.ignorelist = []
311 self.ignorelist = []
312 try:
312 try:
313 l = open(os.path.join(self.root, ".hgignore"))
313 l = open(os.path.join(self.root, ".hgignore"))
314 for pat in l:
314 for pat in l:
315 if pat != "\n":
315 if pat != "\n":
316 self.ignorelist.append(re.compile(pat[:-1]))
316 self.ignorelist.append(re.compile(pat[:-1]))
317 except IOError: pass
317 except IOError: pass
318 for pat in self.ignorelist:
318 for pat in self.ignorelist:
319 if pat.search(f): return True
319 if pat.search(f): return True
320 return False
320 return False
321
321
322 def lookup(self, key):
322 def lookup(self, key):
323 if self.tags is None:
323 if self.tags is None:
324 self.tags = {}
324 self.tags = {}
325 try:
325 try:
326 fl = self.file(".hgtags")
326 fl = self.file(".hgtags")
327 for l in fl.revision(fl.tip()).splitlines():
327 for l in fl.revision(fl.tip()).splitlines():
328 if l:
328 if l:
329 n, k = l.split(" ")
329 n, k = l.split(" ")
330 self.tags[k] = bin(n)
330 self.tags[k] = bin(n)
331 except KeyError: pass
331 except KeyError: pass
332 try:
332 try:
333 return self.tags[key]
333 return self.tags[key]
334 except KeyError:
334 except KeyError:
335 return self.changelog.lookup(key)
335 return self.changelog.lookup(key)
336
336
337 def join(self, f):
337 def join(self, f):
338 return os.path.join(self.path, f)
338 return os.path.join(self.path, f)
339
339
340 def file(self, f):
340 def file(self, f):
341 if f[0] == '/': f = f[1:]
341 if f[0] == '/': f = f[1:]
342 return filelog(self.opener, f)
342 return filelog(self.opener, f)
343
343
344 def transaction(self):
344 def transaction(self):
345 return transaction(self.opener, self.join("journal"),
345 return transaction(self.opener, self.join("journal"),
346 self.join("undo"))
346 self.join("undo"))
347
347
348 def recover(self):
348 def recover(self):
349 self.lock()
349 self.lock()
350 if os.path.exists(self.join("recover")):
350 if os.path.exists(self.join("recover")):
351 self.ui.status("attempting to rollback interrupted transaction\n")
351 self.ui.status("attempting to rollback interrupted transaction\n")
352 return rollback(self.opener, self.join("recover"))
352 return rollback(self.opener, self.join("recover"))
353 else:
353 else:
354 self.ui.warn("no interrupted transaction available\n")
354 self.ui.warn("no interrupted transaction available\n")
355
355
356 def undo(self):
356 def undo(self):
357 self.lock()
357 self.lock()
358 if os.path.exists(self.join("undo")):
358 if os.path.exists(self.join("undo")):
359 f = self.changelog.read(self.changelog.tip())[3]
359 f = self.changelog.read(self.changelog.tip())[3]
360 self.ui.status("attempting to rollback last transaction\n")
360 self.ui.status("attempting to rollback last transaction\n")
361 rollback(self.opener, self.join("undo"))
361 rollback(self.opener, self.join("undo"))
362 self.manifest = manifest(self.opener)
362 self.manifest = manifest(self.opener)
363 self.changelog = changelog(self.opener)
363 self.changelog = changelog(self.opener)
364
364
365 self.ui.status("discarding dirstate\n")
365 self.ui.status("discarding dirstate\n")
366 node = self.changelog.tip()
366 node = self.changelog.tip()
367 f.sort()
367 f.sort()
368
368
369 self.setcurrent(node)
369 self.setcurrent(node)
370 self.dirstate.update(f, 'i')
370 self.dirstate.update(f, 'i')
371
371
372 else:
372 else:
373 self.ui.warn("no undo information available\n")
373 self.ui.warn("no undo information available\n")
374
374
375 def lock(self, wait = 1):
375 def lock(self, wait = 1):
376 try:
376 try:
377 return lock.lock(self.join("lock"), 0)
377 return lock.lock(self.join("lock"), 0)
378 except lock.LockHeld, inst:
378 except lock.LockHeld, inst:
379 if wait:
379 if wait:
380 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
380 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
381 return lock.lock(self.join("lock"), wait)
381 return lock.lock(self.join("lock"), wait)
382 raise inst
382 raise inst
383
383
384 def rawcommit(self, files, text, user, date, p1=None, p2=None):
384 def rawcommit(self, files, text, user, date, p1=None, p2=None):
385 p1 = p1 or self.current or nullid
385 p1 = p1 or self.current or nullid
386 pchange = self.changelog.read(p1)
386 pchange = self.changelog.read(p1)
387 pmmap = self.manifest.read(pchange[0])
387 pmmap = self.manifest.read(pchange[0])
388 tr = self.transaction()
388 tr = self.transaction()
389 mmap = {}
389 mmap = {}
390 linkrev = self.changelog.count()
390 linkrev = self.changelog.count()
391 for f in files:
391 for f in files:
392 try:
392 try:
393 t = file(f).read()
393 t = file(f).read()
394 except IOError:
394 except IOError:
395 self.ui.warn("Read file %s error, skipped\n" % f)
395 self.ui.warn("Read file %s error, skipped\n" % f)
396 continue
396 continue
397 r = self.file(f)
397 r = self.file(f)
398 prev = pmmap.get(f, nullid)
398 prev = pmmap.get(f, nullid)
399 mmap[f] = r.add(t, tr, linkrev, prev)
399 mmap[f] = r.add(t, tr, linkrev, prev)
400
400
401 mnode = self.manifest.add(mmap, tr, linkrev, pchange[0])
401 mnode = self.manifest.add(mmap, tr, linkrev, pchange[0])
402 n = self.changelog.add(mnode, files, text, tr, p1, p2, user ,date, )
402 n = self.changelog.add(mnode, files, text, tr, p1, p2, user ,date, )
403 tr.close()
403 tr.close()
404 self.setcurrent(n)
404 self.setcurrent(n)
405 self.dirstate.clear()
405 self.dirstate.clear()
406 self.dirstate.update(mmap.keys(), "n")
406 self.dirstate.update(mmap.keys(), "n")
407
407
408 def commit(self, parent, files = None, text = ""):
408 def commit(self, parent, files = None, text = ""):
409 self.lock()
409 self.lock()
410
410
411 commit = []
411 commit = []
412 remove = []
412 remove = []
413 if files:
413 if files:
414 for f in files:
414 for f in files:
415 s = self.dirstate.state(f)
415 s = self.dirstate.state(f)
416 if s in 'cai':
416 if s in 'cai':
417 commit.append(f)
417 commit.append(f)
418 elif s == 'r':
418 elif s == 'r':
419 remove.append(f)
419 remove.append(f)
420 else:
420 else:
421 self.warn("%s not tracked!\n")
421 self.warn("%s not tracked!\n")
422 else:
422 else:
423 (c, a, d, u) = self.diffdir(self.root, parent)
423 (c, a, d, u) = self.diffdir(self.root, parent)
424 commit = c + a
424 commit = c + a
425 remove = d
425 remove = d
426
426
427 if not commit and not remove:
427 if not commit and not remove:
428 self.ui.status("nothing changed\n")
428 self.ui.status("nothing changed\n")
429 return
429 return
430
430
431 tr = self.transaction()
431 tr = self.transaction()
432
432
433 # check in files
433 # check in files
434 new = {}
434 new = {}
435 linkrev = self.changelog.count()
435 linkrev = self.changelog.count()
436 commit.sort()
436 commit.sort()
437 for f in commit:
437 for f in commit:
438 self.ui.note(f + "\n")
438 self.ui.note(f + "\n")
439 try:
439 try:
440 t = file(f).read()
440 t = file(f).read()
441 except IOError:
441 except IOError:
442 self.warn("trouble committing %s!\n" % f)
442 self.warn("trouble committing %s!\n" % f)
443 raise
443 raise
444
444
445 r = self.file(f)
445 r = self.file(f)
446 new[f] = r.add(t, tr, linkrev)
446 new[f] = r.add(t, tr, linkrev)
447
447
448 # update manifest
448 # update manifest
449 mmap = self.manifest.read(self.manifest.tip())
449 mmap = self.manifest.read(self.manifest.tip())
450 mmap.update(new)
450 mmap.update(new)
451 for f in remove:
451 for f in remove:
452 del mmap[f]
452 del mmap[f]
453 mnode = self.manifest.add(mmap, tr, linkrev)
453 mnode = self.manifest.add(mmap, tr, linkrev)
454
454
455 # add changeset
455 # add changeset
456 new = new.keys()
456 new = new.keys()
457 new.sort()
457 new.sort()
458
458
459 edittext = text + "\n" + "HG: manifest hash %s\n" % hex(mnode)
459 edittext = text + "\n" + "HG: manifest hash %s\n" % hex(mnode)
460 edittext += "".join(["HG: changed %s\n" % f for f in new])
460 edittext += "".join(["HG: changed %s\n" % f for f in new])
461 edittext += "".join(["HG: removed %s\n" % f for f in remove])
461 edittext += "".join(["HG: removed %s\n" % f for f in remove])
462 edittext = self.ui.edit(edittext)
462 edittext = self.ui.edit(edittext)
463
463
464 n = self.changelog.add(mnode, new, edittext, tr)
464 n = self.changelog.add(mnode, new, edittext, tr)
465 tr.close()
465 tr.close()
466
466
467 self.setcurrent(n)
467 self.setcurrent(n)
468 self.dirstate.update(new, "n")
468 self.dirstate.update(new, "n")
469 self.dirstate.forget(remove)
469 self.dirstate.forget(remove)
470
470
471 def checkout(self, node):
471 def checkout(self, node):
472 # checkout is really dumb at the moment
472 # checkout is really dumb at the moment
473 # it ought to basically merge
473 # it ought to basically merge
474 change = self.changelog.read(node)
474 change = self.changelog.read(node)
475 l = self.manifest.read(change[0]).items()
475 l = self.manifest.read(change[0]).items()
476 l.sort()
476 l.sort()
477
477
478 for f,n in l:
478 for f,n in l:
479 if f[0] == "/": continue
479 if f[0] == "/": continue
480 self.ui.note(f, "\n")
480 self.ui.note(f, "\n")
481 t = self.file(f).revision(n)
481 t = self.file(f).revision(n)
482 try:
482 try:
483 file(f, "w").write(t)
483 file(f, "w").write(t)
484 except IOError:
484 except IOError:
485 os.makedirs(os.path.dirname(f))
485 os.makedirs(os.path.dirname(f))
486 file(f, "w").write(t)
486 file(f, "w").write(t)
487
487
488 self.setcurrent(node)
488 self.setcurrent(node)
489 self.dirstate.clear()
489 self.dirstate.clear()
490 self.dirstate.update([f for f,n in l], "n")
490 self.dirstate.update([f for f,n in l], "n")
491
491
492 def diffdir(self, path, changeset):
492 def diffdir(self, path, changeset):
493 changed = []
493 changed = []
494 added = []
494 added = []
495 unknown = []
495 unknown = []
496 mf = {}
496 mf = {}
497
497
498 if changeset:
498 if changeset:
499 change = self.changelog.read(changeset)
499 change = self.changelog.read(changeset)
500 mf = self.manifest.read(change[0])
500 mf = self.manifest.read(change[0])
501
501
502 if changeset == self.current:
502 if changeset == self.current:
503 dc = self.dirstate.copy()
503 dc = self.dirstate.copy()
504 else:
504 else:
505 dc = dict.fromkeys(mf)
505 dc = dict.fromkeys(mf)
506
506
507 def fcmp(fn):
507 def fcmp(fn):
508 t1 = file(os.path.join(self.root, fn)).read()
508 t1 = file(os.path.join(self.root, fn)).read()
509 t2 = self.file(fn).revision(mf[fn])
509 t2 = self.file(fn).revision(mf[fn])
510 return cmp(t1, t2)
510 return cmp(t1, t2)
511
511
512 for dir, subdirs, files in os.walk(self.root):
512 for dir, subdirs, files in os.walk(self.root):
513 d = dir[len(self.root)+1:]
513 d = dir[len(self.root)+1:]
514 if ".hg" in subdirs: subdirs.remove(".hg")
514 if ".hg" in subdirs: subdirs.remove(".hg")
515
515
516 for f in files:
516 for f in files:
517 fn = os.path.join(d, f)
517 fn = os.path.join(d, f)
518 try: s = os.stat(os.path.join(self.root, fn))
518 try: s = os.stat(os.path.join(self.root, fn))
519 except: continue
519 except: continue
520 if fn in dc:
520 if fn in dc:
521 c = dc[fn]
521 c = dc[fn]
522 del dc[fn]
522 del dc[fn]
523 if not c:
523 if not c:
524 if fcmp(fn):
524 if fcmp(fn):
525 changed.append(fn)
525 changed.append(fn)
526 if c[0] == 'i':
526 if c[0] == 'i':
527 if fn not in mf:
527 if fn not in mf:
528 added.append(fn)
528 added.append(fn)
529 elif fcmp(fn):
529 elif fcmp(fn):
530 changed.append(fn)
530 changed.append(fn)
531 elif c[0] == 'a':
531 elif c[0] == 'a':
532 added.append(fn)
532 added.append(fn)
533 elif c[0] == 'r':
533 elif c[0] == 'r':
534 unknown.append(fn)
534 unknown.append(fn)
535 elif c[2] != s.st_size:
535 elif c[2] != s.st_size:
536 changed.append(fn)
536 changed.append(fn)
537 elif c[1] != s.st_mode or c[3] != s.st_mtime:
537 elif c[1] != s.st_mode or c[3] != s.st_mtime:
538 if fcmp(fn):
538 if fcmp(fn):
539 changed.append(fn)
539 changed.append(fn)
540 else:
540 else:
541 if self.ignore(fn): continue
541 if self.ignore(fn): continue
542 unknown.append(fn)
542 unknown.append(fn)
543
543
544 deleted = dc.keys()
544 deleted = dc.keys()
545 deleted.sort()
545 deleted.sort()
546
546
547 return (changed, added, deleted, unknown)
547 return (changed, added, deleted, unknown)
548
548
549 def diffrevs(self, node1, node2):
549 def diffrevs(self, node1, node2):
550 changed, added = [], []
550 changed, added = [], []
551
551
552 change = self.changelog.read(node1)
552 change = self.changelog.read(node1)
553 mf1 = self.manifest.read(change[0])
553 mf1 = self.manifest.read(change[0])
554 change = self.changelog.read(node2)
554 change = self.changelog.read(node2)
555 mf2 = self.manifest.read(change[0])
555 mf2 = self.manifest.read(change[0])
556
556
557 for fn in mf2:
557 for fn in mf2:
558 if mf1.has_key(fn):
558 if mf1.has_key(fn):
559 if mf1[fn] != mf2[fn]:
559 if mf1[fn] != mf2[fn]:
560 changed.append(fn)
560 changed.append(fn)
561 del mf1[fn]
561 del mf1[fn]
562 else:
562 else:
563 added.append(fn)
563 added.append(fn)
564
564
565 deleted = mf1.keys()
565 deleted = mf1.keys()
566 deleted.sort()
566 deleted.sort()
567
567
568 return (changed, added, deleted)
568 return (changed, added, deleted)
569
569
570 def add(self, list):
570 def add(self, list):
571 for f in list:
571 for f in list:
572 p = os.path.join(self.root, f)
572 p = os.path.join(self.root, f)
573 if not os.path.isfile(p):
573 if not os.path.isfile(p):
574 self.ui.warn("%s does not exist!\n" % f)
574 self.ui.warn("%s does not exist!\n" % f)
575 elif self.dirstate.state(f) == 'n':
575 elif self.dirstate.state(f) == 'n':
576 self.ui.warn("%s already tracked!\n" % f)
576 self.ui.warn("%s already tracked!\n" % f)
577 else:
577 else:
578 self.dirstate.update([f], "a")
578 self.dirstate.update([f], "a")
579
579
580 def forget(self, list):
580 def forget(self, list):
581 for f in list:
581 for f in list:
582 if self.dirstate.state(f) not in 'ai':
582 if self.dirstate.state(f) not in 'ai':
583 self.ui.warn("%s not added!\n" % f)
583 self.ui.warn("%s not added!\n" % f)
584 else:
584 else:
585 self.dirstate.forget([f])
585 self.dirstate.forget([f])
586
586
587 def remove(self, list):
587 def remove(self, list):
588 for f in list:
588 for f in list:
589 p = os.path.join(self.root, f)
589 p = os.path.join(self.root, f)
590 if os.path.isfile(p):
590 if os.path.isfile(p):
591 self.ui.warn("%s still exists!\n" % f)
591 self.ui.warn("%s still exists!\n" % f)
592 elif f not in self.dirstate:
592 elif f not in self.dirstate:
593 self.ui.warn("%s not tracked!\n" % f)
593 self.ui.warn("%s not tracked!\n" % f)
594 else:
594 else:
595 self.dirstate.update([f], "r")
595 self.dirstate.update([f], "r")
596
596
597 def heads(self):
597 def heads(self):
598 return self.changelog.heads()
598 return self.changelog.heads()
599
599
600 def branches(self, nodes):
600 def branches(self, nodes):
601 if not nodes: nodes = [self.changelog.tip()]
601 if not nodes: nodes = [self.changelog.tip()]
602 b = []
602 b = []
603 for n in nodes:
603 for n in nodes:
604 t = n
604 t = n
605 while n:
605 while n:
606 p = self.changelog.parents(n)
606 p = self.changelog.parents(n)
607 if p[1] != nullid or p[0] == nullid:
607 if p[1] != nullid or p[0] == nullid:
608 b.append((t, n, p[0], p[1]))
608 b.append((t, n, p[0], p[1]))
609 break
609 break
610 n = p[0]
610 n = p[0]
611 return b
611 return b
612
612
613 def between(self, pairs):
613 def between(self, pairs):
614 r = []
614 r = []
615
615
616 for top, bottom in pairs:
616 for top, bottom in pairs:
617 n, l, i = top, [], 0
617 n, l, i = top, [], 0
618 f = 1
618 f = 1
619
619
620 while n != bottom:
620 while n != bottom:
621 p = self.changelog.parents(n)[0]
621 p = self.changelog.parents(n)[0]
622 if i == f:
622 if i == f:
623 l.append(n)
623 l.append(n)
624 f = f * 2
624 f = f * 2
625 n = p
625 n = p
626 i += 1
626 i += 1
627
627
628 r.append(l)
628 r.append(l)
629
629
630 return r
630 return r
631
631
def newer(self, nodes):
    """Return all changelog nodes descending from (or equal to) the
    given nodes, in increasing revision order."""
    cl = self.changelog
    listed = {}     # the nodes we were asked about
    found = {}      # nodes already known to belong to the result
    result = []
    total = cl.count()
    low = total

    # the scan can start at the lowest-numbered node we were given
    for n in nodes:
        low = min(low, cl.rev(n))
        listed[n] = 1

    for rev in range(low, total):
        node = cl.node(rev)
        if node in listed:              # explicitly requested
            found[node] = 1
            result.append(node)
            continue
        for p in cl.parents(node):
            if p in found:              # child of a known descendant
                found[node] = 1
                result.append(node)
                break

    return result
657
657
def getchangegroup(self, remote):
    """Figure out which changesets the remote repository has that we do
    not, and return a changegroup generator for them.

    Walks the remote's branch heads back toward nodes we know, using a
    binary search over linear branch ranges to pin down the earliest
    unknown changesets.  Returns None when there is nothing to fetch.
    """
    m = self.changelog.nodemap
    search = []
    fetch = []
    seen = {}
    seenbranch = {}

    # if we have an empty repo, fetch everything
    if self.changelog.tip() == nullid:
        self.ui.status("requesting all changes\n")
        return remote.changegroup([nullid])

    # otherwise, assume we're closer to the tip than the root
    self.ui.status("searching for changes\n")
    heads = remote.heads()
    unknown = [h for h in heads if h not in m]

    if not unknown:
        self.ui.status("nothing to do!\n")
        return None

    unknown = remote.branches(unknown)
    while unknown:
        n = unknown.pop(0)
        seen[n[0]] = 1

        self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
        # n is a (head, root, p1, p2) branch tuple; the original compared
        # the whole tuple against nullid, which can never be true --
        # test the head node instead
        if n[0] == nullid:
            break
        if n in seenbranch:
            self.ui.debug("branch already found\n")
            continue
        if n[1] and n[1] in m:  # do we know the base?
            self.ui.debug("found incomplete branch %s:%s\n"
                          % (short(n[0]), short(n[1])))
            search.append(n)    # schedule branch range for scanning
            seenbranch[n] = 1
        else:
            if n[2] in m and n[3] in m:
                if n[1] not in fetch:
                    self.ui.debug("found new changeset %s\n" %
                                  short(n[1]))
                    fetch.append(n[1])  # earliest unknown
                continue

            # ask the remote about any parent branch we have not seen
            r = [a for a in n[2:4] if a not in seen]

            if r:
                self.ui.debug("requesting %s\n" %
                              " ".join(map(short, r)))
                for b in remote.branches(r):
                    self.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    if b[0] not in m and b[0] not in seen:
                        unknown.append(b)

    # binary search within each partially-known branch range
    while search:
        n = search.pop(0)
        l = remote.between([(n[0], n[1])])[0]
        p = n[0]
        f = 1
        for i in l + [n[1]]:
            if i in m:
                if f <= 2:
                    # narrowed to adjacent nodes: p is the earliest unknown
                    self.ui.debug("found new branch changeset %s\n" %
                                  short(p))
                    fetch.append(p)
                else:
                    self.ui.debug("narrowed branch search to %s:%s\n"
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check: everything scheduled for fetch must be new to us.
    # The original used the obsolete string-exception form
    # (raise "already have", ...); raise a real exception instead.
    for f in fetch:
        if f in m:
            raise RuntimeError("already have changeset %s" % short(f[:4]))

    self.ui.note("adding new changesets starting at " +
                 " ".join([short(f) for f in fetch]) + "\n")

    return remote.changegroup(fetch)
744
744
def changegroup(self, basenodes):
    """Generate a changegroup stream for every changeset newer than
    basenodes: changelog entries, manifests, then per-file revision
    groups, each file prefixed by a length-framed filename chunk."""
    nodes = self.newer(basenodes)

    # map changelog revision number -> node, for delta-group linking
    linkmap = {}
    for node in nodes:
        linkmap[self.changelog.rev(node)] = node

    # collect the sorted set of files touched by these changesets
    filepool = {}
    for node in nodes:
        entry = self.changelog.read(node)
        for fname in entry[3]:
            filepool[fname] = 1
    changed = sorted(filepool)

    # note: computed but never used; preserved from the original
    revs = [self.changelog.rev(node) for node in nodes]

    # stream order: changesets, manifests, then each file's revisions
    for chunk in self.changelog.group(linkmap):
        yield chunk
    for chunk in self.manifest.group(linkmap):
        yield chunk
    for fname in changed:
        # length-framed filename header (length includes the 4-byte prefix)
        yield struct.pack(">l", len(fname) + 4) + fname
        for chunk in self.file(fname).group(linkmap):
            yield chunk
772
772
def addchangegroup(self, generator):
    # Apply a changegroup stream (as produced by changegroup()) to the
    # repository inside a single transaction: changesets first, then
    # manifests, then the per-file revision groups.

    class genread:
        # minimal file-like adapter: turns the chunk generator into an
        # object with a blocking read(l) method
        def __init__(self, generator):
            self.g = generator
            self.buf = ""
        def read(self, l):
            # accumulate chunks until l bytes are buffered (or the
            # stream ends); may return fewer than l bytes at EOF
            while l > len(self.buf):
                try:
                    self.buf += self.g.next()
                except StopIteration:
                    break
            d, self.buf = self.buf[:l], self.buf[l:]
            return d

    def getchunk():
        # read one length-framed chunk; "" signals end of the current
        # group (zero-/short-length frame or stream exhaustion)
        d = source.read(4)
        if not d: return ""
        l = struct.unpack(">l", d)[0]
        if l <= 4: return ""
        return source.read(l - 4)

    def getgroup():
        # iterate chunks of one delta group until the empty terminator
        while 1:
            c = getchunk()
            if not c: break
            yield c

    def csmap(x):
        # link-revision callback for the changelog group itself
        self.ui.debug("add changeset %s\n" % short(x))
        return self.changelog.count()

    def revmap(x):
        # link-revision callback: map a changeset node to its revision
        return self.changelog.rev(x)

    if not generator: return
    changesets = files = revisions = 0
    self.lock()
    source = genread(generator)
    tr = self.transaction()

    # pull off the changeset group
    self.ui.status("adding changesets\n")
    co = self.changelog.tip()
    cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
    changesets = self.changelog.rev(cn) - self.changelog.rev(co)

    # pull off the manifest group
    self.ui.status("adding manifests\n")
    mm = self.manifest.tip()
    mo = self.manifest.addgroup(getgroup(), revmap, tr)

    # process the files: alternating filename chunk + delta group
    self.ui.status("adding file revisions\n")
    while 1:
        f = getchunk()
        if not f: break
        self.ui.debug("adding %s revisions\n" % f)
        fl = self.file(f)
        o = fl.tip()
        n = fl.addgroup(getgroup(), revmap, tr)
        revisions += fl.rev(n) - fl.rev(o)
        files += 1

    self.ui.status(("modified %d files, added %d changesets" +
                    " and %d new revisions\n")
                   % (files, changesets, revisions))

    tr.close()
    return
843
843
def merge(self, generator):
    # Pull a changegroup and, when the incoming head is not a
    # descendant of our tip, resolve the two heads: merge manifests,
    # merge or choose individual file revisions, then commit a merge
    # changeset.  Simple fast-forward pulls skip the resolve entirely.
    changesets = files = revisions = 0

    self.lock()
    class genread:
        # file-like adapter over the chunk generator (same shape as the
        # one in addchangegroup)
        def __init__(self, generator):
            self.g = generator
            self.buf = ""
        def read(self, l):
            while l > len(self.buf):
                try:
                    self.buf += self.g.next()
                except StopIteration:
                    break
            d, self.buf = self.buf[:l], self.buf[l:]
            return d

    if not generator: return
    source = genread(generator)

    def getchunk():
        # one length-framed chunk; "" marks the end of a group
        d = source.read(4)
        if not d: return ""
        l = struct.unpack(">l", d)[0]
        if l <= 4: return ""
        return source.read(l - 4)

    def getgroup():
        while 1:
            c = getchunk()
            if not c: break
            yield c

    tr = self.transaction()
    simple = True        # stays True when no resolve is needed
    need = {}            # file -> node: tips that must be 3-way merged

    self.ui.status("adding changesets\n")
    # pull off the changeset group
    def report(x):
        self.ui.debug("add changeset %s\n" % short(x))
        return self.changelog.count()

    co = self.changelog.tip()
    cn = self.changelog.addgroup(getgroup(), report, tr)

    changesets = self.changelog.rev(cn) - self.changelog.rev(co)

    self.ui.status("adding manifests\n")
    # pull off the manifest group
    mm = self.manifest.tip()
    mo = self.manifest.addgroup(getgroup(),
                                lambda x: self.changelog.rev(x), tr)

    # do we need a resolve?
    if self.changelog.ancestor(co, cn) != co:
        simple = False
        resolverev = self.changelog.count()

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.status("resolving manifests\n")
        ma = self.manifest.ancestor(mm, mo)
        omap = self.manifest.read(mo) # other
        amap = self.manifest.read(ma) # ancestor
        mmap = self.manifest.read(mm) # mine
        nmap = {}                     # the merged manifest being built

        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(ma), short(mm), short(mo)))

        for f, mid in mmap.iteritems():
            if f in omap:
                if mid != omap[f]:
                    # both sides have the file but at different versions
                    self.ui.debug(" %s versions differ, do resolve\n" % f)
                    need[f] = mid # use merged version or local version
                else:
                    nmap[f] = mid # keep ours
                del omap[f]
            elif f in amap:
                if mid != amap[f]:
                    # we changed a file the other side deleted: ask
                    r = self.ui.prompt(
                        (" local changed %s which remote deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "k": nmap[f] = mid
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    pass # other deleted it
            else:
                self.ui.debug("local created %s\n" %f)
                nmap[f] = mid # we created it

        del mmap

        # whatever is left in omap exists only on the remote side
        for f, oid in omap.iteritems():
            if f in amap:
                if oid != amap[f]:
                    # they changed a file we deleted: ask
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "k": nmap[f] = oid
                else:
                    pass # probably safe
            else:
                self.ui.debug("remote created %s, do resolve\n" % f)
                need[f] = oid

        del omap
        del amap

    new = need.keys()
    new.sort()

    # process the files
    self.ui.status("adding files\n")
    while 1:
        f = getchunk()
        if not f: break
        self.ui.debug("adding %s revisions\n" % f)
        fl = self.file(f)
        o = fl.tip()
        n = fl.addgroup(getgroup(), lambda x: self.changelog.rev(x), tr)
        revisions += fl.rev(n) - fl.rev(o)
        files += 1
        if f in need:
            del need[f]
            # manifest resolve determined we need to merge the tips
            nmap[f] = self.merge3(fl, f, o, n, tr, resolverev)

    if need:
        # we need to do trivial merges on local files
        for f in new:
            if f not in need: continue
            fl = self.file(f)
            nmap[f] = self.merge3(fl, f, need[f], fl.tip(), tr, resolverev)
            revisions += 1

    # For simple merges, we don't need to resolve manifests or changesets
    if simple:
        self.ui.debug("simple merge, skipping resolve\n")
        self.ui.status(("modified %d files, added %d changesets" +
                        " and %d new revisions\n")
                       % (files, changesets, revisions))
        tr.close()
        return

    node = self.manifest.add(nmap, tr, resolverev, mm, mo)
    revisions += 1

    # Now all files and manifests are merged, we add the changed files
    # and manifest id to the changelog
    self.ui.status("committing merge changeset\n")
    if co == cn: cn = -1

    edittext = "\nHG: merge resolve\n" + \
               "HG: manifest hash %s\n" % hex(node) + \
               "".join(["HG: changed %s\n" % f for f in new])
    edittext = self.ui.edit(edittext)
    n = self.changelog.add(node, new, edittext, tr, co, cn)
    revisions += 1

    self.ui.status("added %d changesets, %d files, and %d new revisions\n"
                   % (changesets, files, revisions))

    tr.close()
1009
1009
def merge3(self, fl, fn, my, other, transaction, link):
    """Perform a 3-way merge of file fn and append the result to
    filelog fl.

    my/other are the two revision nodes to merge; their common ancestor
    is used as the merge base.  If the local version equals the base,
    the other version is taken verbatim; otherwise the external
    $HGMERGE tool is run on temporary copies.  Returns the new filelog
    node.

    Raises RuntimeError when the external merge tool fails, and
    KeyError when HGMERGE is not set in the environment.
    """

    def temp(prefix, node):
        # write revision `node` to a uniquely named temp file
        pre = "%s~%s." % (os.path.basename(fn), prefix)
        (fd, name) = tempfile.mkstemp("", pre)
        f = os.fdopen(fd, "w")
        f.write(fl.revision(node))
        f.close()
        return name

    base = fl.ancestor(my, other)
    self.ui.note("resolving %s\n" % fn)
    self.ui.debug("local %s remote %s ancestor %s\n" %
                  (short(my), short(other), short(base)))

    if my == base:
        # no local changes: fast path, take the other side as-is
        text = fl.revision(other)
    else:
        a = temp("local", my)
        b = temp("remote", other)
        c = temp("parent", base)

        # NOTE(review): cmd and file names are interpolated into a
        # shell command line; paths containing shell metacharacters
        # would break or be misinterpreted here
        cmd = os.environ["HGMERGE"]
        self.ui.debug("invoking merge with %s\n" % cmd)
        r = os.system("%s %s %s %s %s" % (cmd, a, b, c, fn))
        if r:
            # was: raise "Merge failed!" -- string exceptions are not
            # valid; raise a real exception type instead
            raise RuntimeError("merge of %s failed" % fn)

        text = open(a).read()
        os.unlink(a)
        os.unlink(b)
        os.unlink(c)

    return fl.add(text, transaction, link, my, other)
1045
1045
class remoterepository:
    """Read-only proxy for a repository served over HTTP.

    Each method issues one "cmd" request against the remote CGI script
    and decodes its textual response.
    """

    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

    def do_cmd(self, cmd, **args):
        """Open "<url>?cmd=<cmd>&<extra args>" and return the response."""
        self.ui.debug("sending %s command\n" % cmd)
        params = {"cmd": cmd}
        params.update(args)
        return urllib.urlopen("%s?%s" % (self.url, urllib.urlencode(params)))

    def heads(self):
        """Return the remote's head nodes.

        The response is space-separated hex ids, newline-terminated."""
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        """Return (head, root, p1, p2) tuples, one per response line."""
        d = self.do_cmd("branches", nodes=" ".join(map(hex, nodes))).read()
        try:
            return [tuple(map(bin, line.split(" ")))
                    for line in d.splitlines()]
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        """Sample nodes between each (top, bottom) pair on the remote.

        Request lines are "top-bottom"; each response line is a
        space-separated node list (possibly empty)."""
        spec = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=spec).read()
        try:
            return [l and map(bin, l.split(" ")) or []
                    for l in d.splitlines()]
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def changegroup(self, nodes):
        """Stream the changegroup rooted at nodes, decompressing the
        remote's zlib stream chunk by chunk."""
        n = " ".join(map(hex, nodes))
        zd = zlib.decompressobj()
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0
        while 1:
            d = f.read(4096)
            bytes += len(d)
            if not d:
                # flush whatever the decompressor still buffers
                yield zd.flush()
                break
            yield zd.decompress(d)
        self.ui.note("%d bytes of data transfered\n" % bytes)
1100
1100
def repository(ui, path=None, create=0):
    """Repository factory: choose a repository class from the path
    scheme.

    http:// and hg:// paths are remote; old-http:// is a local-style
    repository read over HTTP; anything else (including no path) is a
    plain local repository.
    """
    if path:
        if path.startswith("http://"):
            return remoterepository(ui, path)
        if path.startswith("hg://"):
            return remoterepository(ui, path.replace("hg://", "http://"))
        if path.startswith("old-http://"):
            return localrepository(ui, path.replace("old-http://", "http://"))
    return localrepository(ui, path, create)
1110
1110
class httprangereader:
    """File-like reader that fetches byte ranges of a URL on demand
    using HTTP Range requests."""

    def __init__(self, url):
        self.url = url
        self.pos = 0

    def seek(self, pos):
        # absolute positioning only
        self.pos = pos

    def read(self, bytes=None):
        """Fetch `bytes` bytes from the current position (or the rest
        of the resource when no count is given)."""
        opener = urllib2.build_opener(byterange.HTTPRangeHandler())
        urllib2.install_opener(opener)
        req = urllib2.Request(self.url)
        # an empty end leaves the range open-ended
        end = ''
        if bytes: end = self.pos + bytes
        req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
        return urllib2.urlopen(req).read()
@@ -1,507 +1,510 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # This provides efficient delta storage with O(1) retrieve and append
3 # This provides efficient delta storage with O(1) retrieve and append
4 # and O(changes) merge between branches
4 # and O(changes) merge between branches
5 #
5 #
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 # Copyright 2005 Matt Mackall <mpm@selenic.com>
7 #
7 #
8 # This software may be used and distributed according to the terms
8 # This software may be used and distributed according to the terms
9 # of the GNU General Public License, incorporated herein by reference.
9 # of the GNU General Public License, incorporated herein by reference.
10
10
11 import zlib, struct, sha, binascii, heapq
11 import zlib, struct, sha, binascii, heapq
12 from mercurial import mdiff
12 from mercurial import mdiff
13
13
def hex(node):
    """Binary node id -> hex string."""
    return binascii.hexlify(node)

def bin(node):
    """Hex string -> binary node id."""
    return binascii.unhexlify(node)

def short(node):
    """Abbreviated (8 hex digit) form of a binary node id."""
    return hex(node[:4])
17
17
def compress(text):
    """Compress a revision payload with zlib, unless that is a loss.

    Short or incompressible text is stored literally: a leading '\\0'
    marks raw delta data kept as-is, anything else gets a 'u'
    (uncompressed) tag byte.
    """
    if not text:
        return text
    # zlib overhead dominates tiny payloads; store them literally
    if len(text) < 44:
        if text[0] == '\0':
            return text
        return 'u' + text
    packed = zlib.compress(text)
    if len(packed) > len(text):
        # compression made it bigger; store literally instead
        if text[0] == '\0':
            return text
        return 'u' + text
    return packed
28
28
def decompress(bin):
    """Reverse compress(): dispatch on the tag byte.

    '\\0' -> raw data stored as-is, 'x' -> a zlib stream (zlib header
    byte), 'u' -> literal text after the tag.

    Raises ValueError for an unknown tag byte.
    """
    if not bin:
        return bin
    t = bin[0]
    if t == '\0': return bin
    if t == 'x': return zlib.decompress(bin)
    if t == 'u': return bin[1:]
    # was: raise "unknown compression type %s" % t -- string exceptions
    # are not legal; raise a real exception type
    raise ValueError("unknown compression type %r" % t)
36
36
def hash(text, p1, p2):
    """Node id of a revision: SHA-1 over the two parent ids in sorted
    order followed by the revision text, so parent order is
    irrelevant."""
    if p1 < p2:
        return sha.sha(p1 + p2 + text).digest()
    return sha.sha(p2 + p1 + text).digest()
41
41
# the id of the null (empty) revision: 20 zero bytes (one SHA-1 digest)
nullid = "\0" * 20
# revlog index entry layout: offset, size, base, linkrev (four
# big-endian int32s) followed by p1, p2 and node id (20-byte hashes)
indexformat = ">4l20s20s20s"
44
44
class lazyparser:
    """Parse a revlog index incrementally.

    Holds the raw index data and fills self.index / self.map in blocks
    of 1000 entries as positions are requested, so large indexes need
    not be decoded up front.
    """

    def __init__(self, data):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data)/self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}

    def load(self, pos):
        """Decode the whole 1000-entry block containing `pos`."""
        start = (pos / 1000) * 1000
        end = min(self.l, start + 1000)
        for i in range(start, end):
            raw = self.data[i * self.s: (i + 1) * self.s]
            entry = struct.unpack(indexformat, raw)
            self.index[i] = entry
            self.map[entry[6]] = i   # node id -> revision number
63
63
class lazyindex:
    """List-like view over a lazyparser's index; entries are decoded on
    first access."""

    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return len(self.p.index)

    def load(self, pos):
        # force the parser to decode the block holding `pos`
        self.p.load(pos)
        return self.p.index[pos]

    def __getitem__(self, pos):
        # a None slot means the entry has not been parsed yet
        return self.p.index[pos] or self.load(pos)

    def append(self, e):
        self.p.index.append(e)
76
76
class lazymap:
    """Dict-like node -> revision mapping over a lazyparser.

    A lookup miss searches the raw index data for the node's bytes and
    decodes the surrounding block on demand.
    """

    def __init__(self, parser):
        self.p = parser

    def load(self, key):
        # locate the raw node bytes in the undecoded index data
        n = self.p.data.find(key)
        if n < 0:
            raise KeyError("node " + hex(key))
        self.p.load(n / self.p.s)

    def __contains__(self, key):
        try:
            self[key]
            return True
        except KeyError:
            return False

    def __iter__(self):
        for i in range(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                # entry not parsed yet; decode its block first
                self.p.load(i)
                yield self.p.index[i][6]

    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))

    def __setitem__(self, key, val):
        self.p.map[key] = val
109
109
110 class revlog:
110 class revlog:
def __init__(self, opener, indexfile, datafile):
    # A revlog stores the revision history of one item as deltas in
    # `datafile`, described by fixed-size entries in `indexfile`.
    # `opener` is a callable used to open both files, letting callers
    # redirect I/O (e.g. through the store layout).
    self.indexfile = indexfile
    self.datafile = datafile
    self.opener = opener
    # cache slot for the most recently reconstructed revision
    self.cache = None

    try:
        i = self.opener(self.indexfile).read()
    except IOError:
        # no index file yet: treat as an empty revlog
        i = ""

    if len(i) > 10000:
        # big index, let's parse it on demand
        parser = lazyparser(i)
        self.index = lazyindex(parser)
        self.nodemap = lazymap(parser)
    else:
        # small index: decode every entry eagerly
        s = struct.calcsize(indexformat)
        l = len(i) / s
        self.index = [None] * l
        m = [None] * l

        n = 0
        for f in xrange(0, len(i), s):
            # offset, size, base, linkrev, p1, p2, nodeid
            e = struct.unpack(indexformat, i[f:f + s])
            m[n] = (e[6], n)
            self.index[n] = e
            n += 1

        self.nodemap = dict(m)
        self.nodemap[nullid] = -1
143
143
144
144
145 def tip(self): return self.node(len(self.index) - 1)
145 def tip(self): return self.node(len(self.index) - 1)
146 def count(self): return len(self.index)
146 def count(self): return len(self.index)
147 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
147 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
148 def rev(self, node): return self.nodemap[node]
148 def rev(self, node): return self.nodemap[node]
149 def linkrev(self, node): return self.index[self.nodemap[node]][3]
149 def linkrev(self, node): return self.index[self.nodemap[node]][3]
150 def parents(self, node):
150 def parents(self, node):
151 if node == nullid: return (nullid, nullid)
151 if node == nullid: return (nullid, nullid)
152 return self.index[self.nodemap[node]][4:6]
152 return self.index[self.nodemap[node]][4:6]
153
153
154 def start(self, rev): return self.index[rev][0]
154 def start(self, rev): return self.index[rev][0]
155 def length(self, rev): return self.index[rev][1]
155 def length(self, rev): return self.index[rev][1]
156 def end(self, rev): return self.start(rev) + self.length(rev)
156 def end(self, rev): return self.start(rev) + self.length(rev)
157 def base(self, rev): return self.index[rev][2]
157 def base(self, rev): return self.index[rev][2]
158
158
159 def heads(self):
159 def heads(self):
160 p = {}
160 p = {}
161 h = []
161 h = []
162 for r in range(self.count() - 1, 0, -1):
162 for r in range(self.count() - 1, 0, -1):
163 n = self.node(r)
163 n = self.node(r)
164 if n not in p:
164 if n not in p:
165 h.append(n)
165 h.append(n)
166 for pn in self.parents(n):
166 for pn in self.parents(n):
167 p[pn] = 1
167 p[pn] = 1
168 return h
168 return h
169
169
170 def lookup(self, id):
170 def lookup(self, id):
171 try:
171 try:
172 rev = int(id)
172 rev = int(id)
173 return self.node(rev)
173 return self.node(rev)
174 except ValueError:
174 except ValueError:
175 c = []
175 c = []
176 for n in self.nodemap:
176 for n in self.nodemap:
177 if id in hex(n):
177 if id in hex(n):
178 c.append(n)
178 c.append(n)
179 if len(c) > 1: raise KeyError("Ambiguous identifier")
179 if len(c) > 1: raise KeyError("Ambiguous identifier")
180 if len(c) < 1: raise KeyError("No match found")
180 if len(c) < 1: raise KeyError("No match found")
181 return c[0]
181 return c[0]
182
182
183 return None
183 return None
184
184
185 def diff(self, a, b):
185 def diff(self, a, b):
186 return mdiff.textdiff(a, b)
186 return mdiff.textdiff(a, b)
187
187
188 def patches(self, t, pl):
188 def patches(self, t, pl):
189 return mdiff.patches(t, pl)
189 return mdiff.patches(t, pl)
190
190
191 def delta(self, node):
191 def delta(self, node):
192 r = self.rev(node)
192 r = self.rev(node)
193 b = self.base(r)
193 b = self.base(r)
194 if r == b:
194 if r == b:
195 return self.diff(self.revision(self.node(r - 1)),
195 return self.diff(self.revision(self.node(r - 1)),
196 self.revision(node))
196 self.revision(node))
197 else:
197 else:
198 f = self.opener(self.datafile)
198 f = self.opener(self.datafile)
199 f.seek(self.start(r))
199 f.seek(self.start(r))
200 data = f.read(self.length(r))
200 data = f.read(self.length(r))
201 return decompress(data)
201 return decompress(data)
202
202
203 def revision(self, node):
203 def revision(self, node):
204 if node == nullid: return ""
204 if node == nullid: return ""
205 if self.cache and self.cache[0] == node: return self.cache[2]
205 if self.cache and self.cache[0] == node: return self.cache[2]
206
206
207 text = None
207 text = None
208 rev = self.rev(node)
208 rev = self.rev(node)
209 start, length, base, link, p1, p2, node = self.index[rev]
209 start, length, base, link, p1, p2, node = self.index[rev]
210 end = start + length
210 end = start + length
211 if base != rev: start = self.start(base)
211 if base != rev: start = self.start(base)
212
212
213 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
213 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
214 base = self.cache[1]
214 base = self.cache[1]
215 start = self.start(base + 1)
215 start = self.start(base + 1)
216 text = self.cache[2]
216 text = self.cache[2]
217 last = 0
217 last = 0
218
218
219 f = self.opener(self.datafile)
219 f = self.opener(self.datafile)
220 f.seek(start)
220 f.seek(start)
221 data = f.read(end - start)
221 data = f.read(end - start)
222
222
223 if not text:
223 if not text:
224 last = self.length(base)
224 last = self.length(base)
225 text = decompress(data[:last])
225 text = decompress(data[:last])
226
226
227 bins = []
227 bins = []
228 for r in xrange(base + 1, rev + 1):
228 for r in xrange(base + 1, rev + 1):
229 s = self.length(r)
229 s = self.length(r)
230 bins.append(decompress(data[last:last + s]))
230 bins.append(decompress(data[last:last + s]))
231 last = last + s
231 last = last + s
232
232
233 text = mdiff.patches(text, bins)
233 text = mdiff.patches(text, bins)
234
234
235 if node != hash(text, p1, p2):
235 if node != hash(text, p1, p2):
236 raise IOError("integrity check failed on %s:%d"
236 raise IOError("integrity check failed on %s:%d"
237 % (self.datafile, rev))
237 % (self.datafile, rev))
238
238
239 self.cache = (node, rev, text)
239 self.cache = (node, rev, text)
240 return text
240 return text
241
241
242 def addrevision(self, text, transaction, link, p1=None, p2=None):
242 def addrevision(self, text, transaction, link, p1=None, p2=None):
243 if text is None: text = ""
243 if text is None: text = ""
244 if p1 is None: p1 = self.tip()
244 if p1 is None: p1 = self.tip()
245 if p2 is None: p2 = nullid
245 if p2 is None: p2 = nullid
246
246
247 node = hash(text, p1, p2)
247 node = hash(text, p1, p2)
248
248
249 n = self.count()
249 n = self.count()
250 t = n - 1
250 t = n - 1
251
251
252 if n:
252 if n:
253 base = self.base(t)
253 base = self.base(t)
254 start = self.start(base)
254 start = self.start(base)
255 end = self.end(t)
255 end = self.end(t)
256 prev = self.revision(self.tip())
256 prev = self.revision(self.tip())
257 d = self.diff(prev, text)
257 d = self.diff(prev, text)
258 data = compress(d)
258 data = compress(d)
259 dist = end - start + len(data)
259 dist = end - start + len(data)
260
260
261 # full versions are inserted when the needed deltas
261 # full versions are inserted when the needed deltas
262 # become comparable to the uncompressed text
262 # become comparable to the uncompressed text
263 if not n or dist > len(text) * 2:
263 if not n or dist > len(text) * 2:
264 data = compress(text)
264 data = compress(text)
265 base = n
265 base = n
266 else:
266 else:
267 base = self.base(t)
267 base = self.base(t)
268
268
269 offset = 0
269 offset = 0
270 if t >= 0:
270 if t >= 0:
271 offset = self.end(t)
271 offset = self.end(t)
272
272
273 e = (offset, len(data), base, link, p1, p2, node)
273 e = (offset, len(data), base, link, p1, p2, node)
274
274
275 self.index.append(e)
275 self.index.append(e)
276 self.nodemap[node] = n
276 self.nodemap[node] = n
277 entry = struct.pack(indexformat, *e)
277 entry = struct.pack(indexformat, *e)
278
278
279 transaction.add(self.datafile, e[0])
279 transaction.add(self.datafile, e[0])
280 self.opener(self.datafile, "a").write(data)
280 self.opener(self.datafile, "a").write(data)
281 transaction.add(self.indexfile, n * len(entry))
281 transaction.add(self.indexfile, n * len(entry))
282 self.opener(self.indexfile, "a").write(entry)
282 self.opener(self.indexfile, "a").write(entry)
283
283
284 self.cache = (node, n, text)
284 self.cache = (node, n, text)
285 return node
285 return node
286
286
287 def ancestor(self, a, b):
287 def ancestor(self, a, b):
288 # calculate the distance of every node from root
288 # calculate the distance of every node from root
289 dist = {nullid: 0}
289 dist = {nullid: 0}
290 for i in xrange(self.count()):
290 for i in xrange(self.count()):
291 n = self.node(i)
291 n = self.node(i)
292 p1, p2 = self.parents(n)
292 p1, p2 = self.parents(n)
293 dist[n] = max(dist[p1], dist[p2]) + 1
293 dist[n] = max(dist[p1], dist[p2]) + 1
294
294
295 # traverse ancestors in order of decreasing distance from root
295 # traverse ancestors in order of decreasing distance from root
296 def ancestors(node):
296 def ancestors(node):
297 # we store negative distances because heap returns smallest member
297 # we store negative distances because heap returns smallest member
298 h = [(-dist[node], node)]
298 h = [(-dist[node], node)]
299 seen = {}
299 seen = {}
300 earliest = self.count()
300 earliest = self.count()
301 while h:
301 while h:
302 d, n = heapq.heappop(h)
302 d, n = heapq.heappop(h)
303 r = self.rev(n)
303 r = self.rev(n)
304 if n not in seen:
304 if n not in seen:
305 seen[n] = 1
305 seen[n] = 1
306 yield (-d, n)
306 yield (-d, n)
307 for p in self.parents(n):
307 for p in self.parents(n):
308 heapq.heappush(h, (-dist[p], p))
308 heapq.heappush(h, (-dist[p], p))
309
309
310 x = ancestors(a)
310 x = ancestors(a)
311 y = ancestors(b)
311 y = ancestors(b)
312 lx = x.next()
312 lx = x.next()
313 ly = y.next()
313 ly = y.next()
314
314
315 # increment each ancestor list until it is closer to root than
315 # increment each ancestor list until it is closer to root than
316 # the other, or they match
316 # the other, or they match
317 while 1:
317 while 1:
318 if lx == ly:
318 if lx == ly:
319 return lx[1]
319 return lx[1]
320 elif lx < ly:
320 elif lx < ly:
321 ly = y.next()
321 ly = y.next()
322 elif lx > ly:
322 elif lx > ly:
323 lx = x.next()
323 lx = x.next()
324
324
325 def group(self, linkmap):
325 def group(self, linkmap):
326 # given a list of changeset revs, return a set of deltas and
326 # given a list of changeset revs, return a set of deltas and
327 # metadata corresponding to nodes. the first delta is
327 # metadata corresponding to nodes. the first delta is
328 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
328 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
329 # have this parent as it has all history before these
329 # have this parent as it has all history before these
330 # changesets. parent is parent[0]
330 # changesets. parent is parent[0]
331
331
332 revs = []
332 revs = []
333 needed = {}
333 needed = {}
334
334
335 # find file nodes/revs that match changeset revs
335 # find file nodes/revs that match changeset revs
336 for i in xrange(0, self.count()):
336 for i in xrange(0, self.count()):
337 if self.index[i][3] in linkmap:
337 if self.index[i][3] in linkmap:
338 revs.append(i)
338 revs.append(i)
339 needed[i] = 1
339 needed[i] = 1
340
340
341 # if we don't have any revisions touched by these changesets, bail
341 # if we don't have any revisions touched by these changesets, bail
342 if not revs:
342 if not revs:
343 yield struct.pack(">l", 0)
343 yield struct.pack(">l", 0)
344 return
344 return
345
345
346 # add the parent of the first rev
346 # add the parent of the first rev
347 p = self.parents(self.node(revs[0]))[0]
347 p = self.parents(self.node(revs[0]))[0]
348 revs.insert(0, self.rev(p))
348 revs.insert(0, self.rev(p))
349
349
350 # for each delta that isn't contiguous in the log, we need to
350 # for each delta that isn't contiguous in the log, we need to
351 # reconstruct the base, reconstruct the result, and then
351 # reconstruct the base, reconstruct the result, and then
352 # calculate the delta. We also need to do this where we've
352 # calculate the delta. We also need to do this where we've
353 # stored a full version and not a delta
353 # stored a full version and not a delta
354 for i in xrange(0, len(revs) - 1):
354 for i in xrange(0, len(revs) - 1):
355 a, b = revs[i], revs[i + 1]
355 a, b = revs[i], revs[i + 1]
356 if a + 1 != b or self.base(b) == b:
356 if a + 1 != b or self.base(b) == b:
357 for j in xrange(self.base(a), a + 1):
357 for j in xrange(self.base(a), a + 1):
358 needed[j] = 1
358 needed[j] = 1
359 for j in xrange(self.base(b), b + 1):
359 for j in xrange(self.base(b), b + 1):
360 needed[j] = 1
360 needed[j] = 1
361
361
362 # calculate spans to retrieve from datafile
362 # calculate spans to retrieve from datafile
363 needed = needed.keys()
363 needed = needed.keys()
364 needed.sort()
364 needed.sort()
365 spans = []
365 spans = []
366 oo = -1
366 oo = -1
367 ol = 0
367 ol = 0
368 for n in needed:
368 for n in needed:
369 if n < 0: continue
369 if n < 0: continue
370 o = self.start(n)
370 o = self.start(n)
371 l = self.length(n)
371 l = self.length(n)
372 if oo + ol == o: # can we merge with the previous?
372 if oo + ol == o: # can we merge with the previous?
373 nl = spans[-1][2]
373 nl = spans[-1][2]
374 nl.append((n, l))
374 nl.append((n, l))
375 ol += l
375 ol += l
376 spans[-1] = (oo, ol, nl)
376 spans[-1] = (oo, ol, nl)
377 else:
377 else:
378 oo = o
378 oo = o
379 ol = l
379 ol = l
380 spans.append((oo, ol, [(n, l)]))
380 spans.append((oo, ol, [(n, l)]))
381
381
382 # read spans in, divide up chunks
382 # read spans in, divide up chunks
383 chunks = {}
383 chunks = {}
384 for span in spans:
384 for span in spans:
385 # we reopen the file for each span to make http happy for now
385 # we reopen the file for each span to make http happy for now
386 f = self.opener(self.datafile)
386 f = self.opener(self.datafile)
387 f.seek(span[0])
387 f.seek(span[0])
388 data = f.read(span[1])
388 data = f.read(span[1])
389
389
390 # divide up the span
390 # divide up the span
391 pos = 0
391 pos = 0
392 for r, l in span[2]:
392 for r, l in span[2]:
393 chunks[r] = decompress(data[pos: pos + l])
393 chunks[r] = decompress(data[pos: pos + l])
394 pos += l
394 pos += l
395
395
396 # helper to reconstruct intermediate versions
396 # helper to reconstruct intermediate versions
397 def construct(text, base, rev):
397 def construct(text, base, rev):
398 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
398 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
399 return mdiff.patches(text, bins)
399 return mdiff.patches(text, bins)
400
400
401 # build deltas
401 # build deltas
402 deltas = []
402 deltas = []
403 for d in xrange(0, len(revs) - 1):
403 for d in xrange(0, len(revs) - 1):
404 a, b = revs[d], revs[d + 1]
404 a, b = revs[d], revs[d + 1]
405 n = self.node(b)
405 n = self.node(b)
406
406
407 # do we need to construct a new delta?
407 # do we need to construct a new delta?
408 if a + 1 != b or self.base(b) == b:
408 if a + 1 != b or self.base(b) == b:
409 if a >= 0:
409 if a >= 0:
410 base = self.base(a)
410 base = self.base(a)
411 ta = chunks[self.base(a)]
411 ta = chunks[self.base(a)]
412 ta = construct(ta, base, a)
412 ta = construct(ta, base, a)
413 else:
413 else:
414 ta = ""
414 ta = ""
415
415
416 base = self.base(b)
416 base = self.base(b)
417 if a > base:
417 if a > base:
418 base = a
418 base = a
419 tb = ta
419 tb = ta
420 else:
420 else:
421 tb = chunks[self.base(b)]
421 tb = chunks[self.base(b)]
422 tb = construct(tb, base, b)
422 tb = construct(tb, base, b)
423 d = self.diff(ta, tb)
423 d = self.diff(ta, tb)
424 else:
424 else:
425 d = chunks[b]
425 d = chunks[b]
426
426
427 p = self.parents(n)
427 p = self.parents(n)
428 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
428 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
429 l = struct.pack(">l", len(meta) + len(d) + 4)
429 l = struct.pack(">l", len(meta) + len(d) + 4)
430 yield l
430 yield l
431 yield meta
431 yield meta
432 yield d
432 yield d
433
433
434 yield struct.pack(">l", 0)
434 yield struct.pack(">l", 0)
435
435
436 def addgroup(self, revs, linkmapper, transaction):
436 def addgroup(self, revs, linkmapper, transaction, unique = 0):
437 # given a set of deltas, add them to the revision log. the
437 # given a set of deltas, add them to the revision log. the
438 # first delta is against its parent, which should be in our
438 # first delta is against its parent, which should be in our
439 # log, the rest are against the previous delta.
439 # log, the rest are against the previous delta.
440
440
441 # track the base of the current delta log
441 # track the base of the current delta log
442 r = self.count()
442 r = self.count()
443 t = r - 1
443 t = r - 1
444 node = nullid
444 node = nullid
445
445
446 base = prev = -1
446 base = prev = -1
447 start = end = 0
447 start = end = 0
448 if r:
448 if r:
449 start = self.start(self.base(t))
449 start = self.start(self.base(t))
450 end = self.end(t)
450 end = self.end(t)
451 measure = self.length(self.base(t))
451 measure = self.length(self.base(t))
452 base = self.base(t)
452 base = self.base(t)
453 prev = self.tip()
453 prev = self.tip()
454
454
455 transaction.add(self.datafile, end)
455 transaction.add(self.datafile, end)
456 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
456 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
457 dfh = self.opener(self.datafile, "a")
457 dfh = self.opener(self.datafile, "a")
458 ifh = self.opener(self.indexfile, "a")
458 ifh = self.opener(self.indexfile, "a")
459
459
460 # loop through our set of deltas
460 # loop through our set of deltas
461 chain = None
461 chain = None
462 for chunk in revs:
462 for chunk in revs:
463 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
463 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
464 link = linkmapper(cs)
464 link = linkmapper(cs)
465 if node in self.nodemap:
465 if node in self.nodemap:
466 raise "already have %s" % hex(node[:4])
466 # this can happen if two branches make the same change
467 if unique:
468 raise "already have %s" % hex(node[:4])
469 continue
467 delta = chunk[80:]
470 delta = chunk[80:]
468
471
469 if not chain:
472 if not chain:
470 # retrieve the parent revision of the delta chain
473 # retrieve the parent revision of the delta chain
471 chain = p1
474 chain = p1
472 if not chain in self.nodemap:
475 if not chain in self.nodemap:
473 raise "unknown base %s" % short(chain[:4])
476 raise "unknown base %s" % short(chain[:4])
474
477
475 # full versions are inserted when the needed deltas become
478 # full versions are inserted when the needed deltas become
476 # comparable to the uncompressed text or when the previous
479 # comparable to the uncompressed text or when the previous
477 # version is not the one we have a delta against. We use
480 # version is not the one we have a delta against. We use
478 # the size of the previous full rev as a proxy for the
481 # the size of the previous full rev as a proxy for the
479 # current size.
482 # current size.
480
483
481 if chain == prev:
484 if chain == prev:
482 cdelta = compress(delta)
485 cdelta = compress(delta)
483
486
484 if chain != prev or (end - start + len(cdelta)) > measure * 2:
487 if chain != prev or (end - start + len(cdelta)) > measure * 2:
485 # flush our writes here so we can read it in revision
488 # flush our writes here so we can read it in revision
486 dfh.flush()
489 dfh.flush()
487 ifh.flush()
490 ifh.flush()
488 text = self.revision(chain)
491 text = self.revision(chain)
489 text = self.patches(text, [delta])
492 text = self.patches(text, [delta])
490 chk = self.addrevision(text, transaction, link, p1, p2)
493 chk = self.addrevision(text, transaction, link, p1, p2)
491 if chk != node:
494 if chk != node:
492 raise "consistency error adding group"
495 raise "consistency error adding group"
493 measure = len(text)
496 measure = len(text)
494 else:
497 else:
495 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
498 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
496 self.index.append(e)
499 self.index.append(e)
497 self.nodemap[node] = r
500 self.nodemap[node] = r
498 dfh.write(cdelta)
501 dfh.write(cdelta)
499 ifh.write(struct.pack(indexformat, *e))
502 ifh.write(struct.pack(indexformat, *e))
500
503
501 t, r, chain, prev = r, r + 1, node, node
504 t, r, chain, prev = r, r + 1, node, node
502 start = self.start(self.base(t))
505 start = self.start(self.base(t))
503 end = self.end(t)
506 end = self.end(t)
504
507
505 dfh.close()
508 dfh.close()
506 ifh.close()
509 ifh.close()
507 return node
510 return node
General Comments 0
You need to be logged in to leave comments. Login now