Optimizing manifest reads in changegroupsubset by using deltas.
Eric Hopper
r1462:12a8d772 default
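This changeset speeds up filenode_collector inside changegroupsubset. When manifest revisions are visited in consecutive revlog order, the new code (see the filenode_collector hunk below) parses only the text delta against the previous manifest, mdiff.patchtext(mnfst.delta(mnfstnode)), instead of re-reading the whole manifest for every node; a full mnfst.read() is kept as the fallback for out-of-order revisions. Each manifest line has the form "filename\0<40 hex characters of the filenode>" (possibly followed by a flag character), so the delta alone names exactly the files whose nodes changed. The following is a minimal standalone sketch of that parsing step under that line-format assumption; filenodes_from_manifest_delta and the sample data are illustrative and not part of Mercurial's API.

    from binascii import unhexlify

    def filenodes_from_manifest_delta(delta_text, changedfiles):
        """Collect {filename: binary filenode} from the new-text half of a
        manifest delta.  Only files actually touched by the delta appear
        in its text, so this avoids scanning the full manifest."""
        found = {}
        for dline in delta_text.splitlines():
            if not dline:
                continue
            # each manifest line is "name\0<40 hex chars>[flag]"
            name, hexnode = dline.split('\0')
            if name in changedfiles:
                # keep only the 40 hex digits, like bin(fnode[:40]) above
                found[name] = unhexlify(hexnode[:40])
        return found

    # illustrative usage with a hand-built two-line delta
    sample = "a.txt\0" + "11" * 20 + "\n" + "b.txt\0" + "22" * 20 + "x\n"
    print(filenodes_from_manifest_delta(sample, set(["a.txt", "b.txt"])))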
@@ -1,1569 +1,1585
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
12 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13
13
14 class localrepository:
14 class localrepository:
15 def __init__(self, ui, path=None, create=0):
15 def __init__(self, ui, path=None, create=0):
16 if not path:
16 if not path:
17 p = os.getcwd()
17 p = os.getcwd()
18 while not os.path.isdir(os.path.join(p, ".hg")):
18 while not os.path.isdir(os.path.join(p, ".hg")):
19 oldp = p
19 oldp = p
20 p = os.path.dirname(p)
20 p = os.path.dirname(p)
21 if p == oldp: raise repo.RepoError("no repo found")
21 if p == oldp: raise repo.RepoError("no repo found")
22 path = p
22 path = p
23 self.path = os.path.join(path, ".hg")
23 self.path = os.path.join(path, ".hg")
24
24
25 if not create and not os.path.isdir(self.path):
25 if not create and not os.path.isdir(self.path):
26 raise repo.RepoError("repository %s not found" % self.path)
26 raise repo.RepoError("repository %s not found" % self.path)
27
27
28 self.root = os.path.abspath(path)
28 self.root = os.path.abspath(path)
29 self.ui = ui
29 self.ui = ui
30 self.opener = util.opener(self.path)
30 self.opener = util.opener(self.path)
31 self.wopener = util.opener(self.root)
31 self.wopener = util.opener(self.root)
32 self.manifest = manifest.manifest(self.opener)
32 self.manifest = manifest.manifest(self.opener)
33 self.changelog = changelog.changelog(self.opener)
33 self.changelog = changelog.changelog(self.opener)
34 self.tagscache = None
34 self.tagscache = None
35 self.nodetagscache = None
35 self.nodetagscache = None
36 self.encodepats = None
36 self.encodepats = None
37 self.decodepats = None
37 self.decodepats = None
38
38
39 if create:
39 if create:
40 os.mkdir(self.path)
40 os.mkdir(self.path)
41 os.mkdir(self.join("data"))
41 os.mkdir(self.join("data"))
42
42
43 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
43 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 try:
44 try:
45 self.ui.readconfig(self.opener("hgrc"))
45 self.ui.readconfig(self.opener("hgrc"))
46 except IOError: pass
46 except IOError: pass
47
47
48 def hook(self, name, **args):
48 def hook(self, name, **args):
49 s = self.ui.config("hooks", name)
49 s = self.ui.config("hooks", name)
50 if s:
50 if s:
51 self.ui.note("running hook %s: %s\n" % (name, s))
51 self.ui.note("running hook %s: %s\n" % (name, s))
52 old = {}
52 old = {}
53 for k, v in args.items():
53 for k, v in args.items():
54 k = k.upper()
54 k = k.upper()
55 old[k] = os.environ.get(k, None)
55 old[k] = os.environ.get(k, None)
56 os.environ[k] = v
56 os.environ[k] = v
57
57
58 # Hooks run in the repository root
58 # Hooks run in the repository root
59 olddir = os.getcwd()
59 olddir = os.getcwd()
60 os.chdir(self.root)
60 os.chdir(self.root)
61 r = os.system(s)
61 r = os.system(s)
62 os.chdir(olddir)
62 os.chdir(olddir)
63
63
64 for k, v in old.items():
64 for k, v in old.items():
65 if v != None:
65 if v != None:
66 os.environ[k] = v
66 os.environ[k] = v
67 else:
67 else:
68 del os.environ[k]
68 del os.environ[k]
69
69
70 if r:
70 if r:
71 self.ui.warn("abort: %s hook failed with status %d!\n" %
71 self.ui.warn("abort: %s hook failed with status %d!\n" %
72 (name, r))
72 (name, r))
73 return False
73 return False
74 return True
74 return True
75
75
76 def tags(self):
76 def tags(self):
77 '''return a mapping of tag to node'''
77 '''return a mapping of tag to node'''
78 if not self.tagscache:
78 if not self.tagscache:
79 self.tagscache = {}
79 self.tagscache = {}
80 def addtag(self, k, n):
80 def addtag(self, k, n):
81 try:
81 try:
82 bin_n = bin(n)
82 bin_n = bin(n)
83 except TypeError:
83 except TypeError:
84 bin_n = ''
84 bin_n = ''
85 self.tagscache[k.strip()] = bin_n
85 self.tagscache[k.strip()] = bin_n
86
86
87 try:
87 try:
88 # read each head of the tags file, ending with the tip
88 # read each head of the tags file, ending with the tip
89 # and add each tag found to the map, with "newer" ones
89 # and add each tag found to the map, with "newer" ones
90 # taking precedence
90 # taking precedence
91 fl = self.file(".hgtags")
91 fl = self.file(".hgtags")
92 h = fl.heads()
92 h = fl.heads()
93 h.reverse()
93 h.reverse()
94 for r in h:
94 for r in h:
95 for l in fl.read(r).splitlines():
95 for l in fl.read(r).splitlines():
96 if l:
96 if l:
97 n, k = l.split(" ", 1)
97 n, k = l.split(" ", 1)
98 addtag(self, k, n)
98 addtag(self, k, n)
99 except KeyError:
99 except KeyError:
100 pass
100 pass
101
101
102 try:
102 try:
103 f = self.opener("localtags")
103 f = self.opener("localtags")
104 for l in f:
104 for l in f:
105 n, k = l.split(" ", 1)
105 n, k = l.split(" ", 1)
106 addtag(self, k, n)
106 addtag(self, k, n)
107 except IOError:
107 except IOError:
108 pass
108 pass
109
109
110 self.tagscache['tip'] = self.changelog.tip()
110 self.tagscache['tip'] = self.changelog.tip()
111
111
112 return self.tagscache
112 return self.tagscache
113
113
114 def tagslist(self):
114 def tagslist(self):
115 '''return a list of tags ordered by revision'''
115 '''return a list of tags ordered by revision'''
116 l = []
116 l = []
117 for t, n in self.tags().items():
117 for t, n in self.tags().items():
118 try:
118 try:
119 r = self.changelog.rev(n)
119 r = self.changelog.rev(n)
120 except:
120 except:
121 r = -2 # sort to the beginning of the list if unknown
121 r = -2 # sort to the beginning of the list if unknown
122 l.append((r,t,n))
122 l.append((r,t,n))
123 l.sort()
123 l.sort()
124 return [(t,n) for r,t,n in l]
124 return [(t,n) for r,t,n in l]
125
125
126 def nodetags(self, node):
126 def nodetags(self, node):
127 '''return the tags associated with a node'''
127 '''return the tags associated with a node'''
128 if not self.nodetagscache:
128 if not self.nodetagscache:
129 self.nodetagscache = {}
129 self.nodetagscache = {}
130 for t,n in self.tags().items():
130 for t,n in self.tags().items():
131 self.nodetagscache.setdefault(n,[]).append(t)
131 self.nodetagscache.setdefault(n,[]).append(t)
132 return self.nodetagscache.get(node, [])
132 return self.nodetagscache.get(node, [])
133
133
134 def lookup(self, key):
134 def lookup(self, key):
135 try:
135 try:
136 return self.tags()[key]
136 return self.tags()[key]
137 except KeyError:
137 except KeyError:
138 try:
138 try:
139 return self.changelog.lookup(key)
139 return self.changelog.lookup(key)
140 except:
140 except:
141 raise repo.RepoError("unknown revision '%s'" % key)
141 raise repo.RepoError("unknown revision '%s'" % key)
142
142
143 def dev(self):
143 def dev(self):
144 return os.stat(self.path).st_dev
144 return os.stat(self.path).st_dev
145
145
146 def local(self):
146 def local(self):
147 return True
147 return True
148
148
149 def join(self, f):
149 def join(self, f):
150 return os.path.join(self.path, f)
150 return os.path.join(self.path, f)
151
151
152 def wjoin(self, f):
152 def wjoin(self, f):
153 return os.path.join(self.root, f)
153 return os.path.join(self.root, f)
154
154
155 def file(self, f):
155 def file(self, f):
156 if f[0] == '/': f = f[1:]
156 if f[0] == '/': f = f[1:]
157 return filelog.filelog(self.opener, f)
157 return filelog.filelog(self.opener, f)
158
158
159 def getcwd(self):
159 def getcwd(self):
160 return self.dirstate.getcwd()
160 return self.dirstate.getcwd()
161
161
162 def wfile(self, f, mode='r'):
162 def wfile(self, f, mode='r'):
163 return self.wopener(f, mode)
163 return self.wopener(f, mode)
164
164
165 def wread(self, filename):
165 def wread(self, filename):
166 if self.encodepats == None:
166 if self.encodepats == None:
167 l = []
167 l = []
168 for pat, cmd in self.ui.configitems("encode"):
168 for pat, cmd in self.ui.configitems("encode"):
169 mf = util.matcher("", "/", [pat], [], [])[1]
169 mf = util.matcher("", "/", [pat], [], [])[1]
170 l.append((mf, cmd))
170 l.append((mf, cmd))
171 self.encodepats = l
171 self.encodepats = l
172
172
173 data = self.wopener(filename, 'r').read()
173 data = self.wopener(filename, 'r').read()
174
174
175 for mf, cmd in self.encodepats:
175 for mf, cmd in self.encodepats:
176 if mf(filename):
176 if mf(filename):
177 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
177 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
178 data = util.filter(data, cmd)
178 data = util.filter(data, cmd)
179 break
179 break
180
180
181 return data
181 return data
182
182
183 def wwrite(self, filename, data, fd=None):
183 def wwrite(self, filename, data, fd=None):
184 if self.decodepats == None:
184 if self.decodepats == None:
185 l = []
185 l = []
186 for pat, cmd in self.ui.configitems("decode"):
186 for pat, cmd in self.ui.configitems("decode"):
187 mf = util.matcher("", "/", [pat], [], [])[1]
187 mf = util.matcher("", "/", [pat], [], [])[1]
188 l.append((mf, cmd))
188 l.append((mf, cmd))
189 self.decodepats = l
189 self.decodepats = l
190
190
191 for mf, cmd in self.decodepats:
191 for mf, cmd in self.decodepats:
192 if mf(filename):
192 if mf(filename):
193 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
193 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
194 data = util.filter(data, cmd)
194 data = util.filter(data, cmd)
195 break
195 break
196
196
197 if fd:
197 if fd:
198 return fd.write(data)
198 return fd.write(data)
199 return self.wopener(filename, 'w').write(data)
199 return self.wopener(filename, 'w').write(data)
200
200
201 def transaction(self):
201 def transaction(self):
202 # save dirstate for undo
202 # save dirstate for undo
203 try:
203 try:
204 ds = self.opener("dirstate").read()
204 ds = self.opener("dirstate").read()
205 except IOError:
205 except IOError:
206 ds = ""
206 ds = ""
207 self.opener("journal.dirstate", "w").write(ds)
207 self.opener("journal.dirstate", "w").write(ds)
208
208
209 def after():
209 def after():
210 util.rename(self.join("journal"), self.join("undo"))
210 util.rename(self.join("journal"), self.join("undo"))
211 util.rename(self.join("journal.dirstate"),
211 util.rename(self.join("journal.dirstate"),
212 self.join("undo.dirstate"))
212 self.join("undo.dirstate"))
213
213
214 return transaction.transaction(self.ui.warn, self.opener,
214 return transaction.transaction(self.ui.warn, self.opener,
215 self.join("journal"), after)
215 self.join("journal"), after)
216
216
217 def recover(self):
217 def recover(self):
218 lock = self.lock()
218 lock = self.lock()
219 if os.path.exists(self.join("journal")):
219 if os.path.exists(self.join("journal")):
220 self.ui.status("rolling back interrupted transaction\n")
220 self.ui.status("rolling back interrupted transaction\n")
221 return transaction.rollback(self.opener, self.join("journal"))
221 return transaction.rollback(self.opener, self.join("journal"))
222 else:
222 else:
223 self.ui.warn("no interrupted transaction available\n")
223 self.ui.warn("no interrupted transaction available\n")
224
224
225 def undo(self):
225 def undo(self):
226 lock = self.lock()
226 lock = self.lock()
227 if os.path.exists(self.join("undo")):
227 if os.path.exists(self.join("undo")):
228 self.ui.status("rolling back last transaction\n")
228 self.ui.status("rolling back last transaction\n")
229 transaction.rollback(self.opener, self.join("undo"))
229 transaction.rollback(self.opener, self.join("undo"))
230 self.dirstate = None
230 self.dirstate = None
231 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
231 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
232 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
232 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
233 else:
233 else:
234 self.ui.warn("no undo information available\n")
234 self.ui.warn("no undo information available\n")
235
235
236 def lock(self, wait=1):
236 def lock(self, wait=1):
237 try:
237 try:
238 return lock.lock(self.join("lock"), 0)
238 return lock.lock(self.join("lock"), 0)
239 except lock.LockHeld, inst:
239 except lock.LockHeld, inst:
240 if wait:
240 if wait:
241 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
241 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
242 return lock.lock(self.join("lock"), wait)
242 return lock.lock(self.join("lock"), wait)
243 raise inst
243 raise inst
244
244
245 def rawcommit(self, files, text, user, date, p1=None, p2=None):
245 def rawcommit(self, files, text, user, date, p1=None, p2=None):
246 orig_parent = self.dirstate.parents()[0] or nullid
246 orig_parent = self.dirstate.parents()[0] or nullid
247 p1 = p1 or self.dirstate.parents()[0] or nullid
247 p1 = p1 or self.dirstate.parents()[0] or nullid
248 p2 = p2 or self.dirstate.parents()[1] or nullid
248 p2 = p2 or self.dirstate.parents()[1] or nullid
249 c1 = self.changelog.read(p1)
249 c1 = self.changelog.read(p1)
250 c2 = self.changelog.read(p2)
250 c2 = self.changelog.read(p2)
251 m1 = self.manifest.read(c1[0])
251 m1 = self.manifest.read(c1[0])
252 mf1 = self.manifest.readflags(c1[0])
252 mf1 = self.manifest.readflags(c1[0])
253 m2 = self.manifest.read(c2[0])
253 m2 = self.manifest.read(c2[0])
254 changed = []
254 changed = []
255
255
256 if orig_parent == p1:
256 if orig_parent == p1:
257 update_dirstate = 1
257 update_dirstate = 1
258 else:
258 else:
259 update_dirstate = 0
259 update_dirstate = 0
260
260
261 tr = self.transaction()
261 tr = self.transaction()
262 mm = m1.copy()
262 mm = m1.copy()
263 mfm = mf1.copy()
263 mfm = mf1.copy()
264 linkrev = self.changelog.count()
264 linkrev = self.changelog.count()
265 for f in files:
265 for f in files:
266 try:
266 try:
267 t = self.wread(f)
267 t = self.wread(f)
268 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
268 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
269 r = self.file(f)
269 r = self.file(f)
270 mfm[f] = tm
270 mfm[f] = tm
271
271
272 fp1 = m1.get(f, nullid)
272 fp1 = m1.get(f, nullid)
273 fp2 = m2.get(f, nullid)
273 fp2 = m2.get(f, nullid)
274
274
275 # is the same revision on two branches of a merge?
275 # is the same revision on two branches of a merge?
276 if fp2 == fp1:
276 if fp2 == fp1:
277 fp2 = nullid
277 fp2 = nullid
278
278
279 if fp2 != nullid:
279 if fp2 != nullid:
280 # is one parent an ancestor of the other?
280 # is one parent an ancestor of the other?
281 fpa = r.ancestor(fp1, fp2)
281 fpa = r.ancestor(fp1, fp2)
282 if fpa == fp1:
282 if fpa == fp1:
283 fp1, fp2 = fp2, nullid
283 fp1, fp2 = fp2, nullid
284 elif fpa == fp2:
284 elif fpa == fp2:
285 fp2 = nullid
285 fp2 = nullid
286
286
287 # is the file unmodified from the parent?
287 # is the file unmodified from the parent?
288 if t == r.read(fp1):
288 if t == r.read(fp1):
289 # record the proper existing parent in manifest
289 # record the proper existing parent in manifest
290 # no need to add a revision
290 # no need to add a revision
291 mm[f] = fp1
291 mm[f] = fp1
292 continue
292 continue
293
293
294 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
294 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
295 changed.append(f)
295 changed.append(f)
296 if update_dirstate:
296 if update_dirstate:
297 self.dirstate.update([f], "n")
297 self.dirstate.update([f], "n")
298 except IOError:
298 except IOError:
299 try:
299 try:
300 del mm[f]
300 del mm[f]
301 del mfm[f]
301 del mfm[f]
302 if update_dirstate:
302 if update_dirstate:
303 self.dirstate.forget([f])
303 self.dirstate.forget([f])
304 except:
304 except:
305 # deleted from p2?
305 # deleted from p2?
306 pass
306 pass
307
307
308 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
308 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
309 user = user or self.ui.username()
309 user = user or self.ui.username()
310 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
310 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
311 tr.close()
311 tr.close()
312 if update_dirstate:
312 if update_dirstate:
313 self.dirstate.setparents(n, nullid)
313 self.dirstate.setparents(n, nullid)
314
314
315 def commit(self, files = None, text = "", user = None, date = None,
315 def commit(self, files = None, text = "", user = None, date = None,
316 match = util.always, force=False):
316 match = util.always, force=False):
317 commit = []
317 commit = []
318 remove = []
318 remove = []
319 changed = []
319 changed = []
320
320
321 if files:
321 if files:
322 for f in files:
322 for f in files:
323 s = self.dirstate.state(f)
323 s = self.dirstate.state(f)
324 if s in 'nmai':
324 if s in 'nmai':
325 commit.append(f)
325 commit.append(f)
326 elif s == 'r':
326 elif s == 'r':
327 remove.append(f)
327 remove.append(f)
328 else:
328 else:
329 self.ui.warn("%s not tracked!\n" % f)
329 self.ui.warn("%s not tracked!\n" % f)
330 else:
330 else:
331 (c, a, d, u) = self.changes(match=match)
331 (c, a, d, u) = self.changes(match=match)
332 commit = c + a
332 commit = c + a
333 remove = d
333 remove = d
334
334
335 p1, p2 = self.dirstate.parents()
335 p1, p2 = self.dirstate.parents()
336 c1 = self.changelog.read(p1)
336 c1 = self.changelog.read(p1)
337 c2 = self.changelog.read(p2)
337 c2 = self.changelog.read(p2)
338 m1 = self.manifest.read(c1[0])
338 m1 = self.manifest.read(c1[0])
339 mf1 = self.manifest.readflags(c1[0])
339 mf1 = self.manifest.readflags(c1[0])
340 m2 = self.manifest.read(c2[0])
340 m2 = self.manifest.read(c2[0])
341
341
342 if not commit and not remove and not force and p2 == nullid:
342 if not commit and not remove and not force and p2 == nullid:
343 self.ui.status("nothing changed\n")
343 self.ui.status("nothing changed\n")
344 return None
344 return None
345
345
346 if not self.hook("precommit"):
346 if not self.hook("precommit"):
347 return None
347 return None
348
348
349 lock = self.lock()
349 lock = self.lock()
350 tr = self.transaction()
350 tr = self.transaction()
351
351
352 # check in files
352 # check in files
353 new = {}
353 new = {}
354 linkrev = self.changelog.count()
354 linkrev = self.changelog.count()
355 commit.sort()
355 commit.sort()
356 for f in commit:
356 for f in commit:
357 self.ui.note(f + "\n")
357 self.ui.note(f + "\n")
358 try:
358 try:
359 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
359 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
360 t = self.wread(f)
360 t = self.wread(f)
361 except IOError:
361 except IOError:
362 self.ui.warn("trouble committing %s!\n" % f)
362 self.ui.warn("trouble committing %s!\n" % f)
363 raise
363 raise
364
364
365 r = self.file(f)
365 r = self.file(f)
366
366
367 meta = {}
367 meta = {}
368 cp = self.dirstate.copied(f)
368 cp = self.dirstate.copied(f)
369 if cp:
369 if cp:
370 meta["copy"] = cp
370 meta["copy"] = cp
371 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
371 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
372 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
372 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
373 fp1, fp2 = nullid, nullid
373 fp1, fp2 = nullid, nullid
374 else:
374 else:
375 fp1 = m1.get(f, nullid)
375 fp1 = m1.get(f, nullid)
376 fp2 = m2.get(f, nullid)
376 fp2 = m2.get(f, nullid)
377
377
378 # is the same revision on two branches of a merge?
378 # is the same revision on two branches of a merge?
379 if fp2 == fp1:
379 if fp2 == fp1:
380 fp2 = nullid
380 fp2 = nullid
381
381
382 if fp2 != nullid:
382 if fp2 != nullid:
383 # is one parent an ancestor of the other?
383 # is one parent an ancestor of the other?
384 fpa = r.ancestor(fp1, fp2)
384 fpa = r.ancestor(fp1, fp2)
385 if fpa == fp1:
385 if fpa == fp1:
386 fp1, fp2 = fp2, nullid
386 fp1, fp2 = fp2, nullid
387 elif fpa == fp2:
387 elif fpa == fp2:
388 fp2 = nullid
388 fp2 = nullid
389
389
390 # is the file unmodified from the parent?
390 # is the file unmodified from the parent?
391 if not meta and t == r.read(fp1):
391 if not meta and t == r.read(fp1):
392 # record the proper existing parent in manifest
392 # record the proper existing parent in manifest
393 # no need to add a revision
393 # no need to add a revision
394 new[f] = fp1
394 new[f] = fp1
395 continue
395 continue
396
396
397 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
397 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
398 # remember what we've added so that we can later calculate
398 # remember what we've added so that we can later calculate
399 # the files to pull from a set of changesets
399 # the files to pull from a set of changesets
400 changed.append(f)
400 changed.append(f)
401
401
402 # update manifest
402 # update manifest
403 m1.update(new)
403 m1.update(new)
404 for f in remove:
404 for f in remove:
405 if f in m1:
405 if f in m1:
406 del m1[f]
406 del m1[f]
407 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
407 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
408 (new, remove))
408 (new, remove))
409
409
410 # add changeset
410 # add changeset
411 new = new.keys()
411 new = new.keys()
412 new.sort()
412 new.sort()
413
413
414 if not text:
414 if not text:
415 edittext = ""
415 edittext = ""
416 if p2 != nullid:
416 if p2 != nullid:
417 edittext += "HG: branch merge\n"
417 edittext += "HG: branch merge\n"
418 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
418 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
419 edittext += "".join(["HG: changed %s\n" % f for f in changed])
419 edittext += "".join(["HG: changed %s\n" % f for f in changed])
420 edittext += "".join(["HG: removed %s\n" % f for f in remove])
420 edittext += "".join(["HG: removed %s\n" % f for f in remove])
421 if not changed and not remove:
421 if not changed and not remove:
422 edittext += "HG: no files changed\n"
422 edittext += "HG: no files changed\n"
423 edittext = self.ui.edit(edittext)
423 edittext = self.ui.edit(edittext)
424 if not edittext.rstrip():
424 if not edittext.rstrip():
425 return None
425 return None
426 text = edittext
426 text = edittext
427
427
428 user = user or self.ui.username()
428 user = user or self.ui.username()
429 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
429 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
430 tr.close()
430 tr.close()
431
431
432 self.dirstate.setparents(n)
432 self.dirstate.setparents(n)
433 self.dirstate.update(new, "n")
433 self.dirstate.update(new, "n")
434 self.dirstate.forget(remove)
434 self.dirstate.forget(remove)
435
435
436 if not self.hook("commit", node=hex(n)):
436 if not self.hook("commit", node=hex(n)):
437 return None
437 return None
438 return n
438 return n
439
439
440 def walk(self, node=None, files=[], match=util.always):
440 def walk(self, node=None, files=[], match=util.always):
441 if node:
441 if node:
442 for fn in self.manifest.read(self.changelog.read(node)[0]):
442 for fn in self.manifest.read(self.changelog.read(node)[0]):
443 if match(fn): yield 'm', fn
443 if match(fn): yield 'm', fn
444 else:
444 else:
445 for src, fn in self.dirstate.walk(files, match):
445 for src, fn in self.dirstate.walk(files, match):
446 yield src, fn
446 yield src, fn
447
447
448 def changes(self, node1 = None, node2 = None, files = [],
448 def changes(self, node1 = None, node2 = None, files = [],
449 match = util.always):
449 match = util.always):
450 mf2, u = None, []
450 mf2, u = None, []
451
451
452 def fcmp(fn, mf):
452 def fcmp(fn, mf):
453 t1 = self.wread(fn)
453 t1 = self.wread(fn)
454 t2 = self.file(fn).read(mf.get(fn, nullid))
454 t2 = self.file(fn).read(mf.get(fn, nullid))
455 return cmp(t1, t2)
455 return cmp(t1, t2)
456
456
457 def mfmatches(node):
457 def mfmatches(node):
458 mf = dict(self.manifest.read(node))
458 mf = dict(self.manifest.read(node))
459 for fn in mf.keys():
459 for fn in mf.keys():
460 if not match(fn):
460 if not match(fn):
461 del mf[fn]
461 del mf[fn]
462 return mf
462 return mf
463
463
464 # are we comparing the working directory?
464 # are we comparing the working directory?
465 if not node2:
465 if not node2:
466 l, c, a, d, u = self.dirstate.changes(files, match)
466 l, c, a, d, u = self.dirstate.changes(files, match)
467
467
468 # are we comparing working dir against its parent?
468 # are we comparing working dir against its parent?
469 if not node1:
469 if not node1:
470 if l:
470 if l:
471 # do a full compare of any files that might have changed
471 # do a full compare of any files that might have changed
472 change = self.changelog.read(self.dirstate.parents()[0])
472 change = self.changelog.read(self.dirstate.parents()[0])
473 mf2 = mfmatches(change[0])
473 mf2 = mfmatches(change[0])
474 for f in l:
474 for f in l:
475 if fcmp(f, mf2):
475 if fcmp(f, mf2):
476 c.append(f)
476 c.append(f)
477
477
478 for l in c, a, d, u:
478 for l in c, a, d, u:
479 l.sort()
479 l.sort()
480
480
481 return (c, a, d, u)
481 return (c, a, d, u)
482
482
483 # are we comparing working dir against non-tip?
483 # are we comparing working dir against non-tip?
484 # generate a pseudo-manifest for the working dir
484 # generate a pseudo-manifest for the working dir
485 if not node2:
485 if not node2:
486 if not mf2:
486 if not mf2:
487 change = self.changelog.read(self.dirstate.parents()[0])
487 change = self.changelog.read(self.dirstate.parents()[0])
488 mf2 = mfmatches(change[0])
488 mf2 = mfmatches(change[0])
489 for f in a + c + l:
489 for f in a + c + l:
490 mf2[f] = ""
490 mf2[f] = ""
491 for f in d:
491 for f in d:
492 if f in mf2: del mf2[f]
492 if f in mf2: del mf2[f]
493 else:
493 else:
494 change = self.changelog.read(node2)
494 change = self.changelog.read(node2)
495 mf2 = mfmatches(change[0])
495 mf2 = mfmatches(change[0])
496
496
497 # flush lists from dirstate before comparing manifests
497 # flush lists from dirstate before comparing manifests
498 c, a = [], []
498 c, a = [], []
499
499
500 change = self.changelog.read(node1)
500 change = self.changelog.read(node1)
501 mf1 = mfmatches(change[0])
501 mf1 = mfmatches(change[0])
502
502
503 for fn in mf2:
503 for fn in mf2:
504 if mf1.has_key(fn):
504 if mf1.has_key(fn):
505 if mf1[fn] != mf2[fn]:
505 if mf1[fn] != mf2[fn]:
506 if mf2[fn] != "" or fcmp(fn, mf1):
506 if mf2[fn] != "" or fcmp(fn, mf1):
507 c.append(fn)
507 c.append(fn)
508 del mf1[fn]
508 del mf1[fn]
509 else:
509 else:
510 a.append(fn)
510 a.append(fn)
511
511
512 d = mf1.keys()
512 d = mf1.keys()
513
513
514 for l in c, a, d, u:
514 for l in c, a, d, u:
515 l.sort()
515 l.sort()
516
516
517 return (c, a, d, u)
517 return (c, a, d, u)
518
518
519 def add(self, list):
519 def add(self, list):
520 for f in list:
520 for f in list:
521 p = self.wjoin(f)
521 p = self.wjoin(f)
522 if not os.path.exists(p):
522 if not os.path.exists(p):
523 self.ui.warn("%s does not exist!\n" % f)
523 self.ui.warn("%s does not exist!\n" % f)
524 elif not os.path.isfile(p):
524 elif not os.path.isfile(p):
525 self.ui.warn("%s not added: only files supported currently\n" % f)
525 self.ui.warn("%s not added: only files supported currently\n" % f)
526 elif self.dirstate.state(f) in 'an':
526 elif self.dirstate.state(f) in 'an':
527 self.ui.warn("%s already tracked!\n" % f)
527 self.ui.warn("%s already tracked!\n" % f)
528 else:
528 else:
529 self.dirstate.update([f], "a")
529 self.dirstate.update([f], "a")
530
530
531 def forget(self, list):
531 def forget(self, list):
532 for f in list:
532 for f in list:
533 if self.dirstate.state(f) not in 'ai':
533 if self.dirstate.state(f) not in 'ai':
534 self.ui.warn("%s not added!\n" % f)
534 self.ui.warn("%s not added!\n" % f)
535 else:
535 else:
536 self.dirstate.forget([f])
536 self.dirstate.forget([f])
537
537
538 def remove(self, list):
538 def remove(self, list):
539 for f in list:
539 for f in list:
540 p = self.wjoin(f)
540 p = self.wjoin(f)
541 if os.path.exists(p):
541 if os.path.exists(p):
542 self.ui.warn("%s still exists!\n" % f)
542 self.ui.warn("%s still exists!\n" % f)
543 elif self.dirstate.state(f) == 'a':
543 elif self.dirstate.state(f) == 'a':
544 self.ui.warn("%s never committed!\n" % f)
544 self.ui.warn("%s never committed!\n" % f)
545 self.dirstate.forget([f])
545 self.dirstate.forget([f])
546 elif f not in self.dirstate:
546 elif f not in self.dirstate:
547 self.ui.warn("%s not tracked!\n" % f)
547 self.ui.warn("%s not tracked!\n" % f)
548 else:
548 else:
549 self.dirstate.update([f], "r")
549 self.dirstate.update([f], "r")
550
550
551 def copy(self, source, dest):
551 def copy(self, source, dest):
552 p = self.wjoin(dest)
552 p = self.wjoin(dest)
553 if not os.path.exists(p):
553 if not os.path.exists(p):
554 self.ui.warn("%s does not exist!\n" % dest)
554 self.ui.warn("%s does not exist!\n" % dest)
555 elif not os.path.isfile(p):
555 elif not os.path.isfile(p):
556 self.ui.warn("copy failed: %s is not a file\n" % dest)
556 self.ui.warn("copy failed: %s is not a file\n" % dest)
557 else:
557 else:
558 if self.dirstate.state(dest) == '?':
558 if self.dirstate.state(dest) == '?':
559 self.dirstate.update([dest], "a")
559 self.dirstate.update([dest], "a")
560 self.dirstate.copy(source, dest)
560 self.dirstate.copy(source, dest)
561
561
562 def heads(self):
562 def heads(self):
563 return self.changelog.heads()
563 return self.changelog.heads()
564
564
565 # branchlookup returns a dict giving a list of branches for
565 # branchlookup returns a dict giving a list of branches for
566 # each head. A branch is defined as the tag of a node or
566 # each head. A branch is defined as the tag of a node or
567 # the branch of the node's parents. If a node has multiple
567 # the branch of the node's parents. If a node has multiple
568 # branch tags, tags are eliminated if they are visible from other
568 # branch tags, tags are eliminated if they are visible from other
569 # branch tags.
569 # branch tags.
570 #
570 #
571 # So, for this graph: a->b->c->d->e
571 # So, for this graph: a->b->c->d->e
572 #                      \         /
572 #                      \         /
573 #                       aa -----/
573 #                       aa -----/
574 # a has tag 2.6.12
574 # a has tag 2.6.12
575 # d has tag 2.6.13
575 # d has tag 2.6.13
576 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
576 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
577 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
577 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
578 # from the list.
578 # from the list.
579 #
579 #
580 # It is possible that more than one head will have the same branch tag.
580 # It is possible that more than one head will have the same branch tag.
581 # callers need to check the result for multiple heads under the same
581 # callers need to check the result for multiple heads under the same
582 # branch tag if that is a problem for them (ie checkout of a specific
582 # branch tag if that is a problem for them (ie checkout of a specific
583 # branch).
583 # branch).
584 #
584 #
585 # passing in a specific branch will limit the depth of the search
585 # passing in a specific branch will limit the depth of the search
586 # through the parents. It won't limit the branches returned in the
586 # through the parents. It won't limit the branches returned in the
587 # result though.
587 # result though.
588 def branchlookup(self, heads=None, branch=None):
588 def branchlookup(self, heads=None, branch=None):
589 if not heads:
589 if not heads:
590 heads = self.heads()
590 heads = self.heads()
591 headt = [ h for h in heads ]
591 headt = [ h for h in heads ]
592 chlog = self.changelog
592 chlog = self.changelog
593 branches = {}
593 branches = {}
594 merges = []
594 merges = []
595 seenmerge = {}
595 seenmerge = {}
596
596
597 # traverse the tree once for each head, recording in the branches
597 # traverse the tree once for each head, recording in the branches
598 # dict which tags are visible from this head. The branches
598 # dict which tags are visible from this head. The branches
599 # dict also records which tags are visible from each tag
599 # dict also records which tags are visible from each tag
600 # while we traverse.
600 # while we traverse.
601 while headt or merges:
601 while headt or merges:
602 if merges:
602 if merges:
603 n, found = merges.pop()
603 n, found = merges.pop()
604 visit = [n]
604 visit = [n]
605 else:
605 else:
606 h = headt.pop()
606 h = headt.pop()
607 visit = [h]
607 visit = [h]
608 found = [h]
608 found = [h]
609 seen = {}
609 seen = {}
610 while visit:
610 while visit:
611 n = visit.pop()
611 n = visit.pop()
612 if n in seen:
612 if n in seen:
613 continue
613 continue
614 pp = chlog.parents(n)
614 pp = chlog.parents(n)
615 tags = self.nodetags(n)
615 tags = self.nodetags(n)
616 if tags:
616 if tags:
617 for x in tags:
617 for x in tags:
618 if x == 'tip':
618 if x == 'tip':
619 continue
619 continue
620 for f in found:
620 for f in found:
621 branches.setdefault(f, {})[n] = 1
621 branches.setdefault(f, {})[n] = 1
622 branches.setdefault(n, {})[n] = 1
622 branches.setdefault(n, {})[n] = 1
623 break
623 break
624 if n not in found:
624 if n not in found:
625 found.append(n)
625 found.append(n)
626 if branch in tags:
626 if branch in tags:
627 continue
627 continue
628 seen[n] = 1
628 seen[n] = 1
629 if pp[1] != nullid and n not in seenmerge:
629 if pp[1] != nullid and n not in seenmerge:
630 merges.append((pp[1], [x for x in found]))
630 merges.append((pp[1], [x for x in found]))
631 seenmerge[n] = 1
631 seenmerge[n] = 1
632 if pp[0] != nullid:
632 if pp[0] != nullid:
633 visit.append(pp[0])
633 visit.append(pp[0])
634 # traverse the branches dict, eliminating branch tags from each
634 # traverse the branches dict, eliminating branch tags from each
635 # head that are visible from another branch tag for that head.
635 # head that are visible from another branch tag for that head.
636 out = {}
636 out = {}
637 viscache = {}
637 viscache = {}
638 for h in heads:
638 for h in heads:
639 def visible(node):
639 def visible(node):
640 if node in viscache:
640 if node in viscache:
641 return viscache[node]
641 return viscache[node]
642 ret = {}
642 ret = {}
643 visit = [node]
643 visit = [node]
644 while visit:
644 while visit:
645 x = visit.pop()
645 x = visit.pop()
646 if x in viscache:
646 if x in viscache:
647 ret.update(viscache[x])
647 ret.update(viscache[x])
648 elif x not in ret:
648 elif x not in ret:
649 ret[x] = 1
649 ret[x] = 1
650 if x in branches:
650 if x in branches:
651 visit[len(visit):] = branches[x].keys()
651 visit[len(visit):] = branches[x].keys()
652 viscache[node] = ret
652 viscache[node] = ret
653 return ret
653 return ret
654 if h not in branches:
654 if h not in branches:
655 continue
655 continue
656 # O(n^2), but somewhat limited. This only searches the
656 # O(n^2), but somewhat limited. This only searches the
657 # tags visible from a specific head, not all the tags in the
657 # tags visible from a specific head, not all the tags in the
658 # whole repo.
658 # whole repo.
659 for b in branches[h]:
659 for b in branches[h]:
660 vis = False
660 vis = False
661 for bb in branches[h].keys():
661 for bb in branches[h].keys():
662 if b != bb:
662 if b != bb:
663 if b in visible(bb):
663 if b in visible(bb):
664 vis = True
664 vis = True
665 break
665 break
666 if not vis:
666 if not vis:
667 l = out.setdefault(h, [])
667 l = out.setdefault(h, [])
668 l[len(l):] = self.nodetags(b)
668 l[len(l):] = self.nodetags(b)
669 return out
669 return out
670
670
671 def branches(self, nodes):
671 def branches(self, nodes):
672 if not nodes: nodes = [self.changelog.tip()]
672 if not nodes: nodes = [self.changelog.tip()]
673 b = []
673 b = []
674 for n in nodes:
674 for n in nodes:
675 t = n
675 t = n
676 while n:
676 while n:
677 p = self.changelog.parents(n)
677 p = self.changelog.parents(n)
678 if p[1] != nullid or p[0] == nullid:
678 if p[1] != nullid or p[0] == nullid:
679 b.append((t, n, p[0], p[1]))
679 b.append((t, n, p[0], p[1]))
680 break
680 break
681 n = p[0]
681 n = p[0]
682 return b
682 return b
683
683
684 def between(self, pairs):
684 def between(self, pairs):
685 r = []
685 r = []
686
686
687 for top, bottom in pairs:
687 for top, bottom in pairs:
688 n, l, i = top, [], 0
688 n, l, i = top, [], 0
689 f = 1
689 f = 1
690
690
691 while n != bottom:
691 while n != bottom:
692 p = self.changelog.parents(n)[0]
692 p = self.changelog.parents(n)[0]
693 if i == f:
693 if i == f:
694 l.append(n)
694 l.append(n)
695 f = f * 2
695 f = f * 2
696 n = p
696 n = p
697 i += 1
697 i += 1
698
698
699 r.append(l)
699 r.append(l)
700
700
701 return r
701 return r
702
702
703 def findincoming(self, remote, base=None, heads=None):
703 def findincoming(self, remote, base=None, heads=None):
704 m = self.changelog.nodemap
704 m = self.changelog.nodemap
705 search = []
705 search = []
706 fetch = {}
706 fetch = {}
707 seen = {}
707 seen = {}
708 seenbranch = {}
708 seenbranch = {}
709 if base == None:
709 if base == None:
710 base = {}
710 base = {}
711
711
712 # assume we're closer to the tip than the root
712 # assume we're closer to the tip than the root
713 # and start by examining the heads
713 # and start by examining the heads
714 self.ui.status("searching for changes\n")
714 self.ui.status("searching for changes\n")
715
715
716 if not heads:
716 if not heads:
717 heads = remote.heads()
717 heads = remote.heads()
718
718
719 unknown = []
719 unknown = []
720 for h in heads:
720 for h in heads:
721 if h not in m:
721 if h not in m:
722 unknown.append(h)
722 unknown.append(h)
723 else:
723 else:
724 base[h] = 1
724 base[h] = 1
725
725
726 if not unknown:
726 if not unknown:
727 return None
727 return None
728
728
729 rep = {}
729 rep = {}
730 reqcnt = 0
730 reqcnt = 0
731
731
732 # search through remote branches
732 # search through remote branches
733 # a 'branch' here is a linear segment of history, with four parts:
733 # a 'branch' here is a linear segment of history, with four parts:
734 # head, root, first parent, second parent
734 # head, root, first parent, second parent
735 # (a branch always has two parents (or none) by definition)
735 # (a branch always has two parents (or none) by definition)
736 unknown = remote.branches(unknown)
736 unknown = remote.branches(unknown)
737 while unknown:
737 while unknown:
738 r = []
738 r = []
739 while unknown:
739 while unknown:
740 n = unknown.pop(0)
740 n = unknown.pop(0)
741 if n[0] in seen:
741 if n[0] in seen:
742 continue
742 continue
743
743
744 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
744 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
745 if n[0] == nullid:
745 if n[0] == nullid:
746 break
746 break
747 if n in seenbranch:
747 if n in seenbranch:
748 self.ui.debug("branch already found\n")
748 self.ui.debug("branch already found\n")
749 continue
749 continue
750 if n[1] and n[1] in m: # do we know the base?
750 if n[1] and n[1] in m: # do we know the base?
751 self.ui.debug("found incomplete branch %s:%s\n"
751 self.ui.debug("found incomplete branch %s:%s\n"
752 % (short(n[0]), short(n[1])))
752 % (short(n[0]), short(n[1])))
753 search.append(n) # schedule branch range for scanning
753 search.append(n) # schedule branch range for scanning
754 seenbranch[n] = 1
754 seenbranch[n] = 1
755 else:
755 else:
756 if n[1] not in seen and n[1] not in fetch:
756 if n[1] not in seen and n[1] not in fetch:
757 if n[2] in m and n[3] in m:
757 if n[2] in m and n[3] in m:
758 self.ui.debug("found new changeset %s\n" %
758 self.ui.debug("found new changeset %s\n" %
759 short(n[1]))
759 short(n[1]))
760 fetch[n[1]] = 1 # earliest unknown
760 fetch[n[1]] = 1 # earliest unknown
761 base[n[2]] = 1 # latest known
761 base[n[2]] = 1 # latest known
762 continue
762 continue
763
763
764 for a in n[2:4]:
764 for a in n[2:4]:
765 if a not in rep:
765 if a not in rep:
766 r.append(a)
766 r.append(a)
767 rep[a] = 1
767 rep[a] = 1
768
768
769 seen[n[0]] = 1
769 seen[n[0]] = 1
770
770
771 if r:
771 if r:
772 reqcnt += 1
772 reqcnt += 1
773 self.ui.debug("request %d: %s\n" %
773 self.ui.debug("request %d: %s\n" %
774 (reqcnt, " ".join(map(short, r))))
774 (reqcnt, " ".join(map(short, r))))
775 for p in range(0, len(r), 10):
775 for p in range(0, len(r), 10):
776 for b in remote.branches(r[p:p+10]):
776 for b in remote.branches(r[p:p+10]):
777 self.ui.debug("received %s:%s\n" %
777 self.ui.debug("received %s:%s\n" %
778 (short(b[0]), short(b[1])))
778 (short(b[0]), short(b[1])))
779 if b[0] in m:
779 if b[0] in m:
780 self.ui.debug("found base node %s\n" % short(b[0]))
780 self.ui.debug("found base node %s\n" % short(b[0]))
781 base[b[0]] = 1
781 base[b[0]] = 1
782 elif b[0] not in seen:
782 elif b[0] not in seen:
783 unknown.append(b)
783 unknown.append(b)
784
784
785 # do binary search on the branches we found
785 # do binary search on the branches we found
786 while search:
786 while search:
787 n = search.pop(0)
787 n = search.pop(0)
788 reqcnt += 1
788 reqcnt += 1
789 l = remote.between([(n[0], n[1])])[0]
789 l = remote.between([(n[0], n[1])])[0]
790 l.append(n[1])
790 l.append(n[1])
791 p = n[0]
791 p = n[0]
792 f = 1
792 f = 1
793 for i in l:
793 for i in l:
794 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
794 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
795 if i in m:
795 if i in m:
796 if f <= 2:
796 if f <= 2:
797 self.ui.debug("found new branch changeset %s\n" %
797 self.ui.debug("found new branch changeset %s\n" %
798 short(p))
798 short(p))
799 fetch[p] = 1
799 fetch[p] = 1
800 base[i] = 1
800 base[i] = 1
801 else:
801 else:
802 self.ui.debug("narrowed branch search to %s:%s\n"
802 self.ui.debug("narrowed branch search to %s:%s\n"
803 % (short(p), short(i)))
803 % (short(p), short(i)))
804 search.append((p, i))
804 search.append((p, i))
805 break
805 break
806 p, f = i, f * 2
806 p, f = i, f * 2
807
807
808 # sanity check our fetch list
808 # sanity check our fetch list
809 for f in fetch.keys():
809 for f in fetch.keys():
810 if f in m:
810 if f in m:
811 raise repo.RepoError("already have changeset " + short(f[:4]))
811 raise repo.RepoError("already have changeset " + short(f[:4]))
812
812
813 if base.keys() == [nullid]:
813 if base.keys() == [nullid]:
814 self.ui.warn("warning: pulling from an unrelated repository!\n")
814 self.ui.warn("warning: pulling from an unrelated repository!\n")
815
815
816 self.ui.note("found new changesets starting at " +
816 self.ui.note("found new changesets starting at " +
817 " ".join([short(f) for f in fetch]) + "\n")
817 " ".join([short(f) for f in fetch]) + "\n")
818
818
819 self.ui.debug("%d total queries\n" % reqcnt)
819 self.ui.debug("%d total queries\n" % reqcnt)
820
820
821 return fetch.keys()
821 return fetch.keys()
822
822
823 def findoutgoing(self, remote, base=None, heads=None):
823 def findoutgoing(self, remote, base=None, heads=None):
824 if base == None:
824 if base == None:
825 base = {}
825 base = {}
826 self.findincoming(remote, base, heads)
826 self.findincoming(remote, base, heads)
827
827
828 self.ui.debug("common changesets up to "
828 self.ui.debug("common changesets up to "
829 + " ".join(map(short, base.keys())) + "\n")
829 + " ".join(map(short, base.keys())) + "\n")
830
830
831 remain = dict.fromkeys(self.changelog.nodemap)
831 remain = dict.fromkeys(self.changelog.nodemap)
832
832
833 # prune everything remote has from the tree
833 # prune everything remote has from the tree
834 del remain[nullid]
834 del remain[nullid]
835 remove = base.keys()
835 remove = base.keys()
836 while remove:
836 while remove:
837 n = remove.pop(0)
837 n = remove.pop(0)
838 if n in remain:
838 if n in remain:
839 del remain[n]
839 del remain[n]
840 for p in self.changelog.parents(n):
840 for p in self.changelog.parents(n):
841 remove.append(p)
841 remove.append(p)
842
842
843 # find every node whose parents have been pruned
843 # find every node whose parents have been pruned
844 subset = []
844 subset = []
845 for n in remain:
845 for n in remain:
846 p1, p2 = self.changelog.parents(n)
846 p1, p2 = self.changelog.parents(n)
847 if p1 not in remain and p2 not in remain:
847 if p1 not in remain and p2 not in remain:
848 subset.append(n)
848 subset.append(n)
849
849
850 # this is the set of all roots we have to push
850 # this is the set of all roots we have to push
851 return subset
851 return subset
852
852
853 def pull(self, remote, heads = None):
853 def pull(self, remote, heads = None):
854 lock = self.lock()
854 lock = self.lock()
855
855
856 # if we have an empty repo, fetch everything
856 # if we have an empty repo, fetch everything
857 if self.changelog.tip() == nullid:
857 if self.changelog.tip() == nullid:
858 self.ui.status("requesting all changes\n")
858 self.ui.status("requesting all changes\n")
859 fetch = [nullid]
859 fetch = [nullid]
860 else:
860 else:
861 fetch = self.findincoming(remote)
861 fetch = self.findincoming(remote)
862
862
863 if not fetch:
863 if not fetch:
864 self.ui.status("no changes found\n")
864 self.ui.status("no changes found\n")
865 return 1
865 return 1
866
866
867 if heads is None:
867 if heads is None:
868 cg = remote.changegroup(fetch)
868 cg = remote.changegroup(fetch)
869 else:
869 else:
870 cg = remote.changegroupsubset(fetch, heads)
870 cg = remote.changegroupsubset(fetch, heads)
871 return self.addchangegroup(cg)
871 return self.addchangegroup(cg)
872
872
873 def push(self, remote, force=False):
873 def push(self, remote, force=False):
874 lock = remote.lock()
874 lock = remote.lock()
875
875
876 base = {}
876 base = {}
877 heads = remote.heads()
877 heads = remote.heads()
878 inc = self.findincoming(remote, base, heads)
878 inc = self.findincoming(remote, base, heads)
879 if not force and inc:
879 if not force and inc:
880 self.ui.warn("abort: unsynced remote changes!\n")
880 self.ui.warn("abort: unsynced remote changes!\n")
881 self.ui.status("(did you forget to sync? use push -f to force)\n")
881 self.ui.status("(did you forget to sync? use push -f to force)\n")
882 return 1
882 return 1
883
883
884 update = self.findoutgoing(remote, base)
884 update = self.findoutgoing(remote, base)
885 if not update:
885 if not update:
886 self.ui.status("no changes found\n")
886 self.ui.status("no changes found\n")
887 return 1
887 return 1
888 elif not force:
888 elif not force:
889 if len(heads) < len(self.changelog.heads()):
889 if len(heads) < len(self.changelog.heads()):
890 self.ui.warn("abort: push creates new remote branches!\n")
890 self.ui.warn("abort: push creates new remote branches!\n")
891 self.ui.status("(did you forget to merge?" +
891 self.ui.status("(did you forget to merge?" +
892 " use push -f to force)\n")
892 " use push -f to force)\n")
893 return 1
893 return 1
894
894
895 cg = self.changegroup(update)
895 cg = self.changegroup(update)
896 return remote.addchangegroup(cg)
896 return remote.addchangegroup(cg)
897
897
898 def changegroupsubset(self, bases, heads):
898 def changegroupsubset(self, bases, heads):
899 cl = self.changelog
899 cl = self.changelog
900 # msng = missing
900 # msng = missing
901 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
901 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
902 junk = None
902 junk = None
903 knownheads = {}
903 knownheads = {}
904 for n in bases:
904 for n in bases:
905 for p in cl.parents(n):
905 for p in cl.parents(n):
906 if p != nullid:
906 if p != nullid:
907 knownheads[p] = 1
907 knownheads[p] = 1
908 knownheads = knownheads.keys()
908 knownheads = knownheads.keys()
909 if knownheads:
909 if knownheads:
910 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
910 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
911 has_cl_set = dict.fromkeys(has_cl_set)
911 has_cl_set = dict.fromkeys(has_cl_set)
912 else:
912 else:
913 has_cl_set = {}
913 has_cl_set = {}
914
914
915 mnfst = self.manifest
915 mnfst = self.manifest
916 msng_mnfst_set = {}
916 msng_mnfst_set = {}
917 msng_filenode_set = {}
917 msng_filenode_set = {}
918
918
919 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
919 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
920 junk = None
920 junk = None
921
921
922 def identity(x):
922 def identity(x):
923 return x
923 return x
924
924
925 def cmp_by_rev_func(revlog):
925 def cmp_by_rev_func(revlog):
926 def cmpfunc(a, b):
926 def cmpfunc(a, b):
927 return cmp(revlog.rev(a), revlog.rev(b))
927 return cmp(revlog.rev(a), revlog.rev(b))
928 return cmpfunc
928 return cmpfunc
929
929
930 def prune_parents(revlog, hasset, msngset):
930 def prune_parents(revlog, hasset, msngset):
931 haslst = hasset.keys()
931 haslst = hasset.keys()
932 haslst.sort(cmp_by_rev_func(revlog))
932 haslst.sort(cmp_by_rev_func(revlog))
933 for node in haslst:
933 for node in haslst:
934 parentlst = [p for p in revlog.parents(node) if p != nullid]
934 parentlst = [p for p in revlog.parents(node) if p != nullid]
935 while parentlst:
935 while parentlst:
936 n = parentlst.pop()
936 n = parentlst.pop()
937 if n not in hasset:
937 if n not in hasset:
938 hasset[n] = 1
938 hasset[n] = 1
939 p = [p for p in revlog.parents(n) if p != nullid]
939 p = [p for p in revlog.parents(n) if p != nullid]
940 parentlst.extend(p)
940 parentlst.extend(p)
941 for n in hasset:
941 for n in hasset:
942 msngset.pop(n, None)
942 msngset.pop(n, None)
943
943
944 def manifest_and_file_collector(changedfileset):
944 def manifest_and_file_collector(changedfileset):
945 def collect_manifests_and_files(clnode):
945 def collect_manifests_and_files(clnode):
946 c = cl.read(clnode)
946 c = cl.read(clnode)
947 for f in c[3]:
947 for f in c[3]:
948 # This is to make sure we only have one instance of each
948 # This is to make sure we only have one instance of each
949 # filename string for each filename.
949 # filename string for each filename.
950 changedfileset.setdefault(f, f)
950 changedfileset.setdefault(f, f)
951 msng_mnfst_set.setdefault(c[0], clnode)
951 msng_mnfst_set.setdefault(c[0], clnode)
952 return collect_manifests_and_files
952 return collect_manifests_and_files
953
953
954 def prune_manifests():
954 def prune_manifests():
955 has_mnfst_set = {}
955 has_mnfst_set = {}
956 for n in msng_mnfst_set:
956 for n in msng_mnfst_set:
957 linknode = cl.node(mnfst.linkrev(n))
957 linknode = cl.node(mnfst.linkrev(n))
958 if linknode in has_cl_set:
958 if linknode in has_cl_set:
959 has_mnfst_set[n] = 1
959 has_mnfst_set[n] = 1
960 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
960 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
961
961
962 def lookup_manifest_link(mnfstnode):
962 def lookup_manifest_link(mnfstnode):
963 return msng_mnfst_set[mnfstnode]
963 return msng_mnfst_set[mnfstnode]
964
964
965 def filenode_collector(changedfiles):
965 def filenode_collector(changedfiles):
966 def collect_msng_filenodes(mnfstnode):
967 m = mnfst.read(mnfstnode)
968 for f in changedfiles:
969 fnode = m.get(f, None)
970 if fnode is not None:
971 clnode = msng_mnfst_set[mnfstnode]
972 ndset = msng_filenode_set.setdefault(f, {})
973 ndset.setdefault(fnode, clnode)
974 return collect_msng_filenodes
966 next_rev = [0]
967 def collect_msng_filenodes(mnfstnode):
968 r = mnfst.rev(mnfstnode)
969 if r == next_rev[0]:
970 # If the last rev we looked at was the one just previous,
971 # we only need to see a diff.
972 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
973 for dline in delta.splitlines():
974 f, fnode = dline.split('\0')
975 fnode = bin(fnode[:40])
976 f = changedfiles.get(f, None)
977 if f is not None:
978 clnode = msng_mnfst_set[mnfstnode]
979 ndset = msng_filenode_set.setdefault(f, {})
980 ndset.setdefault(fnode, clnode)
981 else:
982 m = mnfst.read(mnfstnode)
983 for f in changedfiles:
984 fnode = m.get(f, None)
985 if fnode is not None:
986 clnode = msng_mnfst_set[mnfstnode]
987 ndset = msng_filenode_set.setdefault(f, {})
988 ndset.setdefault(fnode, clnode)
989 next_rev[0] = r + 1
990 return collect_msng_filenodes
975
991
976 def prune_filenodes(f, filerevlog):
992 def prune_filenodes(f, filerevlog):
977 msngset = msng_filenode_set[f]
993 msngset = msng_filenode_set[f]
978 hasset = {}
994 hasset = {}
979 for n in msngset:
995 for n in msngset:
980 clnode = cl.node(filerevlog.linkrev(n))
996 clnode = cl.node(filerevlog.linkrev(n))
981 if clnode in has_cl_set:
997 if clnode in has_cl_set:
982 hasset[n] = 1
998 hasset[n] = 1
983 prune_parents(filerevlog, hasset, msngset)
999 prune_parents(filerevlog, hasset, msngset)
984
1000
985 def lookup_filenode_link_func(fname):
1001 def lookup_filenode_link_func(fname):
986 msngset = msng_filenode_set[fname]
1002 msngset = msng_filenode_set[fname]
987 def lookup_filenode_link(fnode):
1003 def lookup_filenode_link(fnode):
988 return msngset[fnode]
1004 return msngset[fnode]
989 return lookup_filenode_link
1005 return lookup_filenode_link
990
1006
991 def gengroup():
1007 def gengroup():
992 changedfiles = {}
1008 changedfiles = {}
993 group = cl.group(msng_cl_lst, identity,
1009 group = cl.group(msng_cl_lst, identity,
994 manifest_and_file_collector(changedfiles))
1010 manifest_and_file_collector(changedfiles))
995 for chnk in group:
1011 for chnk in group:
996 yield chnk
1012 yield chnk
997 prune_manifests()
1013 prune_manifests()
998 msng_mnfst_lst = msng_mnfst_set.keys()
1014 msng_mnfst_lst = msng_mnfst_set.keys()
999 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1015 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1000 changedfiles = changedfiles.keys()
1001 changedfiles.sort()
1002 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1016 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1003 filenode_collector(changedfiles))
1017 filenode_collector(changedfiles))
1004 for chnk in group:
1018 for chnk in group:
1005 yield chnk
1019 yield chnk
1006 msng_mnfst_lst = None
1020 msng_mnfst_lst = None
1007 msng_mnfst_set.clear()
1021 msng_mnfst_set.clear()
1022 changedfiles = changedfiles.keys()
1023 changedfiles.sort()
1008 for fname in changedfiles:
1024 for fname in changedfiles:
1009 filerevlog = self.file(fname)
1025 filerevlog = self.file(fname)
1010 prune_filenodes(fname, filerevlog)
1026 prune_filenodes(fname, filerevlog)
1011 msng_filenode_lst = msng_filenode_set[fname].keys()
1027 msng_filenode_lst = msng_filenode_set[fname].keys()
1012 if len(msng_filenode_lst) > 0:
1028 if len(msng_filenode_lst) > 0:
1013 yield struct.pack(">l", len(fname) + 4) + fname
1029 yield struct.pack(">l", len(fname) + 4) + fname
1014 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1030 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1015 group = filerevlog.group(msng_filenode_lst,
1031 group = filerevlog.group(msng_filenode_lst,
1016 lookup_filenode_link_func(fname))
1032 lookup_filenode_link_func(fname))
1017 for chnk in group:
1033 for chnk in group:
1018 yield chnk
1034 yield chnk
1019 del msng_filenode_set[fname]
1035 del msng_filenode_set[fname]
1020 yield struct.pack(">l", 0)
1036 yield struct.pack(">l", 0)
1021
1037
1022 return util.chunkbuffer(gengroup())
1038 return util.chunkbuffer(gengroup())
1023
1039
    def changegroup(self, basenodes):
        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield struct.pack(">l", len(fname) + 4) + fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield struct.pack(">l", 0)

        return util.chunkbuffer(gengroup())

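    # A minimal sketch of how producer and consumer fit together, assuming
    # two local repository objects `src` and `dst` (hypothetical names, not
    # part of this module):
    #
    #   cg = src.changegroup([base_node])   # bundle everything after base_node
    #   dst.addchangegroup(cg)              # replay the stream, see below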
    def addchangegroup(self, source):

        def getchunk():
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            d = source.read(l - 4)
            if len(d) < l - 4:
                raise repo.RepoError("premature EOF reading chunk" +
                                     " (got %d bytes, expected %d)"
                                     % (len(d), l - 4))
            return d

        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

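        # getchunk() returns "" both at end of input and when it reads a
        # length of 4 or less, which is how the zero-length terminator written
        # by the generators above is recognised; getgroup() therefore yields
        # the chunks of exactly one delta group and then stops.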
        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not source: return
        changesets = files = revisions = 0

        tr = self.transaction()

        oldheads = len(self.changelog.heads())

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        cnr, cor = map(self.changelog.rev, (cn, co))
        if cn == nullid:
            cnr = cor
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file changes\n")
        while 1:
            f = getchunk()
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads > oldheads:
            heads = " (+%d heads)" % (newheads - oldheads)

        self.ui.status(("added %d changesets" +
                        " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        tr.close()

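        # Hooks run only after the transaction has been committed: the
        # "changegroup" hook fires once with the first newly added changeset
        # (revision cor + 1), then the "commit" hook fires once per changeset
        # in the new range.  A failing changegroup hook is reported and makes
        # this method return 1, but the pulled data stays in the repository.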
        if changesets > 0:
            if not self.hook("changegroup",
                             node=hex(self.changelog.node(cor+1))):
                self.ui.warn("abort: changegroup hook returned failure!\n")
                return 1

            for i in range(cor + 1, cnr + 1):
                self.hook("commit", node=hex(self.changelog.node(i)))

        return

    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes()

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

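        # pa == p1 means the target is a descendant of the working directory
        # parent (a plain forward update); pa == p2 means we are stepping back
        # to an ancestor; any other common ancestor makes this a branch merge.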
        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

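        # Files added, changed or unknown locally get a null entry ("") in the
        # working-dir manifest mw, so a plain node comparison against m2 below
        # always treats them as different; the content check at the top of the
        # loop catches the case where a file not present in m1 already matches
        # the target version byte for byte.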
        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
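                        # The xor trick keeps whichever side changed the bit
                        # relative to the ancestor a: e.g. a=0, b=1, c=0 gives
                        # ((0^1) | (0^0)) ^ 0 = 1, while a=1, b=1, c=0 gives
                        # ((1^1) | (1^0)) ^ 1 = 0, so the changed side wins.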
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = "d"
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug("remote deleted %s, clobbering\n" % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local modified %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

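        # Second pass: files that exist in the target manifest but not in the
        # working directory.  Depending on force, update direction and the
        # user's answer they are fetched, left deleted, or resurrected.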
        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = "k"
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "k": get[f] = n
            elif f not in ma:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug("local deleted %s, recreating\n" % f)
                    get[f] = n
                else:
                    self.ui.debug("local deleted %s\n" % f)

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to merge across branches" +
                               " or -C to lose changes)\n")
                return 1
            branch_merge = True

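        # From here on p1/p2 are the dirstate parents we are moving to: a
        # linear (or forced) update leaves a single parent with p2 null, while
        # a branch merge keeps both so the next commit is recorded as a merge.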
        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wwrite(f, t)
            except IOError, e:
                if e.errno != errno.ENOENT:
                    raise
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note("removing %s\n" % f)
            try:
                os.unlink(self.wjoin(f))
            except OSError, inst:
                self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
            # try removing directories that might now be empty
            try: os.removedirs(os.path.dirname(self.wjoin(f)))
            except: pass
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            self.wwrite(fn, fl.read(node), f)
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: my %s other %s ancestor %s\n" %
                      (fn, short(my), short(other), short(base)))

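        # The merge command comes from $HGMERGE, then the ui.merge config
        # entry, then falls back to running "hgmerge".  It is invoked as
        # "<cmd> <local> <ancestor> <other>" and is expected to leave the
        # result in the local working file; a non-zero exit status is only
        # reported as a warning before the temporary copies are removed.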
        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)

    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = [0]
        neededmanifests = {}

        def err(msg):
            self.ui.warn(msg + "\n")
            errors[0] += 1

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            l = self.changelog.linkrev(n)
            if l != i:
                err("incorrect link (%d) for changeset revision %d" % (l, i))
            if n in seen:
                err("duplicate changeset at revision %d" % i)
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    err("changeset %s has unknown parent %s" %
                        (short(n), short(p)))
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                err("unpacking changeset %s: %s" % (short(n), inst))

            neededmanifests[changes[0]] = n

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            l = self.manifest.linkrev(n)

            if l < 0 or l >= self.changelog.count():
                err("bad manifest link (%d) at revision %d" % (l, i))

            if n in neededmanifests:
                del neededmanifests[n]

            if n in seen:
                err("duplicate manifest at revision %d" % i)

            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    err("manifest %s has unknown parent %s" %
                        (short(n), short(p)))

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                self.ui.warn("interrupted")
                raise
            except Exception, inst:
                err("unpacking manifest %s: %s" % (short(n), inst))

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1
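            # Each manifest line has the form "<path>\0<40 hex chars of
            # node><flags>", which is why fn[:40] above is the hex file node;
            # bin() turns it back into the binary form used as the key.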

        self.ui.status("crosschecking files in changesets and manifests\n")

        for m,c in neededmanifests.items():
            err("Changeset %s refers to unknown manifest %s" %
                (short(m), short(c)))
        del neededmanifests

        for f in filenodes:
            if f not in filelinkrevs:
                err("file %s in manifest but not in changesets" % f)

        for f in filelinkrevs:
            if f not in filenodes:
                err("file %s in changeset but not in manifest" % f)

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    err("%s: duplicate revision %d" % (f, i))
                if n not in filenodes[f]:
                    err("%s: %d:%s not in manifests" % (f, i, short(n)))
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    err("%s:%s points to unexpected changeset %d"
                        % (f, short(n), flr))
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    err("unpacking file %s %s: %s" % (f, short(n), inst))

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    err("file %s:%s unknown parent 1 %s" %
                        (f, short(n), short(p1)))
                if p2 not in nodes:
                    err("file %s:%s unknown parent 2 %s" %
                        (f, short(n), short(p2)))
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                err("node %s in manifests not in %s" % (hex(node), f))

        self.ui.status("%d files, %d changesets, %d total revisions\n" %
                       (files, changesets, revisions))

        if errors[0]:
            self.ui.warn("%d integrity errors encountered!\n" % errors[0])
            return 1