##// END OF EJS Templates
fix race in localrepo.addchangegroup....
Vadim Gelfer -
r1998:65cc17ae default
parent child Browse files
Show More
@@ -1,1904 +1,1919 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "changegroup")
14 demandload(globals(), "appendfile changegroup")
15
15
class localrepository(object):
    def __del__(self):
        """Release the transaction handle when the repository object dies."""
        self.transhandle = None
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 if not path:
20 if not path:
21 p = os.getcwd()
21 p = os.getcwd()
22 while not os.path.isdir(os.path.join(p, ".hg")):
22 while not os.path.isdir(os.path.join(p, ".hg")):
23 oldp = p
23 oldp = p
24 p = os.path.dirname(p)
24 p = os.path.dirname(p)
25 if p == oldp:
25 if p == oldp:
26 raise repo.RepoError(_("no repo found"))
26 raise repo.RepoError(_("no repo found"))
27 path = p
27 path = p
28 self.path = os.path.join(path, ".hg")
28 self.path = os.path.join(path, ".hg")
29
29
30 if not create and not os.path.isdir(self.path):
30 if not create and not os.path.isdir(self.path):
31 raise repo.RepoError(_("repository %s not found") % path)
31 raise repo.RepoError(_("repository %s not found") % path)
32
32
33 self.root = os.path.abspath(path)
33 self.root = os.path.abspath(path)
34 self.ui = ui.ui(parentui=parentui)
34 self.ui = ui.ui(parentui=parentui)
35 self.opener = util.opener(self.path)
35 self.opener = util.opener(self.path)
36 self.wopener = util.opener(self.root)
36 self.wopener = util.opener(self.root)
37 self.manifest = manifest.manifest(self.opener)
37 self.manifest = manifest.manifest(self.opener)
38 self.changelog = changelog.changelog(self.opener)
38 self.changelog = changelog.changelog(self.opener)
39 self.tagscache = None
39 self.tagscache = None
40 self.nodetagscache = None
40 self.nodetagscache = None
41 self.encodepats = None
41 self.encodepats = None
42 self.decodepats = None
42 self.decodepats = None
43 self.transhandle = None
43 self.transhandle = None
44
44
45 if create:
45 if create:
46 os.mkdir(self.path)
46 os.mkdir(self.path)
47 os.mkdir(self.join("data"))
47 os.mkdir(self.join("data"))
48
48
49 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
49 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
50 try:
50 try:
51 self.ui.readconfig(self.join("hgrc"), self.root)
51 self.ui.readconfig(self.join("hgrc"), self.root)
52 except IOError:
52 except IOError:
53 pass
53 pass
54
54
55 def hook(self, name, throw=False, **args):
55 def hook(self, name, throw=False, **args):
56 def runhook(name, cmd):
56 def runhook(name, cmd):
57 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
57 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
58 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
58 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
59 [(k.upper(), v) for k, v in args.iteritems()])
59 [(k.upper(), v) for k, v in args.iteritems()])
60 r = util.system(cmd, environ=env, cwd=self.root)
60 r = util.system(cmd, environ=env, cwd=self.root)
61 if r:
61 if r:
62 desc, r = util.explain_exit(r)
62 desc, r = util.explain_exit(r)
63 if throw:
63 if throw:
64 raise util.Abort(_('%s hook %s') % (name, desc))
64 raise util.Abort(_('%s hook %s') % (name, desc))
65 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
65 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
66 return False
66 return False
67 return True
67 return True
68
68
69 r = True
69 r = True
70 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
70 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
71 if hname.split(".", 1)[0] == name and cmd]
71 if hname.split(".", 1)[0] == name and cmd]
72 hooks.sort()
72 hooks.sort()
73 for hname, cmd in hooks:
73 for hname, cmd in hooks:
74 r = runhook(hname, cmd) and r
74 r = runhook(hname, cmd) and r
75 return r
75 return r
76
76
77 def tags(self):
77 def tags(self):
78 '''return a mapping of tag to node'''
78 '''return a mapping of tag to node'''
79 if not self.tagscache:
79 if not self.tagscache:
80 self.tagscache = {}
80 self.tagscache = {}
81
81
82 def parsetag(line, context):
82 def parsetag(line, context):
83 if not line:
83 if not line:
84 return
84 return
85 s = l.split(" ", 1)
85 s = l.split(" ", 1)
86 if len(s) != 2:
86 if len(s) != 2:
87 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
87 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
88 return
88 return
89 node, key = s
89 node, key = s
90 try:
90 try:
91 bin_n = bin(node)
91 bin_n = bin(node)
92 except TypeError:
92 except TypeError:
93 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
93 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
94 return
94 return
95 if bin_n not in self.changelog.nodemap:
95 if bin_n not in self.changelog.nodemap:
96 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
96 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
97 return
97 return
98 self.tagscache[key.strip()] = bin_n
98 self.tagscache[key.strip()] = bin_n
99
99
100 # read each head of the tags file, ending with the tip
100 # read each head of the tags file, ending with the tip
101 # and add each tag found to the map, with "newer" ones
101 # and add each tag found to the map, with "newer" ones
102 # taking precedence
102 # taking precedence
103 fl = self.file(".hgtags")
103 fl = self.file(".hgtags")
104 h = fl.heads()
104 h = fl.heads()
105 h.reverse()
105 h.reverse()
106 for r in h:
106 for r in h:
107 count = 0
107 count = 0
108 for l in fl.read(r).splitlines():
108 for l in fl.read(r).splitlines():
109 count += 1
109 count += 1
110 parsetag(l, ".hgtags:%d" % count)
110 parsetag(l, ".hgtags:%d" % count)
111
111
112 try:
112 try:
113 f = self.opener("localtags")
113 f = self.opener("localtags")
114 count = 0
114 count = 0
115 for l in f:
115 for l in f:
116 count += 1
116 count += 1
117 parsetag(l, "localtags:%d" % count)
117 parsetag(l, "localtags:%d" % count)
118 except IOError:
118 except IOError:
119 pass
119 pass
120
120
121 self.tagscache['tip'] = self.changelog.tip()
121 self.tagscache['tip'] = self.changelog.tip()
122
122
123 return self.tagscache
123 return self.tagscache
124
124
125 def tagslist(self):
125 def tagslist(self):
126 '''return a list of tags ordered by revision'''
126 '''return a list of tags ordered by revision'''
127 l = []
127 l = []
128 for t, n in self.tags().items():
128 for t, n in self.tags().items():
129 try:
129 try:
130 r = self.changelog.rev(n)
130 r = self.changelog.rev(n)
131 except:
131 except:
132 r = -2 # sort to the beginning of the list if unknown
132 r = -2 # sort to the beginning of the list if unknown
133 l.append((r, t, n))
133 l.append((r, t, n))
134 l.sort()
134 l.sort()
135 return [(t, n) for r, t, n in l]
135 return [(t, n) for r, t, n in l]
136
136
137 def nodetags(self, node):
137 def nodetags(self, node):
138 '''return the tags associated with a node'''
138 '''return the tags associated with a node'''
139 if not self.nodetagscache:
139 if not self.nodetagscache:
140 self.nodetagscache = {}
140 self.nodetagscache = {}
141 for t, n in self.tags().items():
141 for t, n in self.tags().items():
142 self.nodetagscache.setdefault(n, []).append(t)
142 self.nodetagscache.setdefault(n, []).append(t)
143 return self.nodetagscache.get(node, [])
143 return self.nodetagscache.get(node, [])
144
144
145 def lookup(self, key):
145 def lookup(self, key):
146 try:
146 try:
147 return self.tags()[key]
147 return self.tags()[key]
148 except KeyError:
148 except KeyError:
149 try:
149 try:
150 return self.changelog.lookup(key)
150 return self.changelog.lookup(key)
151 except:
151 except:
152 raise repo.RepoError(_("unknown revision '%s'") % key)
152 raise repo.RepoError(_("unknown revision '%s'") % key)
153
153
154 def dev(self):
154 def dev(self):
155 return os.stat(self.path).st_dev
155 return os.stat(self.path).st_dev
156
156
157 def local(self):
157 def local(self):
158 return True
158 return True
159
159
160 def join(self, f):
160 def join(self, f):
161 return os.path.join(self.path, f)
161 return os.path.join(self.path, f)
162
162
163 def wjoin(self, f):
163 def wjoin(self, f):
164 return os.path.join(self.root, f)
164 return os.path.join(self.root, f)
165
165
166 def file(self, f):
166 def file(self, f):
167 if f[0] == '/':
167 if f[0] == '/':
168 f = f[1:]
168 f = f[1:]
169 return filelog.filelog(self.opener, f)
169 return filelog.filelog(self.opener, f)
170
170
171 def getcwd(self):
171 def getcwd(self):
172 return self.dirstate.getcwd()
172 return self.dirstate.getcwd()
173
173
174 def wfile(self, f, mode='r'):
174 def wfile(self, f, mode='r'):
175 return self.wopener(f, mode)
175 return self.wopener(f, mode)
176
176
177 def wread(self, filename):
177 def wread(self, filename):
178 if self.encodepats == None:
178 if self.encodepats == None:
179 l = []
179 l = []
180 for pat, cmd in self.ui.configitems("encode"):
180 for pat, cmd in self.ui.configitems("encode"):
181 mf = util.matcher(self.root, "", [pat], [], [])[1]
181 mf = util.matcher(self.root, "", [pat], [], [])[1]
182 l.append((mf, cmd))
182 l.append((mf, cmd))
183 self.encodepats = l
183 self.encodepats = l
184
184
185 data = self.wopener(filename, 'r').read()
185 data = self.wopener(filename, 'r').read()
186
186
187 for mf, cmd in self.encodepats:
187 for mf, cmd in self.encodepats:
188 if mf(filename):
188 if mf(filename):
189 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
189 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
190 data = util.filter(data, cmd)
190 data = util.filter(data, cmd)
191 break
191 break
192
192
193 return data
193 return data
194
194
195 def wwrite(self, filename, data, fd=None):
195 def wwrite(self, filename, data, fd=None):
196 if self.decodepats == None:
196 if self.decodepats == None:
197 l = []
197 l = []
198 for pat, cmd in self.ui.configitems("decode"):
198 for pat, cmd in self.ui.configitems("decode"):
199 mf = util.matcher(self.root, "", [pat], [], [])[1]
199 mf = util.matcher(self.root, "", [pat], [], [])[1]
200 l.append((mf, cmd))
200 l.append((mf, cmd))
201 self.decodepats = l
201 self.decodepats = l
202
202
203 for mf, cmd in self.decodepats:
203 for mf, cmd in self.decodepats:
204 if mf(filename):
204 if mf(filename):
205 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
205 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
206 data = util.filter(data, cmd)
206 data = util.filter(data, cmd)
207 break
207 break
208
208
209 if fd:
209 if fd:
210 return fd.write(data)
210 return fd.write(data)
211 return self.wopener(filename, 'w').write(data)
211 return self.wopener(filename, 'w').write(data)
212
212
    def transaction(self):
        # Return a transaction handle; nests inside an already-running
        # transaction when one exists on this repository object.
        tr = self.transhandle
        if tr != None and tr.running():
            return tr.nest()

        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (fresh repo): save an empty snapshot
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        # aftertrans renames journal* to undo* once the transaction closes
        tr = transaction.transaction(self.ui.warn, self.opener,
                                     self.join("journal"),
                                     aftertrans(self.path))
        self.transhandle = tr
        return tr
230
230
231 def recover(self):
231 def recover(self):
232 l = self.lock()
232 l = self.lock()
233 if os.path.exists(self.join("journal")):
233 if os.path.exists(self.join("journal")):
234 self.ui.status(_("rolling back interrupted transaction\n"))
234 self.ui.status(_("rolling back interrupted transaction\n"))
235 transaction.rollback(self.opener, self.join("journal"))
235 transaction.rollback(self.opener, self.join("journal"))
236 self.reload()
236 self.reload()
237 return True
237 return True
238 else:
238 else:
239 self.ui.warn(_("no interrupted transaction available\n"))
239 self.ui.warn(_("no interrupted transaction available\n"))
240 return False
240 return False
241
241
242 def undo(self, wlock=None):
242 def undo(self, wlock=None):
243 if not wlock:
243 if not wlock:
244 wlock = self.wlock()
244 wlock = self.wlock()
245 l = self.lock()
245 l = self.lock()
246 if os.path.exists(self.join("undo")):
246 if os.path.exists(self.join("undo")):
247 self.ui.status(_("rolling back last transaction\n"))
247 self.ui.status(_("rolling back last transaction\n"))
248 transaction.rollback(self.opener, self.join("undo"))
248 transaction.rollback(self.opener, self.join("undo"))
249 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
249 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
250 self.reload()
250 self.reload()
251 self.wreload()
251 self.wreload()
252 else:
252 else:
253 self.ui.warn(_("no undo information available\n"))
253 self.ui.warn(_("no undo information available\n"))
254
254
255 def wreload(self):
255 def wreload(self):
256 self.dirstate.read()
256 self.dirstate.read()
257
257
258 def reload(self):
258 def reload(self):
259 self.changelog.load()
259 self.changelog.load()
260 self.manifest.load()
260 self.manifest.load()
261 self.tagscache = None
261 self.tagscache = None
262 self.nodetagscache = None
262 self.nodetagscache = None
263
263
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
        # Acquire the named lock file.  With wait false a held lock raises
        # immediately; otherwise we warn, retry with a (configurable)
        # timeout, and abort if it still cannot be taken.
        try:
            l = lock.lock(self.join(lockname), 0, releasefn)
        except lock.LockHeld, inst:
            if not wait:
                raise inst
            self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
            try:
                # default to 600 seconds timeout
                l = lock.lock(self.join(lockname),
                              int(self.ui.config("ui", "timeout") or 600),
                              releasefn)
            except lock.LockHeld, inst:
                raise util.Abort(_("timeout while waiting for "
                                   "lock held by %s") % inst.args[0])
        # acquirefn lets callers refresh cached state once the lock is ours
        if acquirefn:
            acquirefn()
        return l
282
282
283 def lock(self, wait=1):
283 def lock(self, wait=1):
284 return self.do_lock("lock", wait, acquirefn=self.reload)
284 return self.do_lock("lock", wait, acquirefn=self.reload)
285
285
286 def wlock(self, wait=1):
286 def wlock(self, wait=1):
287 return self.do_lock("wlock", wait,
287 return self.do_lock("wlock", wait,
288 self.dirstate.write,
288 self.dirstate.write,
289 self.wreload)
289 self.wreload)
290
290
291 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
291 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
292 "determine whether a new filenode is needed"
292 "determine whether a new filenode is needed"
293 fp1 = manifest1.get(filename, nullid)
293 fp1 = manifest1.get(filename, nullid)
294 fp2 = manifest2.get(filename, nullid)
294 fp2 = manifest2.get(filename, nullid)
295
295
296 if fp2 != nullid:
296 if fp2 != nullid:
297 # is one parent an ancestor of the other?
297 # is one parent an ancestor of the other?
298 fpa = filelog.ancestor(fp1, fp2)
298 fpa = filelog.ancestor(fp1, fp2)
299 if fpa == fp1:
299 if fpa == fp1:
300 fp1, fp2 = fp2, nullid
300 fp1, fp2 = fp2, nullid
301 elif fpa == fp2:
301 elif fpa == fp2:
302 fp2 = nullid
302 fp2 = nullid
303
303
304 # is the file unmodified from the parent? report existing entry
304 # is the file unmodified from the parent? report existing entry
305 if fp2 == nullid and text == filelog.read(fp1):
305 if fp2 == nullid and text == filelog.read(fp1):
306 return (fp1, None, None)
306 return (fp1, None, None)
307
307
308 return (None, fp1, fp2)
308 return (None, fp1, fp2)
309
309
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
        # Commit the given files with the given metadata, bypassing the
        # usual working-directory scan (used by import/debug commands).
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # only touch the dirstate when committing on top of its parent
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                # reuse an existing filenode when nothing actually changed
                (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    mm[f] = entry
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # file missing from the working dir: treat as a removal
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
365
365
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None):
        # Commit changes from the working directory.  Returns the new
        # changeset node, or None when nothing was committed.
        commit = []
        remove = []
        changed = []

        if files:
            # explicit file list: classify each file by its dirstate state
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            modified, added, removed, deleted, unknown = self.changes(match=match)
            commit = modified + added
            remove = removed

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 set) may legitimately commit no file changes
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            # record copy/rename metadata from the dirstate
            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    new[f] = entry
                    continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1 = m1.copy()
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text:
            # no message given: build a template and open the user's editor
            edittext = [""]
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            edittext = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)
            # an empty message aborts the commit
            if not edittext.rstrip():
                return None
            text = edittext

        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
485
485
486 def walk(self, node=None, files=[], match=util.always):
486 def walk(self, node=None, files=[], match=util.always):
487 if node:
487 if node:
488 fdict = dict.fromkeys(files)
488 fdict = dict.fromkeys(files)
489 for fn in self.manifest.read(self.changelog.read(node)[0]):
489 for fn in self.manifest.read(self.changelog.read(node)[0]):
490 fdict.pop(fn, None)
490 fdict.pop(fn, None)
491 if match(fn):
491 if match(fn):
492 yield 'm', fn
492 yield 'm', fn
493 for fn in fdict:
493 for fn in fdict:
494 self.ui.warn(_('%s: No such file in rev %s\n') % (
494 self.ui.warn(_('%s: No such file in rev %s\n') % (
495 util.pathto(self.getcwd(), fn), short(node)))
495 util.pathto(self.getcwd(), fn), short(node)))
496 else:
496 else:
497 for src, fn in self.dirstate.walk(files, match):
497 for src, fn in self.dirstate.walk(files, match):
498 yield src, fn
498 yield src, fn
499
499
    def changes(self, node1=None, node2=None, files=[], match=util.always,
                wlock=None):
        """return changes between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def fcmp(fn, mf):
            # full content compare of a working-dir file vs its manifest rev
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            # manifest of *node* restricted to files accepted by match
            change = self.changelog.read(node)
            mf = dict(self.manifest.read(change[0]))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if node1:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                # try a non-blocking wlock so clean files can be marked
                # below; fall through without one if it is held
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    wlock = None
            lookup, modified, added, removed, deleted, unknown = (
                self.dirstate.changes(files, match))

            # are we comparing working dir against its parent?
            if not node1:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        elif wlock is not None:
                            # file proved clean: record that in the dirstate
                            # (only safe while holding the wlock)
                            self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            deleted, unknown = [], []
            mf2 = mfmatches(node2)

        if node1:
            # flush lists from dirstate before comparing manifests
            modified, added = [], []

            for fn in mf2:
                if mf1.has_key(fn):
                    # "" marks a working-dir pseudo-entry; fall back to a
                    # content compare for those
                    if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
                        modified.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever is left in mf1 was removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown:
            l.sort()
        return (modified, added, removed, deleted, unknown)
579
579
580 def add(self, list, wlock=None):
580 def add(self, list, wlock=None):
581 if not wlock:
581 if not wlock:
582 wlock = self.wlock()
582 wlock = self.wlock()
583 for f in list:
583 for f in list:
584 p = self.wjoin(f)
584 p = self.wjoin(f)
585 if not os.path.exists(p):
585 if not os.path.exists(p):
586 self.ui.warn(_("%s does not exist!\n") % f)
586 self.ui.warn(_("%s does not exist!\n") % f)
587 elif not os.path.isfile(p):
587 elif not os.path.isfile(p):
588 self.ui.warn(_("%s not added: only files supported currently\n")
588 self.ui.warn(_("%s not added: only files supported currently\n")
589 % f)
589 % f)
590 elif self.dirstate.state(f) in 'an':
590 elif self.dirstate.state(f) in 'an':
591 self.ui.warn(_("%s already tracked!\n") % f)
591 self.ui.warn(_("%s already tracked!\n") % f)
592 else:
592 else:
593 self.dirstate.update([f], "a")
593 self.dirstate.update([f], "a")
594
594
595 def forget(self, list, wlock=None):
595 def forget(self, list, wlock=None):
596 if not wlock:
596 if not wlock:
597 wlock = self.wlock()
597 wlock = self.wlock()
598 for f in list:
598 for f in list:
599 if self.dirstate.state(f) not in 'ai':
599 if self.dirstate.state(f) not in 'ai':
600 self.ui.warn(_("%s not added!\n") % f)
600 self.ui.warn(_("%s not added!\n") % f)
601 else:
601 else:
602 self.dirstate.forget([f])
602 self.dirstate.forget([f])
603
603
604 def remove(self, list, unlink=False, wlock=None):
604 def remove(self, list, unlink=False, wlock=None):
605 if unlink:
605 if unlink:
606 for f in list:
606 for f in list:
607 try:
607 try:
608 util.unlink(self.wjoin(f))
608 util.unlink(self.wjoin(f))
609 except OSError, inst:
609 except OSError, inst:
610 if inst.errno != errno.ENOENT:
610 if inst.errno != errno.ENOENT:
611 raise
611 raise
612 if not wlock:
612 if not wlock:
613 wlock = self.wlock()
613 wlock = self.wlock()
614 for f in list:
614 for f in list:
615 p = self.wjoin(f)
615 p = self.wjoin(f)
616 if os.path.exists(p):
616 if os.path.exists(p):
617 self.ui.warn(_("%s still exists!\n") % f)
617 self.ui.warn(_("%s still exists!\n") % f)
618 elif self.dirstate.state(f) == 'a':
618 elif self.dirstate.state(f) == 'a':
619 self.dirstate.forget([f])
619 self.dirstate.forget([f])
620 elif f not in self.dirstate:
620 elif f not in self.dirstate:
621 self.ui.warn(_("%s not tracked!\n") % f)
621 self.ui.warn(_("%s not tracked!\n") % f)
622 else:
622 else:
623 self.dirstate.update([f], "r")
623 self.dirstate.update([f], "r")
624
624
625 def undelete(self, list, wlock=None):
625 def undelete(self, list, wlock=None):
626 p = self.dirstate.parents()[0]
626 p = self.dirstate.parents()[0]
627 mn = self.changelog.read(p)[0]
627 mn = self.changelog.read(p)[0]
628 mf = self.manifest.readflags(mn)
628 mf = self.manifest.readflags(mn)
629 m = self.manifest.read(mn)
629 m = self.manifest.read(mn)
630 if not wlock:
630 if not wlock:
631 wlock = self.wlock()
631 wlock = self.wlock()
632 for f in list:
632 for f in list:
633 if self.dirstate.state(f) not in "r":
633 if self.dirstate.state(f) not in "r":
634 self.ui.warn("%s not removed!\n" % f)
634 self.ui.warn("%s not removed!\n" % f)
635 else:
635 else:
636 t = self.file(f).read(m[f])
636 t = self.file(f).read(m[f])
637 self.wwrite(f, t)
637 self.wwrite(f, t)
638 util.set_exec(self.wjoin(f), mf[f])
638 util.set_exec(self.wjoin(f), mf[f])
639 self.dirstate.update([f], "n")
639 self.dirstate.update([f], "n")
640
640
641 def copy(self, source, dest, wlock=None):
641 def copy(self, source, dest, wlock=None):
642 p = self.wjoin(dest)
642 p = self.wjoin(dest)
643 if not os.path.exists(p):
643 if not os.path.exists(p):
644 self.ui.warn(_("%s does not exist!\n") % dest)
644 self.ui.warn(_("%s does not exist!\n") % dest)
645 elif not os.path.isfile(p):
645 elif not os.path.isfile(p):
646 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
646 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
647 else:
647 else:
648 if not wlock:
648 if not wlock:
649 wlock = self.wlock()
649 wlock = self.wlock()
650 if self.dirstate.state(dest) == '?':
650 if self.dirstate.state(dest) == '?':
651 self.dirstate.update([dest], "a")
651 self.dirstate.update([dest], "a")
652 self.dirstate.copy(source, dest)
652 self.dirstate.copy(source, dest)
653
653
654 def heads(self, start=None):
654 def heads(self, start=None):
655 heads = self.changelog.heads(start)
655 heads = self.changelog.heads(start)
656 # sort the output in rev descending order
656 # sort the output in rev descending order
657 heads = [(-self.changelog.rev(h), h) for h in heads]
657 heads = [(-self.changelog.rev(h), h) for h in heads]
658 heads.sort()
658 heads.sort()
659 return [n for (r, n) in heads]
659 return [n for (r, n) in heads]
660
660
661 # branchlookup returns a dict giving a list of branches for
661 # branchlookup returns a dict giving a list of branches for
662 # each head. A branch is defined as the tag of a node or
662 # each head. A branch is defined as the tag of a node or
663 # the branch of the node's parents. If a node has multiple
663 # the branch of the node's parents. If a node has multiple
664 # branch tags, tags are eliminated if they are visible from other
664 # branch tags, tags are eliminated if they are visible from other
665 # branch tags.
665 # branch tags.
666 #
666 #
667 # So, for this graph: a->b->c->d->e
667 # So, for this graph: a->b->c->d->e
668 # \ /
668 # \ /
669 # aa -----/
669 # aa -----/
670 # a has tag 2.6.12
670 # a has tag 2.6.12
671 # d has tag 2.6.13
671 # d has tag 2.6.13
672 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
672 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
673 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
673 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
674 # from the list.
674 # from the list.
675 #
675 #
676 # It is possible that more than one head will have the same branch tag.
676 # It is possible that more than one head will have the same branch tag.
677 # callers need to check the result for multiple heads under the same
677 # callers need to check the result for multiple heads under the same
678 # branch tag if that is a problem for them (ie checkout of a specific
678 # branch tag if that is a problem for them (ie checkout of a specific
679 # branch).
679 # branch).
680 #
680 #
681 # passing in a specific branch will limit the depth of the search
681 # passing in a specific branch will limit the depth of the search
682 # through the parents. It won't limit the branches returned in the
682 # through the parents. It won't limit the branches returned in the
683 # result though.
683 # result though.
    def branchlookup(self, heads=None, branch=None):
        """Return a dict mapping each head to the list of branch tags
        visible from it, with tags shadowed by other visible tags
        eliminated (see the comment block above this method).

        heads defaults to self.heads(); passing branch stops the parent
        walk once that tag is reached.
        """
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}      # node -> {tagged node: 1} visible-from map
        merges = []        # second parents of merges still to walk
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            # 'tip' is not a branch name
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch; stop this walk
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # queue the merge's second parent for a later pass
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}      # memoizes visible() across heads
        for h in heads:
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
766
766
767 def branches(self, nodes):
767 def branches(self, nodes):
768 if not nodes:
768 if not nodes:
769 nodes = [self.changelog.tip()]
769 nodes = [self.changelog.tip()]
770 b = []
770 b = []
771 for n in nodes:
771 for n in nodes:
772 t = n
772 t = n
773 while n:
773 while n:
774 p = self.changelog.parents(n)
774 p = self.changelog.parents(n)
775 if p[1] != nullid or p[0] == nullid:
775 if p[1] != nullid or p[0] == nullid:
776 b.append((t, n, p[0], p[1]))
776 b.append((t, n, p[0], p[1]))
777 break
777 break
778 n = p[0]
778 n = p[0]
779 return b
779 return b
780
780
781 def between(self, pairs):
781 def between(self, pairs):
782 r = []
782 r = []
783
783
784 for top, bottom in pairs:
784 for top, bottom in pairs:
785 n, l, i = top, [], 0
785 n, l, i = top, [], 0
786 f = 1
786 f = 1
787
787
788 while n != bottom:
788 while n != bottom:
789 p = self.changelog.parents(n)[0]
789 p = self.changelog.parents(n)[0]
790 if i == f:
790 if i == f:
791 l.append(n)
791 l.append(n)
792 f = f * 2
792 f = f * 2
793 n = p
793 n = p
794 i += 1
794 i += 1
795
795
796 r.append(l)
796 r.append(l)
797
797
798 return r
798 return r
799
799
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return the list of roots of changesets we are missing from
        remote, discovered with the branches/between wire protocol.

        If base is a dict it is filled in with nodes found to be common
        to both repositories.  heads limits the remote heads examined
        (defaults to remote.heads()).  Unless force is set, aborts when
        the repositories turn out to be unrelated.
        """
        m = self.changelog.nodemap
        search = []        # (head, base) ranges to binary-search
        fetch = {}         # earliest unknown changesets (the result)
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        if not heads:
            heads = remote.heads()

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            # every remote head is already known locally
            return []

        rep = {}           # nodes already requested from remote
        reqcnt = 0         # number of wire requests, for debug output

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    # queue this branch's parents for the next request
                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                # batch branch queries ten nodes at a time
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        if b[0] in m:
                            self.ui.debug(_("found base node %s\n")
                                          % short(b[0]))
                            base[b[0]] = 1
                        elif b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most two: p is the earliest unknown
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        # narrow the range and search it again
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            # only the null node is common: unrelated repositories
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.note(_("found new changesets starting at ") +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
924
924
925 def findoutgoing(self, remote, base=None, heads=None, force=False):
925 def findoutgoing(self, remote, base=None, heads=None, force=False):
926 if base == None:
926 if base == None:
927 base = {}
927 base = {}
928 self.findincoming(remote, base, heads, force=force)
928 self.findincoming(remote, base, heads, force=force)
929
929
930 self.ui.debug(_("common changesets up to ")
930 self.ui.debug(_("common changesets up to ")
931 + " ".join(map(short, base.keys())) + "\n")
931 + " ".join(map(short, base.keys())) + "\n")
932
932
933 remain = dict.fromkeys(self.changelog.nodemap)
933 remain = dict.fromkeys(self.changelog.nodemap)
934
934
935 # prune everything remote has from the tree
935 # prune everything remote has from the tree
936 del remain[nullid]
936 del remain[nullid]
937 remove = base.keys()
937 remove = base.keys()
938 while remove:
938 while remove:
939 n = remove.pop(0)
939 n = remove.pop(0)
940 if n in remain:
940 if n in remain:
941 del remain[n]
941 del remain[n]
942 for p in self.changelog.parents(n):
942 for p in self.changelog.parents(n):
943 remove.append(p)
943 remove.append(p)
944
944
945 # find every node whose parents have been pruned
945 # find every node whose parents have been pruned
946 subset = []
946 subset = []
947 for n in remain:
947 for n in remain:
948 p1, p2 = self.changelog.parents(n)
948 p1, p2 = self.changelog.parents(n)
949 if p1 not in remain and p2 not in remain:
949 if p1 not in remain and p2 not in remain:
950 subset.append(n)
950 subset.append(n)
951
951
952 # this is the set of all roots we have to push
952 # this is the set of all roots we have to push
953 return subset
953 return subset
954
954
955 def pull(self, remote, heads=None, force=False):
955 def pull(self, remote, heads=None, force=False):
956 l = self.lock()
956 l = self.lock()
957
957
958 # if we have an empty repo, fetch everything
958 # if we have an empty repo, fetch everything
959 if self.changelog.tip() == nullid:
959 if self.changelog.tip() == nullid:
960 self.ui.status(_("requesting all changes\n"))
960 self.ui.status(_("requesting all changes\n"))
961 fetch = [nullid]
961 fetch = [nullid]
962 else:
962 else:
963 fetch = self.findincoming(remote, force=force)
963 fetch = self.findincoming(remote, force=force)
964
964
965 if not fetch:
965 if not fetch:
966 self.ui.status(_("no changes found\n"))
966 self.ui.status(_("no changes found\n"))
967 return 1
967 return 1
968
968
969 if heads is None:
969 if heads is None:
970 cg = remote.changegroup(fetch, 'pull')
970 cg = remote.changegroup(fetch, 'pull')
971 else:
971 else:
972 cg = remote.changegroupsubset(fetch, heads, 'pull')
972 cg = remote.changegroupsubset(fetch, heads, 'pull')
973 return self.addchangegroup(cg)
973 return self.addchangegroup(cg)
974
974
975 def push(self, remote, force=False, revs=None):
975 def push(self, remote, force=False, revs=None):
976 lock = remote.lock()
976 lock = remote.lock()
977
977
978 base = {}
978 base = {}
979 heads = remote.heads()
979 heads = remote.heads()
980 inc = self.findincoming(remote, base, heads, force=force)
980 inc = self.findincoming(remote, base, heads, force=force)
981 if not force and inc:
981 if not force and inc:
982 self.ui.warn(_("abort: unsynced remote changes!\n"))
982 self.ui.warn(_("abort: unsynced remote changes!\n"))
983 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
983 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
984 return 1
984 return 1
985
985
986 update = self.findoutgoing(remote, base)
986 update = self.findoutgoing(remote, base)
987 if revs is not None:
987 if revs is not None:
988 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
988 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
989 else:
989 else:
990 bases, heads = update, self.changelog.heads()
990 bases, heads = update, self.changelog.heads()
991
991
992 if not bases:
992 if not bases:
993 self.ui.status(_("no changes found\n"))
993 self.ui.status(_("no changes found\n"))
994 return 1
994 return 1
995 elif not force:
995 elif not force:
996 if len(bases) < len(heads):
996 if len(bases) < len(heads):
997 self.ui.warn(_("abort: push creates new remote branches!\n"))
997 self.ui.warn(_("abort: push creates new remote branches!\n"))
998 self.ui.status(_("(did you forget to merge?"
998 self.ui.status(_("(did you forget to merge?"
999 " use push -f to force)\n"))
999 " use push -f to force)\n"))
1000 return 1
1000 return 1
1001
1001
1002 if revs is None:
1002 if revs is None:
1003 cg = self.changegroup(update, 'push')
1003 cg = self.changegroup(update, 'push')
1004 else:
1004 else:
1005 cg = self.changegroupsubset(update, revs, 'push')
1005 cg = self.changegroupsubset(update, revs, 'push')
1006 return remote.addchangegroup(cg)
1006 return remote.addchangegroup(cg)
1007
1007
1008 def changegroupsubset(self, bases, heads, source):
1008 def changegroupsubset(self, bases, heads, source):
1009 """This function generates a changegroup consisting of all the nodes
1009 """This function generates a changegroup consisting of all the nodes
1010 that are descendents of any of the bases, and ancestors of any of
1010 that are descendents of any of the bases, and ancestors of any of
1011 the heads.
1011 the heads.
1012
1012
1013 It is fairly complex as determining which filenodes and which
1013 It is fairly complex as determining which filenodes and which
1014 manifest nodes need to be included for the changeset to be complete
1014 manifest nodes need to be included for the changeset to be complete
1015 is non-trivial.
1015 is non-trivial.
1016
1016
1017 Another wrinkle is doing the reverse, figuring out which changeset in
1017 Another wrinkle is doing the reverse, figuring out which changeset in
1018 the changegroup a particular filenode or manifestnode belongs to."""
1018 the changegroup a particular filenode or manifestnode belongs to."""
1019
1019
1020 self.hook('preoutgoing', throw=True, source=source)
1020 self.hook('preoutgoing', throw=True, source=source)
1021
1021
1022 # Set up some initial variables
1022 # Set up some initial variables
1023 # Make it easy to refer to self.changelog
1023 # Make it easy to refer to self.changelog
1024 cl = self.changelog
1024 cl = self.changelog
1025 # msng is short for missing - compute the list of changesets in this
1025 # msng is short for missing - compute the list of changesets in this
1026 # changegroup.
1026 # changegroup.
1027 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1027 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1028 # Some bases may turn out to be superfluous, and some heads may be
1028 # Some bases may turn out to be superfluous, and some heads may be
1029 # too. nodesbetween will return the minimal set of bases and heads
1029 # too. nodesbetween will return the minimal set of bases and heads
1030 # necessary to re-create the changegroup.
1030 # necessary to re-create the changegroup.
1031
1031
1032 # Known heads are the list of heads that it is assumed the recipient
1032 # Known heads are the list of heads that it is assumed the recipient
1033 # of this changegroup will know about.
1033 # of this changegroup will know about.
1034 knownheads = {}
1034 knownheads = {}
1035 # We assume that all parents of bases are known heads.
1035 # We assume that all parents of bases are known heads.
1036 for n in bases:
1036 for n in bases:
1037 for p in cl.parents(n):
1037 for p in cl.parents(n):
1038 if p != nullid:
1038 if p != nullid:
1039 knownheads[p] = 1
1039 knownheads[p] = 1
1040 knownheads = knownheads.keys()
1040 knownheads = knownheads.keys()
1041 if knownheads:
1041 if knownheads:
1042 # Now that we know what heads are known, we can compute which
1042 # Now that we know what heads are known, we can compute which
1043 # changesets are known. The recipient must know about all
1043 # changesets are known. The recipient must know about all
1044 # changesets required to reach the known heads from the null
1044 # changesets required to reach the known heads from the null
1045 # changeset.
1045 # changeset.
1046 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1046 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1047 junk = None
1047 junk = None
1048 # Transform the list into an ersatz set.
1048 # Transform the list into an ersatz set.
1049 has_cl_set = dict.fromkeys(has_cl_set)
1049 has_cl_set = dict.fromkeys(has_cl_set)
1050 else:
1050 else:
1051 # If there were no known heads, the recipient cannot be assumed to
1051 # If there were no known heads, the recipient cannot be assumed to
1052 # know about any changesets.
1052 # know about any changesets.
1053 has_cl_set = {}
1053 has_cl_set = {}
1054
1054
1055 # Make it easy to refer to self.manifest
1055 # Make it easy to refer to self.manifest
1056 mnfst = self.manifest
1056 mnfst = self.manifest
1057 # We don't know which manifests are missing yet
1057 # We don't know which manifests are missing yet
1058 msng_mnfst_set = {}
1058 msng_mnfst_set = {}
1059 # Nor do we know which filenodes are missing.
1059 # Nor do we know which filenodes are missing.
1060 msng_filenode_set = {}
1060 msng_filenode_set = {}
1061
1061
1062 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1062 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1063 junk = None
1063 junk = None
1064
1064
1065 # A changeset always belongs to itself, so the changenode lookup
1065 # A changeset always belongs to itself, so the changenode lookup
1066 # function for a changenode is identity.
1066 # function for a changenode is identity.
1067 def identity(x):
1067 def identity(x):
1068 return x
1068 return x
1069
1069
1070 # A function generating function. Sets up an environment for the
1070 # A function generating function. Sets up an environment for the
1071 # inner function.
1071 # inner function.
1072 def cmp_by_rev_func(revlog):
1072 def cmp_by_rev_func(revlog):
1073 # Compare two nodes by their revision number in the environment's
1073 # Compare two nodes by their revision number in the environment's
1074 # revision history. Since the revision number both represents the
1074 # revision history. Since the revision number both represents the
1075 # most efficient order to read the nodes in, and represents a
1075 # most efficient order to read the nodes in, and represents a
1076 # topological sorting of the nodes, this function is often useful.
1076 # topological sorting of the nodes, this function is often useful.
1077 def cmp_by_rev(a, b):
1077 def cmp_by_rev(a, b):
1078 return cmp(revlog.rev(a), revlog.rev(b))
1078 return cmp(revlog.rev(a), revlog.rev(b))
1079 return cmp_by_rev
1079 return cmp_by_rev
1080
1080
1081 # If we determine that a particular file or manifest node must be a
1081 # If we determine that a particular file or manifest node must be a
1082 # node that the recipient of the changegroup will already have, we can
1082 # node that the recipient of the changegroup will already have, we can
1083 # also assume the recipient will have all the parents. This function
1083 # also assume the recipient will have all the parents. This function
1084 # prunes them from the set of missing nodes.
1084 # prunes them from the set of missing nodes.
1085 def prune_parents(revlog, hasset, msngset):
1085 def prune_parents(revlog, hasset, msngset):
1086 haslst = hasset.keys()
1086 haslst = hasset.keys()
1087 haslst.sort(cmp_by_rev_func(revlog))
1087 haslst.sort(cmp_by_rev_func(revlog))
1088 for node in haslst:
1088 for node in haslst:
1089 parentlst = [p for p in revlog.parents(node) if p != nullid]
1089 parentlst = [p for p in revlog.parents(node) if p != nullid]
1090 while parentlst:
1090 while parentlst:
1091 n = parentlst.pop()
1091 n = parentlst.pop()
1092 if n not in hasset:
1092 if n not in hasset:
1093 hasset[n] = 1
1093 hasset[n] = 1
1094 p = [p for p in revlog.parents(n) if p != nullid]
1094 p = [p for p in revlog.parents(n) if p != nullid]
1095 parentlst.extend(p)
1095 parentlst.extend(p)
1096 for n in hasset:
1096 for n in hasset:
1097 msngset.pop(n, None)
1097 msngset.pop(n, None)
1098
1098
# Factory building the callback passed to cl.group() while the
# changelog section of the changegroup is generated.  For each
# outgoing changeset it records:
#   * every filename that changeset touched (into changedfileset), and
#   * the first changenode seen to reference each manifest node (into
#     msng_mnfst_set) -- later used as the manifest's "owning"
#     changenode.
def manifest_and_file_collector(changedfileset):
    def collect_manifests_and_files(clnode):
        entry = cl.read(clnode)
        for fname in entry[3]:
            # setdefault keeps exactly one string instance per
            # filename across all changesets.
            changedfileset.setdefault(fname, fname)
        # entry[0] is this changeset's manifest node; remember the
        # first changenode that referenced it.
        msng_mnfst_set.setdefault(entry[0], clnode)
    return collect_manifests_and_files
1120
1120
# Drop from msng_mnfst_set every manifest node the recipient must
# already know: any manifest whose linked changeset is in has_cl_set,
# plus (via prune_parents) all ancestors of those manifests.
def prune_manifests():
    known = {}
    for node in msng_mnfst_set:
        # The changeset this manifest revision was introduced by; if
        # the recipient has that changeset, it has this manifest too.
        owner = cl.node(mnfst.linkrev(node))
        if owner in has_cl_set:
            known[node] = 1
    prune_parents(mnfst, known, msng_mnfst_set)
1134
1134
# Lookup callback for mnfst.group(): map a manifest node back to the
# changenode recorded as its owner by collect_manifests_and_files.
# Raises KeyError for nodes never seen by the collector (intentional:
# such a node would indicate an inconsistent changegroup).
def lookup_manifest_link(mnfstnode):
    return msng_mnfst_set[mnfstnode]
1139
1139
# Factory for the callback handed to mnfst.group().  For every
# manifest node sent out it discovers which filenodes that manifest
# references (so the matching file revisions can be shipped too) and
# records, in msng_filenode_set, the changenode each filenode is
# attributed to: the owner of the first manifest that mentioned it.
def filenode_collector(changedfiles):
    # One-element list used as a mutable cell so the closure can
    # detect whether manifest revisions arrive consecutively.
    next_rev = [0]

    def collect_msng_filenodes(mnfstnode):
        r = mnfst.rev(mnfstnode)
        if r == next_rev[0]:
            # Consecutive with the previous revision: a delta against
            # it suffices.  Each delta line is "<file>\0<hex node>".
            delta = mdiff.patchtext(mnfst.delta(mnfstnode))
            for dline in delta.splitlines():
                fname, hexnode = dline.split('\0')
                fnode = bin(hexnode[:40])
                # Only files this changegroup actually touches matter;
                # the .get also swaps in the interned filename string.
                fname = changedfiles.get(fname, None)
                if fname is not None:
                    # The changenode this manifest belongs to.
                    clnode = msng_mnfst_set[mnfstnode]
                    ndset = msng_filenode_set.setdefault(fname, {})
                    # First manifest to mention the filenode wins.
                    ndset.setdefault(fnode, clnode)
        else:
            # Non-consecutive revision: fall back to reading the full
            # manifest and probing every changed file.
            m = mnfst.read(mnfstnode)
            for fname in changedfiles:
                fnode = m.get(fname, None)
                if fnode is not None:
                    # See comments in the delta branch above.
                    clnode = msng_mnfst_set[mnfstnode]
                    ndset = msng_filenode_set.setdefault(fname, {})
                    ndset.setdefault(fnode, clnode)
        # Remember the revision we hope to see next.
        next_rev[0] = r + 1

    return collect_msng_filenodes
1189
1189
# Remove from msng_filenode_set[f] every filenode the recipient must
# already have: those whose linked changeset is in has_cl_set, plus
# (via prune_parents) all of their ancestors.
def prune_filenodes(f, filerevlog):
    msngset = msng_filenode_set[f]
    hasset = {}
    for node in msngset:
        # Changeset that introduced this file revision; if the
        # recipient has it, it has the filenode as well.
        if cl.node(filerevlog.linkrev(node)) in has_cl_set:
            hasset[node] = 1
    prune_parents(filerevlog, hasset, msngset)
1203
1203
# Factory producing the per-file lookup callback for
# filerevlog.group(): maps a filenode to the changenode it was
# attributed to by collect_msng_filenodes.  Binding the per-file dict
# once here keeps the inner lookup to a single dict access.
def lookup_filenode_link_func(fname):
    msngset = msng_filenode_set[fname]

    def lookup_filenode_link(fnode):
        return msngset[fnode]

    return lookup_filenode_link
1212
1212
# With the helper callbacks above in place, generate the changegroup
# stream: the changelog section, then the manifest section, then one
# section per changed file, terminated by a close chunk.
def gengroup():
    # Filled in as a side effect by manifest_and_file_collector while
    # the changelog chunks are produced.
    changedfiles = {}
    for chnk in cl.group(msng_cl_lst, identity,
                         manifest_and_file_collector(changedfiles)):
        yield chnk

    # Manifest section: first discard manifests the recipient is not
    # actually missing, then emit the rest in revision order.
    prune_manifests()
    msng_mnfst_lst = msng_mnfst_set.keys()
    msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
    for chnk in mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                            filenode_collector(changedfiles)):
        yield chnk

    # No longer needed -- drop the references to free memory.
    msng_mnfst_lst = None
    msng_mnfst_set.clear()

    # File sections, in sorted filename order.
    changedfiles = changedfiles.keys()
    changedfiles.sort()
    for fname in changedfiles:
        filerevlog = self.file(fname)
        # Toss out the filenodes the recipient isn't really missing.
        if msng_filenode_set.has_key(fname):
            prune_filenodes(fname, filerevlog)
            msng_filenode_lst = msng_filenode_set[fname].keys()
        else:
            msng_filenode_lst = []
        # Only emit a section when something is left to send.
        if msng_filenode_lst:
            yield changegroup.genchunk(fname)
            # Sort the filenodes by their revision number.
            msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
            # Only a changenode-lookup callback is needed here; no
            # extra information has to be collected from filenodes.
            for chnk in filerevlog.group(msng_filenode_lst,
                                         lookup_filenode_link_func(fname)):
                yield chnk
        if msng_filenode_set.has_key(fname):
            # Don't need this anymore, toss it to free memory.
            del msng_filenode_set[fname]
    # Signal that no more groups are left.
    yield changegroup.closechunk()
1273
1273
1274 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1274 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1275
1275
1276 return util.chunkbuffer(gengroup())
1276 return util.chunkbuffer(gengroup())
1277
1277
def changegroup(self, basenodes, source):
    """Generate a changegroup of all nodes that we have that a recipient
    doesn't.

    This is much easier than the previous function as we can assume that
    the recipient has any changenode we aren't sending them."""

    self.hook('preoutgoing', throw=True, source=source)

    cl = self.changelog
    nodes = cl.nodesbetween(basenodes, None)[0]
    # Revision numbers of every outgoing changeset, as a dict for
    # O(1) membership tests.
    revset = dict.fromkeys([cl.rev(n) for n in nodes])

    def identity(x):
        return x

    # Yield the nodes of a revlog whose linked changeset is outgoing.
    def gennodelst(revlog):
        for r in xrange(0, revlog.count()):
            n = revlog.node(r)
            if revlog.linkrev(n) in revset:
                yield n

    # Callback for cl.group(): record every file touched by an
    # outgoing changeset.
    def changed_file_collector(changedfileset):
        def collect_changed_files(clnode):
            for fname in cl.read(clnode)[3]:
                changedfileset[fname] = 1
        return collect_changed_files

    # Per-revlog lookup callback: revlog node -> owning changenode.
    def lookuprevlink_func(revlog):
        def lookuprevlink(n):
            return cl.node(revlog.linkrev(n))
        return lookuprevlink

    def gengroup():
        # Changelog section; the collector fills changedfiles as a
        # side effect while the chunks stream out.
        changedfiles = {}
        for chnk in cl.group(nodes, identity,
                             changed_file_collector(changedfiles)):
            yield chnk
        changedfiles = changedfiles.keys()
        changedfiles.sort()

        # Manifest section.
        mnfst = self.manifest
        for chnk in mnfst.group(gennodelst(mnfst),
                                lookuprevlink_func(mnfst)):
            yield chnk

        # One section per changed file; skip files with nothing to
        # send (the node list must be materialized to test that).
        for fname in changedfiles:
            filerevlog = self.file(fname)
            nodeiter = list(gennodelst(filerevlog))
            if nodeiter:
                yield changegroup.genchunk(fname)
                lookup = lookuprevlink_func(filerevlog)
                for chnk in filerevlog.group(nodeiter, lookup):
                    yield chnk

        # End-of-stream marker.
        yield changegroup.closechunk()

    # NOTE(review): nodes[0] raises IndexError when nothing is
    # outgoing -- presumably callers only invoke this when there is
    # something to send; confirm against call sites.
    self.hook('outgoing', node=hex(nodes[0]), source=source)

    return util.chunkbuffer(gengroup())
1341
1341
def addchangegroup(self, source):
    """Apply a changegroup read from *source* to the repository.

    Changelog and manifest data are staged through appendfile
    wrappers and only written out (manifest first, then changelog) at
    the end, so concurrent readers never observe a partially-applied
    changegroup.
    """

    # Both callbacks close over 'cl', assigned below before any
    # chunks are processed.
    def csmap(x):
        self.ui.debug(_("add changeset %s\n") % short(x))
        return cl.count()

    def revmap(x):
        return cl.rev(x)

    if not source:
        return

    self.hook('prechangegroup', throw=True)

    changesets = files = revisions = 0

    tr = self.transaction()

    # write changelog and manifest data to temp files so
    # concurrent readers will not see inconsistent view
    cl = appendfile.appendchangelog(self.opener)

    oldheads = len(cl.heads())

    # pull off the changeset group
    self.ui.status(_("adding changesets\n"))
    co = cl.tip()
    chunkiter = changegroup.chunkiter(source)
    cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
    cnr, cor = map(cl.rev, (cn, co))
    if cn == nullid:
        # Nothing was added; make the revision span empty.
        cnr = cor
    changesets = cnr - cor

    mf = appendfile.appendmanifest(self.opener)

    # pull off the manifest group
    self.ui.status(_("adding manifests\n"))
    mm = mf.tip()
    chunkiter = changegroup.chunkiter(source)
    mo = mf.addgroup(chunkiter, revmap, tr)

    # process the files
    self.ui.status(_("adding file changes\n"))
    while 1:
        f = changegroup.getchunk(source)
        if not f:
            break
        self.ui.debug(_("adding %s revisions\n") % f)
        fl = self.file(f)
        o = fl.count()
        chunkiter = changegroup.chunkiter(source)
        n = fl.addgroup(chunkiter, revmap, tr)
        revisions += fl.count() - o
        files += 1

    # write order here is important so concurrent readers will see
    # consistent view of repo
    mf.writedata()
    cl.writedata()

    # make changelog and manifest see real files again
    self.changelog = changelog.changelog(self.opener)
    self.manifest = manifest.manifest(self.opener)

    newheads = len(self.changelog.heads())
    heads = ""
    if oldheads and newheads > oldheads:
        heads = _(" (+%d heads)") % (newheads - oldheads)

    self.ui.status(_("added %d changesets"
                     " with %d changes to %d files%s\n")
                   % (changesets, revisions, files, heads))

    self.hook('pretxnchangegroup', throw=True,
              node=hex(self.changelog.node(cor+1)))

    tr.close()

    if changesets > 0:
        self.hook("changegroup", node=hex(self.changelog.node(cor+1)))

    # Empty range when no changesets were added (cnr == cor).
    for i in range(cor + 1, cnr + 1):
        self.hook("incoming", node=hex(self.changelog.node(i)))
1426
1412 def update(self, node, allow=False, force=False, choose=None,
1427 def update(self, node, allow=False, force=False, choose=None,
1413 moddirstate=True, forcemerge=False, wlock=None):
1428 moddirstate=True, forcemerge=False, wlock=None):
1414 pl = self.dirstate.parents()
1429 pl = self.dirstate.parents()
1415 if not force and pl[1] != nullid:
1430 if not force and pl[1] != nullid:
1416 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1431 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1417 return 1
1432 return 1
1418
1433
1419 err = False
1434 err = False
1420
1435
1421 p1, p2 = pl[0], node
1436 p1, p2 = pl[0], node
1422 pa = self.changelog.ancestor(p1, p2)
1437 pa = self.changelog.ancestor(p1, p2)
1423 m1n = self.changelog.read(p1)[0]
1438 m1n = self.changelog.read(p1)[0]
1424 m2n = self.changelog.read(p2)[0]
1439 m2n = self.changelog.read(p2)[0]
1425 man = self.manifest.ancestor(m1n, m2n)
1440 man = self.manifest.ancestor(m1n, m2n)
1426 m1 = self.manifest.read(m1n)
1441 m1 = self.manifest.read(m1n)
1427 mf1 = self.manifest.readflags(m1n)
1442 mf1 = self.manifest.readflags(m1n)
1428 m2 = self.manifest.read(m2n).copy()
1443 m2 = self.manifest.read(m2n).copy()
1429 mf2 = self.manifest.readflags(m2n)
1444 mf2 = self.manifest.readflags(m2n)
1430 ma = self.manifest.read(man)
1445 ma = self.manifest.read(man)
1431 mfa = self.manifest.readflags(man)
1446 mfa = self.manifest.readflags(man)
1432
1447
1433 modified, added, removed, deleted, unknown = self.changes()
1448 modified, added, removed, deleted, unknown = self.changes()
1434
1449
1435 # is this a jump, or a merge? i.e. is there a linear path
1450 # is this a jump, or a merge? i.e. is there a linear path
1436 # from p1 to p2?
1451 # from p1 to p2?
1437 linear_path = (pa == p1 or pa == p2)
1452 linear_path = (pa == p1 or pa == p2)
1438
1453
1439 if allow and linear_path:
1454 if allow and linear_path:
1440 raise util.Abort(_("there is nothing to merge, "
1455 raise util.Abort(_("there is nothing to merge, "
1441 "just use 'hg update'"))
1456 "just use 'hg update'"))
1442 if allow and not forcemerge:
1457 if allow and not forcemerge:
1443 if modified or added or removed:
1458 if modified or added or removed:
1444 raise util.Abort(_("outstanding uncommitted changes"))
1459 raise util.Abort(_("outstanding uncommitted changes"))
1445 if not forcemerge and not force:
1460 if not forcemerge and not force:
1446 for f in unknown:
1461 for f in unknown:
1447 if f in m2:
1462 if f in m2:
1448 t1 = self.wread(f)
1463 t1 = self.wread(f)
1449 t2 = self.file(f).read(m2[f])
1464 t2 = self.file(f).read(m2[f])
1450 if cmp(t1, t2) != 0:
1465 if cmp(t1, t2) != 0:
1451 raise util.Abort(_("'%s' already exists in the working"
1466 raise util.Abort(_("'%s' already exists in the working"
1452 " dir and differs from remote") % f)
1467 " dir and differs from remote") % f)
1453
1468
1454 # resolve the manifest to determine which files
1469 # resolve the manifest to determine which files
1455 # we care about merging
1470 # we care about merging
1456 self.ui.note(_("resolving manifests\n"))
1471 self.ui.note(_("resolving manifests\n"))
1457 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1472 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1458 (force, allow, moddirstate, linear_path))
1473 (force, allow, moddirstate, linear_path))
1459 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1474 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1460 (short(man), short(m1n), short(m2n)))
1475 (short(man), short(m1n), short(m2n)))
1461
1476
1462 merge = {}
1477 merge = {}
1463 get = {}
1478 get = {}
1464 remove = []
1479 remove = []
1465
1480
1466 # construct a working dir manifest
1481 # construct a working dir manifest
1467 mw = m1.copy()
1482 mw = m1.copy()
1468 mfw = mf1.copy()
1483 mfw = mf1.copy()
1469 umap = dict.fromkeys(unknown)
1484 umap = dict.fromkeys(unknown)
1470
1485
1471 for f in added + modified + unknown:
1486 for f in added + modified + unknown:
1472 mw[f] = ""
1487 mw[f] = ""
1473 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1488 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1474
1489
1475 if moddirstate and not wlock:
1490 if moddirstate and not wlock:
1476 wlock = self.wlock()
1491 wlock = self.wlock()
1477
1492
1478 for f in deleted + removed:
1493 for f in deleted + removed:
1479 if f in mw:
1494 if f in mw:
1480 del mw[f]
1495 del mw[f]
1481
1496
1482 # If we're jumping between revisions (as opposed to merging),
1497 # If we're jumping between revisions (as opposed to merging),
1483 # and if neither the working directory nor the target rev has
1498 # and if neither the working directory nor the target rev has
1484 # the file, then we need to remove it from the dirstate, to
1499 # the file, then we need to remove it from the dirstate, to
1485 # prevent the dirstate from listing the file when it is no
1500 # prevent the dirstate from listing the file when it is no
1486 # longer in the manifest.
1501 # longer in the manifest.
1487 if moddirstate and linear_path and f not in m2:
1502 if moddirstate and linear_path and f not in m2:
1488 self.dirstate.forget((f,))
1503 self.dirstate.forget((f,))
1489
1504
1490 # Compare manifests
1505 # Compare manifests
1491 for f, n in mw.iteritems():
1506 for f, n in mw.iteritems():
1492 if choose and not choose(f):
1507 if choose and not choose(f):
1493 continue
1508 continue
1494 if f in m2:
1509 if f in m2:
1495 s = 0
1510 s = 0
1496
1511
1497 # is the wfile new since m1, and match m2?
1512 # is the wfile new since m1, and match m2?
1498 if f not in m1:
1513 if f not in m1:
1499 t1 = self.wread(f)
1514 t1 = self.wread(f)
1500 t2 = self.file(f).read(m2[f])
1515 t2 = self.file(f).read(m2[f])
1501 if cmp(t1, t2) == 0:
1516 if cmp(t1, t2) == 0:
1502 n = m2[f]
1517 n = m2[f]
1503 del t1, t2
1518 del t1, t2
1504
1519
1505 # are files different?
1520 # are files different?
1506 if n != m2[f]:
1521 if n != m2[f]:
1507 a = ma.get(f, nullid)
1522 a = ma.get(f, nullid)
1508 # are both different from the ancestor?
1523 # are both different from the ancestor?
1509 if n != a and m2[f] != a:
1524 if n != a and m2[f] != a:
1510 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1525 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1511 # merge executable bits
1526 # merge executable bits
1512 # "if we changed or they changed, change in merge"
1527 # "if we changed or they changed, change in merge"
1513 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1528 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1514 mode = ((a^b) | (a^c)) ^ a
1529 mode = ((a^b) | (a^c)) ^ a
1515 merge[f] = (m1.get(f, nullid), m2[f], mode)
1530 merge[f] = (m1.get(f, nullid), m2[f], mode)
1516 s = 1
1531 s = 1
1517 # are we clobbering?
1532 # are we clobbering?
1518 # is remote's version newer?
1533 # is remote's version newer?
1519 # or are we going back in time?
1534 # or are we going back in time?
1520 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1535 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1521 self.ui.debug(_(" remote %s is newer, get\n") % f)
1536 self.ui.debug(_(" remote %s is newer, get\n") % f)
1522 get[f] = m2[f]
1537 get[f] = m2[f]
1523 s = 1
1538 s = 1
1524 elif f in umap:
1539 elif f in umap:
1525 # this unknown file is the same as the checkout
1540 # this unknown file is the same as the checkout
1526 get[f] = m2[f]
1541 get[f] = m2[f]
1527
1542
1528 if not s and mfw[f] != mf2[f]:
1543 if not s and mfw[f] != mf2[f]:
1529 if force:
1544 if force:
1530 self.ui.debug(_(" updating permissions for %s\n") % f)
1545 self.ui.debug(_(" updating permissions for %s\n") % f)
1531 util.set_exec(self.wjoin(f), mf2[f])
1546 util.set_exec(self.wjoin(f), mf2[f])
1532 else:
1547 else:
1533 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1548 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1534 mode = ((a^b) | (a^c)) ^ a
1549 mode = ((a^b) | (a^c)) ^ a
1535 if mode != b:
1550 if mode != b:
1536 self.ui.debug(_(" updating permissions for %s\n")
1551 self.ui.debug(_(" updating permissions for %s\n")
1537 % f)
1552 % f)
1538 util.set_exec(self.wjoin(f), mode)
1553 util.set_exec(self.wjoin(f), mode)
1539 del m2[f]
1554 del m2[f]
1540 elif f in ma:
1555 elif f in ma:
1541 if n != ma[f]:
1556 if n != ma[f]:
1542 r = _("d")
1557 r = _("d")
1543 if not force and (linear_path or allow):
1558 if not force and (linear_path or allow):
1544 r = self.ui.prompt(
1559 r = self.ui.prompt(
1545 (_(" local changed %s which remote deleted\n") % f) +
1560 (_(" local changed %s which remote deleted\n") % f) +
1546 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1561 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1547 if r == _("d"):
1562 if r == _("d"):
1548 remove.append(f)
1563 remove.append(f)
1549 else:
1564 else:
1550 self.ui.debug(_("other deleted %s\n") % f)
1565 self.ui.debug(_("other deleted %s\n") % f)
1551 remove.append(f) # other deleted it
1566 remove.append(f) # other deleted it
1552 else:
1567 else:
1553 # file is created on branch or in working directory
1568 # file is created on branch or in working directory
1554 if force and f not in umap:
1569 if force and f not in umap:
1555 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1570 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1556 remove.append(f)
1571 remove.append(f)
1557 elif n == m1.get(f, nullid): # same as parent
1572 elif n == m1.get(f, nullid): # same as parent
1558 if p2 == pa: # going backwards?
1573 if p2 == pa: # going backwards?
1559 self.ui.debug(_("remote deleted %s\n") % f)
1574 self.ui.debug(_("remote deleted %s\n") % f)
1560 remove.append(f)
1575 remove.append(f)
1561 else:
1576 else:
1562 self.ui.debug(_("local modified %s, keeping\n") % f)
1577 self.ui.debug(_("local modified %s, keeping\n") % f)
1563 else:
1578 else:
1564 self.ui.debug(_("working dir created %s, keeping\n") % f)
1579 self.ui.debug(_("working dir created %s, keeping\n") % f)
1565
1580
1566 for f, n in m2.iteritems():
1581 for f, n in m2.iteritems():
1567 if choose and not choose(f):
1582 if choose and not choose(f):
1568 continue
1583 continue
1569 if f[0] == "/":
1584 if f[0] == "/":
1570 continue
1585 continue
1571 if f in ma and n != ma[f]:
1586 if f in ma and n != ma[f]:
1572 r = _("k")
1587 r = _("k")
1573 if not force and (linear_path or allow):
1588 if not force and (linear_path or allow):
1574 r = self.ui.prompt(
1589 r = self.ui.prompt(
1575 (_("remote changed %s which local deleted\n") % f) +
1590 (_("remote changed %s which local deleted\n") % f) +
1576 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1591 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1577 if r == _("k"):
1592 if r == _("k"):
1578 get[f] = n
1593 get[f] = n
1579 elif f not in ma:
1594 elif f not in ma:
1580 self.ui.debug(_("remote created %s\n") % f)
1595 self.ui.debug(_("remote created %s\n") % f)
1581 get[f] = n
1596 get[f] = n
1582 else:
1597 else:
1583 if force or p2 == pa: # going backwards?
1598 if force or p2 == pa: # going backwards?
1584 self.ui.debug(_("local deleted %s, recreating\n") % f)
1599 self.ui.debug(_("local deleted %s, recreating\n") % f)
1585 get[f] = n
1600 get[f] = n
1586 else:
1601 else:
1587 self.ui.debug(_("local deleted %s\n") % f)
1602 self.ui.debug(_("local deleted %s\n") % f)
1588
1603
1589 del mw, m1, m2, ma
1604 del mw, m1, m2, ma
1590
1605
1591 if force:
1606 if force:
1592 for f in merge:
1607 for f in merge:
1593 get[f] = merge[f][1]
1608 get[f] = merge[f][1]
1594 merge = {}
1609 merge = {}
1595
1610
1596 if linear_path or force:
1611 if linear_path or force:
1597 # we don't need to do any magic, just jump to the new rev
1612 # we don't need to do any magic, just jump to the new rev
1598 branch_merge = False
1613 branch_merge = False
1599 p1, p2 = p2, nullid
1614 p1, p2 = p2, nullid
1600 else:
1615 else:
1601 if not allow:
1616 if not allow:
1602 self.ui.status(_("this update spans a branch"
1617 self.ui.status(_("this update spans a branch"
1603 " affecting the following files:\n"))
1618 " affecting the following files:\n"))
1604 fl = merge.keys() + get.keys()
1619 fl = merge.keys() + get.keys()
1605 fl.sort()
1620 fl.sort()
1606 for f in fl:
1621 for f in fl:
1607 cf = ""
1622 cf = ""
1608 if f in merge:
1623 if f in merge:
1609 cf = _(" (resolve)")
1624 cf = _(" (resolve)")
1610 self.ui.status(" %s%s\n" % (f, cf))
1625 self.ui.status(" %s%s\n" % (f, cf))
1611 self.ui.warn(_("aborting update spanning branches!\n"))
1626 self.ui.warn(_("aborting update spanning branches!\n"))
1612 self.ui.status(_("(use update -m to merge across branches"
1627 self.ui.status(_("(use update -m to merge across branches"
1613 " or -C to lose changes)\n"))
1628 " or -C to lose changes)\n"))
1614 return 1
1629 return 1
1615 branch_merge = True
1630 branch_merge = True
1616
1631
1617 # get the files we don't need to change
1632 # get the files we don't need to change
1618 files = get.keys()
1633 files = get.keys()
1619 files.sort()
1634 files.sort()
1620 for f in files:
1635 for f in files:
1621 if f[0] == "/":
1636 if f[0] == "/":
1622 continue
1637 continue
1623 self.ui.note(_("getting %s\n") % f)
1638 self.ui.note(_("getting %s\n") % f)
1624 t = self.file(f).read(get[f])
1639 t = self.file(f).read(get[f])
1625 self.wwrite(f, t)
1640 self.wwrite(f, t)
1626 util.set_exec(self.wjoin(f), mf2[f])
1641 util.set_exec(self.wjoin(f), mf2[f])
1627 if moddirstate:
1642 if moddirstate:
1628 if branch_merge:
1643 if branch_merge:
1629 self.dirstate.update([f], 'n', st_mtime=-1)
1644 self.dirstate.update([f], 'n', st_mtime=-1)
1630 else:
1645 else:
1631 self.dirstate.update([f], 'n')
1646 self.dirstate.update([f], 'n')
1632
1647
1633 # merge the tricky bits
1648 # merge the tricky bits
1634 failedmerge = []
1649 failedmerge = []
1635 files = merge.keys()
1650 files = merge.keys()
1636 files.sort()
1651 files.sort()
1637 xp1 = hex(p1)
1652 xp1 = hex(p1)
1638 xp2 = hex(p2)
1653 xp2 = hex(p2)
1639 for f in files:
1654 for f in files:
1640 self.ui.status(_("merging %s\n") % f)
1655 self.ui.status(_("merging %s\n") % f)
1641 my, other, flag = merge[f]
1656 my, other, flag = merge[f]
1642 ret = self.merge3(f, my, other, xp1, xp2)
1657 ret = self.merge3(f, my, other, xp1, xp2)
1643 if ret:
1658 if ret:
1644 err = True
1659 err = True
1645 failedmerge.append(f)
1660 failedmerge.append(f)
1646 util.set_exec(self.wjoin(f), flag)
1661 util.set_exec(self.wjoin(f), flag)
1647 if moddirstate:
1662 if moddirstate:
1648 if branch_merge:
1663 if branch_merge:
1649 # We've done a branch merge, mark this file as merged
1664 # We've done a branch merge, mark this file as merged
1650 # so that we properly record the merger later
1665 # so that we properly record the merger later
1651 self.dirstate.update([f], 'm')
1666 self.dirstate.update([f], 'm')
1652 else:
1667 else:
1653 # We've update-merged a locally modified file, so
1668 # We've update-merged a locally modified file, so
1654 # we set the dirstate to emulate a normal checkout
1669 # we set the dirstate to emulate a normal checkout
1655 # of that file some time in the past. Thus our
1670 # of that file some time in the past. Thus our
1656 # merge will appear as a normal local file
1671 # merge will appear as a normal local file
1657 # modification.
1672 # modification.
1658 f_len = len(self.file(f).read(other))
1673 f_len = len(self.file(f).read(other))
1659 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1674 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1660
1675
1661 remove.sort()
1676 remove.sort()
1662 for f in remove:
1677 for f in remove:
1663 self.ui.note(_("removing %s\n") % f)
1678 self.ui.note(_("removing %s\n") % f)
1664 util.audit_path(f)
1679 util.audit_path(f)
1665 try:
1680 try:
1666 util.unlink(self.wjoin(f))
1681 util.unlink(self.wjoin(f))
1667 except OSError, inst:
1682 except OSError, inst:
1668 if inst.errno != errno.ENOENT:
1683 if inst.errno != errno.ENOENT:
1669 self.ui.warn(_("update failed to remove %s: %s!\n") %
1684 self.ui.warn(_("update failed to remove %s: %s!\n") %
1670 (f, inst.strerror))
1685 (f, inst.strerror))
1671 if moddirstate:
1686 if moddirstate:
1672 if branch_merge:
1687 if branch_merge:
1673 self.dirstate.update(remove, 'r')
1688 self.dirstate.update(remove, 'r')
1674 else:
1689 else:
1675 self.dirstate.forget(remove)
1690 self.dirstate.forget(remove)
1676
1691
1677 if moddirstate:
1692 if moddirstate:
1678 self.dirstate.setparents(p1, p2)
1693 self.dirstate.setparents(p1, p2)
1679
1694
1680 stat = ((len(get), _("updated")),
1695 stat = ((len(get), _("updated")),
1681 (len(merge) - len(failedmerge), _("merged")),
1696 (len(merge) - len(failedmerge), _("merged")),
1682 (len(remove), _("removed")),
1697 (len(remove), _("removed")),
1683 (len(failedmerge), _("unresolved")))
1698 (len(failedmerge), _("unresolved")))
1684 note = ", ".join([_("%d files %s") % s for s in stat])
1699 note = ", ".join([_("%d files %s") % s for s in stat])
1685 self.ui.note("%s\n" % note)
1700 self.ui.note("%s\n" % note)
1686 if moddirstate and branch_merge:
1701 if moddirstate and branch_merge:
1687 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1702 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1688
1703
1689 return err
1704 return err
1690
1705
def merge3(self, fn, my, other, p1, p2):
    """Run a three-way merge of *fn* in the working directory.

    The base and other revisions are written out to temporary files,
    the external merge tool is invoked on them together with the
    working copy, and the tool's exit status is returned (non-zero
    means the merge failed).
    """

    filelog = self.file(fn)

    def dumptemp(prefix, node):
        # materialize one revision of fn into a temp file for the tool
        stem = "%s~%s." % (os.path.basename(fn), prefix)
        fd, name = tempfile.mkstemp("", stem)
        out = os.fdopen(fd, "wb")
        self.wwrite(fn, filelog.read(node), out)
        out.close()
        return name

    base = filelog.ancestor(my, other)
    localpath = self.wjoin(fn)
    basepath = dumptemp("base", base)
    otherpath = dumptemp("other", other)

    self.ui.note(_("resolving %s\n") % fn)
    self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
                  (fn, short(my), short(other), short(base)))

    # tool lookup order: environment, then config, then default script
    cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
           or "hgmerge")
    status = util.system('%s "%s" "%s" "%s"' % (cmd, localpath, basepath,
                                                otherpath),
                         cwd=self.root,
                         environ={'HG_FILE': fn,
                                  'HG_MY_NODE': p1,
                                  'HG_OTHER_NODE': p2,
                                  'HG_FILE_MY_NODE': hex(my),
                                  'HG_FILE_OTHER_NODE': hex(other),
                                  'HG_FILE_BASE_NODE': hex(base)})
    if status:
        self.ui.warn(_("merging %s failed!\n") % fn)

    os.unlink(basepath)
    os.unlink(otherpath)
    return status
1727
1742
1728 def verify(self):
1743 def verify(self):
1729 filelinkrevs = {}
1744 filelinkrevs = {}
1730 filenodes = {}
1745 filenodes = {}
1731 changesets = revisions = files = 0
1746 changesets = revisions = files = 0
1732 errors = [0]
1747 errors = [0]
1733 neededmanifests = {}
1748 neededmanifests = {}
1734
1749
1735 def err(msg):
1750 def err(msg):
1736 self.ui.warn(msg + "\n")
1751 self.ui.warn(msg + "\n")
1737 errors[0] += 1
1752 errors[0] += 1
1738
1753
1739 def checksize(obj, name):
1754 def checksize(obj, name):
1740 d = obj.checksize()
1755 d = obj.checksize()
1741 if d[0]:
1756 if d[0]:
1742 err(_("%s data length off by %d bytes") % (name, d[0]))
1757 err(_("%s data length off by %d bytes") % (name, d[0]))
1743 if d[1]:
1758 if d[1]:
1744 err(_("%s index contains %d extra bytes") % (name, d[1]))
1759 err(_("%s index contains %d extra bytes") % (name, d[1]))
1745
1760
1746 seen = {}
1761 seen = {}
1747 self.ui.status(_("checking changesets\n"))
1762 self.ui.status(_("checking changesets\n"))
1748 checksize(self.changelog, "changelog")
1763 checksize(self.changelog, "changelog")
1749
1764
1750 for i in range(self.changelog.count()):
1765 for i in range(self.changelog.count()):
1751 changesets += 1
1766 changesets += 1
1752 n = self.changelog.node(i)
1767 n = self.changelog.node(i)
1753 l = self.changelog.linkrev(n)
1768 l = self.changelog.linkrev(n)
1754 if l != i:
1769 if l != i:
1755 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1770 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1756 if n in seen:
1771 if n in seen:
1757 err(_("duplicate changeset at revision %d") % i)
1772 err(_("duplicate changeset at revision %d") % i)
1758 seen[n] = 1
1773 seen[n] = 1
1759
1774
1760 for p in self.changelog.parents(n):
1775 for p in self.changelog.parents(n):
1761 if p not in self.changelog.nodemap:
1776 if p not in self.changelog.nodemap:
1762 err(_("changeset %s has unknown parent %s") %
1777 err(_("changeset %s has unknown parent %s") %
1763 (short(n), short(p)))
1778 (short(n), short(p)))
1764 try:
1779 try:
1765 changes = self.changelog.read(n)
1780 changes = self.changelog.read(n)
1766 except KeyboardInterrupt:
1781 except KeyboardInterrupt:
1767 self.ui.warn(_("interrupted"))
1782 self.ui.warn(_("interrupted"))
1768 raise
1783 raise
1769 except Exception, inst:
1784 except Exception, inst:
1770 err(_("unpacking changeset %s: %s") % (short(n), inst))
1785 err(_("unpacking changeset %s: %s") % (short(n), inst))
1771 continue
1786 continue
1772
1787
1773 neededmanifests[changes[0]] = n
1788 neededmanifests[changes[0]] = n
1774
1789
1775 for f in changes[3]:
1790 for f in changes[3]:
1776 filelinkrevs.setdefault(f, []).append(i)
1791 filelinkrevs.setdefault(f, []).append(i)
1777
1792
1778 seen = {}
1793 seen = {}
1779 self.ui.status(_("checking manifests\n"))
1794 self.ui.status(_("checking manifests\n"))
1780 checksize(self.manifest, "manifest")
1795 checksize(self.manifest, "manifest")
1781
1796
1782 for i in range(self.manifest.count()):
1797 for i in range(self.manifest.count()):
1783 n = self.manifest.node(i)
1798 n = self.manifest.node(i)
1784 l = self.manifest.linkrev(n)
1799 l = self.manifest.linkrev(n)
1785
1800
1786 if l < 0 or l >= self.changelog.count():
1801 if l < 0 or l >= self.changelog.count():
1787 err(_("bad manifest link (%d) at revision %d") % (l, i))
1802 err(_("bad manifest link (%d) at revision %d") % (l, i))
1788
1803
1789 if n in neededmanifests:
1804 if n in neededmanifests:
1790 del neededmanifests[n]
1805 del neededmanifests[n]
1791
1806
1792 if n in seen:
1807 if n in seen:
1793 err(_("duplicate manifest at revision %d") % i)
1808 err(_("duplicate manifest at revision %d") % i)
1794
1809
1795 seen[n] = 1
1810 seen[n] = 1
1796
1811
1797 for p in self.manifest.parents(n):
1812 for p in self.manifest.parents(n):
1798 if p not in self.manifest.nodemap:
1813 if p not in self.manifest.nodemap:
1799 err(_("manifest %s has unknown parent %s") %
1814 err(_("manifest %s has unknown parent %s") %
1800 (short(n), short(p)))
1815 (short(n), short(p)))
1801
1816
1802 try:
1817 try:
1803 delta = mdiff.patchtext(self.manifest.delta(n))
1818 delta = mdiff.patchtext(self.manifest.delta(n))
1804 except KeyboardInterrupt:
1819 except KeyboardInterrupt:
1805 self.ui.warn(_("interrupted"))
1820 self.ui.warn(_("interrupted"))
1806 raise
1821 raise
1807 except Exception, inst:
1822 except Exception, inst:
1808 err(_("unpacking manifest %s: %s") % (short(n), inst))
1823 err(_("unpacking manifest %s: %s") % (short(n), inst))
1809 continue
1824 continue
1810
1825
1811 try:
1826 try:
1812 ff = [ l.split('\0') for l in delta.splitlines() ]
1827 ff = [ l.split('\0') for l in delta.splitlines() ]
1813 for f, fn in ff:
1828 for f, fn in ff:
1814 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1829 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1815 except (ValueError, TypeError), inst:
1830 except (ValueError, TypeError), inst:
1816 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1831 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1817
1832
1818 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1833 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1819
1834
1820 for m, c in neededmanifests.items():
1835 for m, c in neededmanifests.items():
1821 err(_("Changeset %s refers to unknown manifest %s") %
1836 err(_("Changeset %s refers to unknown manifest %s") %
1822 (short(m), short(c)))
1837 (short(m), short(c)))
1823 del neededmanifests
1838 del neededmanifests
1824
1839
1825 for f in filenodes:
1840 for f in filenodes:
1826 if f not in filelinkrevs:
1841 if f not in filelinkrevs:
1827 err(_("file %s in manifest but not in changesets") % f)
1842 err(_("file %s in manifest but not in changesets") % f)
1828
1843
1829 for f in filelinkrevs:
1844 for f in filelinkrevs:
1830 if f not in filenodes:
1845 if f not in filenodes:
1831 err(_("file %s in changeset but not in manifest") % f)
1846 err(_("file %s in changeset but not in manifest") % f)
1832
1847
1833 self.ui.status(_("checking files\n"))
1848 self.ui.status(_("checking files\n"))
1834 ff = filenodes.keys()
1849 ff = filenodes.keys()
1835 ff.sort()
1850 ff.sort()
1836 for f in ff:
1851 for f in ff:
1837 if f == "/dev/null":
1852 if f == "/dev/null":
1838 continue
1853 continue
1839 files += 1
1854 files += 1
1840 if not f:
1855 if not f:
1841 err(_("file without name in manifest %s") % short(n))
1856 err(_("file without name in manifest %s") % short(n))
1842 continue
1857 continue
1843 fl = self.file(f)
1858 fl = self.file(f)
1844 checksize(fl, f)
1859 checksize(fl, f)
1845
1860
1846 nodes = {nullid: 1}
1861 nodes = {nullid: 1}
1847 seen = {}
1862 seen = {}
1848 for i in range(fl.count()):
1863 for i in range(fl.count()):
1849 revisions += 1
1864 revisions += 1
1850 n = fl.node(i)
1865 n = fl.node(i)
1851
1866
1852 if n in seen:
1867 if n in seen:
1853 err(_("%s: duplicate revision %d") % (f, i))
1868 err(_("%s: duplicate revision %d") % (f, i))
1854 if n not in filenodes[f]:
1869 if n not in filenodes[f]:
1855 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1870 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1856 else:
1871 else:
1857 del filenodes[f][n]
1872 del filenodes[f][n]
1858
1873
1859 flr = fl.linkrev(n)
1874 flr = fl.linkrev(n)
1860 if flr not in filelinkrevs.get(f, []):
1875 if flr not in filelinkrevs.get(f, []):
1861 err(_("%s:%s points to unexpected changeset %d")
1876 err(_("%s:%s points to unexpected changeset %d")
1862 % (f, short(n), flr))
1877 % (f, short(n), flr))
1863 else:
1878 else:
1864 filelinkrevs[f].remove(flr)
1879 filelinkrevs[f].remove(flr)
1865
1880
1866 # verify contents
1881 # verify contents
1867 try:
1882 try:
1868 t = fl.read(n)
1883 t = fl.read(n)
1869 except KeyboardInterrupt:
1884 except KeyboardInterrupt:
1870 self.ui.warn(_("interrupted"))
1885 self.ui.warn(_("interrupted"))
1871 raise
1886 raise
1872 except Exception, inst:
1887 except Exception, inst:
1873 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1888 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1874
1889
1875 # verify parents
1890 # verify parents
1876 (p1, p2) = fl.parents(n)
1891 (p1, p2) = fl.parents(n)
1877 if p1 not in nodes:
1892 if p1 not in nodes:
1878 err(_("file %s:%s unknown parent 1 %s") %
1893 err(_("file %s:%s unknown parent 1 %s") %
1879 (f, short(n), short(p1)))
1894 (f, short(n), short(p1)))
1880 if p2 not in nodes:
1895 if p2 not in nodes:
1881 err(_("file %s:%s unknown parent 2 %s") %
1896 err(_("file %s:%s unknown parent 2 %s") %
1882 (f, short(n), short(p1)))
1897 (f, short(n), short(p1)))
1883 nodes[n] = 1
1898 nodes[n] = 1
1884
1899
1885 # cross-check
1900 # cross-check
1886 for node in filenodes[f]:
1901 for node in filenodes[f]:
1887 err(_("node %s in manifests not in %s") % (hex(node), f))
1902 err(_("node %s in manifests not in %s") % (hex(node), f))
1888
1903
1889 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1904 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1890 (files, changesets, revisions))
1905 (files, changesets, revisions))
1891
1906
1892 if errors[0]:
1907 if errors[0]:
1893 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1908 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1894 return 1
1909 return 1
1895
1910
# used to avoid circular references so destructors work
def aftertrans(base):
    """Return a callback that renames the journal files under *base*
    to their undo names after a transaction completes."""
    def onclose():
        for old, new in (("journal", "undo"),
                         ("journal.dirstate", "undo.dirstate")):
            util.rename(os.path.join(base, old), os.path.join(base, new))
    return onclose
1904
1919
General Comments 0
You need to be logged in to leave comments. Login now