##// END OF EJS Templates
Deal with merge abort more gracefully...
Matt Mackall -
r1495:1e265c2b default
parent child Browse files
Show More
@@ -1,1746 +1,1746 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository:
    def __init__(self, ui, path=None, create=0):
        # Open (or with create=1, create) a local repository.
        #
        # ui     - the user-interface object used for config and messages
        # path   - working directory root; if None, search upward from
        #          the current directory for a ".hg" directory
        # create - when true, make the .hg/ and .hg/data directories
        #
        # Raises repo.RepoError when no repository can be found.
        if not path:
            # walk up the directory tree looking for a ".hg" directory
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                # dirname() returned the same path: we hit the filesystem root
                if p == oldp: raise repo.RepoError(_("no repo found"))
            path = p
        self.path = os.path.join(path, ".hg")

        if not create and not os.path.isdir(self.path):
            raise repo.RepoError(_("repository %s not found") % self.path)

        self.root = os.path.abspath(path)
        self.ui = ui
        # opener/wopener open files relative to .hg/ and the working dir
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)
        self.manifest = manifest.manifest(self.opener)
        self.changelog = changelog.changelog(self.opener)
        # lazily-populated caches, see tags() / nodetags() / wread() / wwrite()
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
        try:
            # per-repository configuration overrides
            self.ui.readconfig(os.path.join(self.path, "hgrc"))
        except IOError: pass
48
48
    def hook(self, name, **args):
        # Run every [hooks] config entry whose name (before any ".")
        # matches `name`.  Keyword args are exported to the hook as
        # upper-cased environment variables.  Returns True only if all
        # matching hooks exited with status 0.
        def runhook(name, cmd):
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            # export args into the environment, remembering prior values
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            # Hooks run in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            r = os.system(cmd)
            os.chdir(olddir)

            # restore the environment exactly as it was before
            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                # non-zero exit status means the hook failed
                self.ui.warn(_("abort: %s hook failed with status %d!\n") %
                             (name, r))
                return False
            return True

        r = True
        for hname, cmd in self.ui.configitems("hooks"):
            # hook names may be qualified, e.g. "commit.notify"
            s = hname.split(".")
            if s[0] == name and cmd:
                # all hooks run even after one fails; r accumulates failure
                r = runhook(hname, cmd) and r
        return r
82
82
    def tags(self):
        '''return a mapping of tag to node'''
        # The result is cached in self.tagscache; later entries override
        # earlier ones, so localtags beat .hgtags and 'tip' always wins.
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                # record tag k -> binary node; unparsable hex maps to ''
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.read(r).splitlines():
                        if l:
                            # each line is "<hex node> <tag name>"
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                # repository has no .hgtags file
                pass

            try:
                # uncommitted, repository-local tags override .hgtags
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            # 'tip' always exists and always points at the newest changeset
            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
120
120
121 def tagslist(self):
121 def tagslist(self):
122 '''return a list of tags ordered by revision'''
122 '''return a list of tags ordered by revision'''
123 l = []
123 l = []
124 for t, n in self.tags().items():
124 for t, n in self.tags().items():
125 try:
125 try:
126 r = self.changelog.rev(n)
126 r = self.changelog.rev(n)
127 except:
127 except:
128 r = -2 # sort to the beginning of the list if unknown
128 r = -2 # sort to the beginning of the list if unknown
129 l.append((r,t,n))
129 l.append((r,t,n))
130 l.sort()
130 l.sort()
131 return [(t,n) for r,t,n in l]
131 return [(t,n) for r,t,n in l]
132
132
133 def nodetags(self, node):
133 def nodetags(self, node):
134 '''return the tags associated with a node'''
134 '''return the tags associated with a node'''
135 if not self.nodetagscache:
135 if not self.nodetagscache:
136 self.nodetagscache = {}
136 self.nodetagscache = {}
137 for t,n in self.tags().items():
137 for t,n in self.tags().items():
138 self.nodetagscache.setdefault(n,[]).append(t)
138 self.nodetagscache.setdefault(n,[]).append(t)
139 return self.nodetagscache.get(node, [])
139 return self.nodetagscache.get(node, [])
140
140
    def lookup(self, key):
        # Resolve a symbolic key (tag name, revision number, or node id)
        # to a binary changelog node.  Tags take precedence over direct
        # changelog lookup.  Raises repo.RepoError when nothing matches.
        try:
            return self.tags()[key]
        except KeyError:
            try:
                return self.changelog.lookup(key)
            except:
                # NOTE(review): bare except maps *any* lookup failure
                # (not just unknown keys) to "unknown revision"
                raise repo.RepoError(_("unknown revision '%s'") % key)
149
149
150 def dev(self):
150 def dev(self):
151 return os.stat(self.path).st_dev
151 return os.stat(self.path).st_dev
152
152
153 def local(self):
153 def local(self):
154 return True
154 return True
155
155
156 def join(self, f):
156 def join(self, f):
157 return os.path.join(self.path, f)
157 return os.path.join(self.path, f)
158
158
159 def wjoin(self, f):
159 def wjoin(self, f):
160 return os.path.join(self.root, f)
160 return os.path.join(self.root, f)
161
161
162 def file(self, f):
162 def file(self, f):
163 if f[0] == '/': f = f[1:]
163 if f[0] == '/': f = f[1:]
164 return filelog.filelog(self.opener, f)
164 return filelog.filelog(self.opener, f)
165
165
166 def getcwd(self):
166 def getcwd(self):
167 return self.dirstate.getcwd()
167 return self.dirstate.getcwd()
168
168
169 def wfile(self, f, mode='r'):
169 def wfile(self, f, mode='r'):
170 return self.wopener(f, mode)
170 return self.wopener(f, mode)
171
171
172 def wread(self, filename):
172 def wread(self, filename):
173 if self.encodepats == None:
173 if self.encodepats == None:
174 l = []
174 l = []
175 for pat, cmd in self.ui.configitems("encode"):
175 for pat, cmd in self.ui.configitems("encode"):
176 mf = util.matcher("", "/", [pat], [], [])[1]
176 mf = util.matcher("", "/", [pat], [], [])[1]
177 l.append((mf, cmd))
177 l.append((mf, cmd))
178 self.encodepats = l
178 self.encodepats = l
179
179
180 data = self.wopener(filename, 'r').read()
180 data = self.wopener(filename, 'r').read()
181
181
182 for mf, cmd in self.encodepats:
182 for mf, cmd in self.encodepats:
183 if mf(filename):
183 if mf(filename):
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
185 data = util.filter(data, cmd)
185 data = util.filter(data, cmd)
186 break
186 break
187
187
188 return data
188 return data
189
189
190 def wwrite(self, filename, data, fd=None):
190 def wwrite(self, filename, data, fd=None):
191 if self.decodepats == None:
191 if self.decodepats == None:
192 l = []
192 l = []
193 for pat, cmd in self.ui.configitems("decode"):
193 for pat, cmd in self.ui.configitems("decode"):
194 mf = util.matcher("", "/", [pat], [], [])[1]
194 mf = util.matcher("", "/", [pat], [], [])[1]
195 l.append((mf, cmd))
195 l.append((mf, cmd))
196 self.decodepats = l
196 self.decodepats = l
197
197
198 for mf, cmd in self.decodepats:
198 for mf, cmd in self.decodepats:
199 if mf(filename):
199 if mf(filename):
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
201 data = util.filter(data, cmd)
201 data = util.filter(data, cmd)
202 break
202 break
203
203
204 if fd:
204 if fd:
205 return fd.write(data)
205 return fd.write(data)
206 return self.wopener(filename, 'w').write(data)
206 return self.wopener(filename, 'w').write(data)
207
207
    def transaction(self):
        # Open a new journaled transaction on the repository.
        #
        # The current dirstate is snapshotted to journal.dirstate first
        # so undo() can later restore it; when the transaction closes
        # successfully, after() renames the journal files to undo /
        # undo.dirstate.

        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate file yet (fresh repository): journal it empty
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        def after():
            # on successful close, keep the journal as "undo" data
            util.rename(self.join("journal"), self.join("undo"))
            util.rename(self.join("journal.dirstate"),
                        self.join("undo.dirstate"))

        return transaction.transaction(self.ui.warn, self.opener,
                                       self.join("journal"), after)
223
223
    def recover(self):
        # Roll back an interrupted transaction, if a journal file is
        # present.  Returns the result of transaction.rollback, or None
        # when there is nothing to recover.
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
231
231
    def undo(self):
        # Roll back the last committed transaction using the saved
        # "undo" journal, and restore the dirstate that was snapshotted
        # alongside it.
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.opener, self.join("undo"))
            # drop the in-memory dirstate before replacing its file,
            # then reload it from the restored copy
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn(_("no undo information available\n"))
242
242
    def lock(self, wait=1):
        # Acquire the repository lock.  The first attempt never blocks
        # (timeout 0); if the lock is held and wait is true, warn the
        # user and retry with the given (blocking) timeout, otherwise
        # re-raise the LockHeld error.
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst
251
251
    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        # Create a changeset directly from the given file list and
        # metadata, with explicit parents p1/p2 (defaulting to the
        # dirstate parents).  Used for importing history; the dirstate
        # is only updated when p1 is the current working dir parent.
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # only touch the dirstate if we are committing on top of it
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                fp1 = m1.get(f, nullid)
                fp2 = m2.get(f, nullid)

                # is the same revision on two branches of a merge?
                if fp2 == fp1:
                    fp2 = nullid

                if fp2 != nullid:
                    # is one parent an ancestor of the other?
                    fpa = r.ancestor(fp1, fp2)
                    if fpa == fp1:
                        fp1, fp2 = fp2, nullid
                    elif fpa == fp2:
                        fp2 = nullid

                # is the file unmodified from the parent?
                if t == r.read(fp1):
                    # record the proper existing parent in manifest
                    # no need to add a revision
                    mm[f] = fp1
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # file missing from the working dir: treat as removed
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
321
321
    def commit(self, files = None, text = "", user = None, date = None,
               match = util.always, force=False):
        # Commit working-directory changes as a new changeset.
        #
        # files - explicit list of files to commit; if empty/None, all
        #         changed files matching `match` are committed
        # text  - commit message; if empty, the user's editor is invoked
        # force - commit even when nothing changed (unless merging)
        #
        # Returns the new changeset node, or None when nothing was
        # committed (no changes, hook veto, or empty commit message).
        commit = []
        remove = []
        changed = []

        if files:
            # classify the explicit file list by dirstate status
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            # commit everything changed/added, remove everything deleted
            (c, a, d, u) = self.changes(match=match)
            commit = c + a
            remove = d

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 != nullid) is always committed, even with no files
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        if not self.hook("precommit"):
            return None

        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            # record copy metadata, if the dirstate knows about a copy
            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                fp1 = m1.get(f, nullid)
                fp2 = m2.get(f, nullid)

                # is the same revision on two branches of a merge?
                if fp2 == fp1:
                    fp2 = nullid

                if fp2 != nullid:
                    # is one parent an ancestor of the other?
                    fpa = r.ancestor(fp1, fp2)
                    if fpa == fp1:
                        fp1, fp2 = fp2, nullid
                    elif fpa == fp2:
                        fp2 = nullid

                # is the file unmodified from the parent?
                if not meta and t == r.read(fp1):
                    # record the proper existing parent in manifest
                    # no need to add a revision
                    new[f] = fp1
                    continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            # build an editor template describing the pending commit
            edittext = ""
            if p2 != nullid:
                edittext += "HG: branch merge\n"
            edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in changed])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            if not changed and not remove:
                edittext += "HG: no files changed\n"
            edittext = self.ui.edit(edittext)
            # an empty message aborts the commit
            if not edittext.rstrip():
                return None
            text = edittext

        user = user or self.ui.username()
        n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
        tr.close()

        # move the working directory onto the new changeset
        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        if not self.hook("commit", node=hex(n)):
            return None
        return n
446
446
447 def walk(self, node=None, files=[], match=util.always):
447 def walk(self, node=None, files=[], match=util.always):
448 if node:
448 if node:
449 for fn in self.manifest.read(self.changelog.read(node)[0]):
449 for fn in self.manifest.read(self.changelog.read(node)[0]):
450 if match(fn): yield 'm', fn
450 if match(fn): yield 'm', fn
451 else:
451 else:
452 for src, fn in self.dirstate.walk(files, match):
452 for src, fn in self.dirstate.walk(files, match):
453 yield src, fn
453 yield src, fn
454
454
    def changes(self, node1 = None, node2 = None, files = [],
                match = util.always):
        # Compare two repository states and return the tuple
        # (changed, added, deleted, unknown), each a sorted file list.
        # node1/node2 default to the working directory's parent and the
        # working directory itself respectively.
        mf2, u = None, []

        def fcmp(fn, mf):
            # compare working-dir contents against the manifest revision
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            # manifest of `node` restricted to files accepted by `match`
            mf = dict(self.manifest.read(node))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, match)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = mfmatches(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
            # added/changed/lookup files exist in the working dir with
            # unknown (empty) hash; deleted files are dropped entirely
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = mfmatches(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = mfmatches(change[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    # empty hash means working-dir file: fall back to a
                    # content comparison before calling it changed
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        # whatever remains in mf1 is gone from the newer state
        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)
525
525
526 def add(self, list):
526 def add(self, list):
527 for f in list:
527 for f in list:
528 p = self.wjoin(f)
528 p = self.wjoin(f)
529 if not os.path.exists(p):
529 if not os.path.exists(p):
530 self.ui.warn(_("%s does not exist!\n") % f)
530 self.ui.warn(_("%s does not exist!\n") % f)
531 elif not os.path.isfile(p):
531 elif not os.path.isfile(p):
532 self.ui.warn(_("%s not added: only files supported currently\n") % f)
532 self.ui.warn(_("%s not added: only files supported currently\n") % f)
533 elif self.dirstate.state(f) in 'an':
533 elif self.dirstate.state(f) in 'an':
534 self.ui.warn(_("%s already tracked!\n") % f)
534 self.ui.warn(_("%s already tracked!\n") % f)
535 else:
535 else:
536 self.dirstate.update([f], "a")
536 self.dirstate.update([f], "a")
537
537
538 def forget(self, list):
538 def forget(self, list):
539 for f in list:
539 for f in list:
540 if self.dirstate.state(f) not in 'ai':
540 if self.dirstate.state(f) not in 'ai':
541 self.ui.warn(_("%s not added!\n") % f)
541 self.ui.warn(_("%s not added!\n") % f)
542 else:
542 else:
543 self.dirstate.forget([f])
543 self.dirstate.forget([f])
544
544
    def remove(self, list, unlink=False):
        # Mark files as removed ('r') in the dirstate.  With
        # unlink=True, delete them from the working directory first
        # (already-missing files are tolerated).
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    # only a missing file is acceptable here
                    if inst.errno != errno.ENOENT: raise
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                # refuse to mark a file removed while it still exists
                self.ui.warn(_("%s still exists!\n") % f)
            elif self.dirstate.state(f) == 'a':
                # never committed: just drop the pending add
                self.ui.warn(_("%s never committed!\n") % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn(_("%s not tracked!\n") % f)
            else:
                self.dirstate.update([f], "r")
563
563
564 def undelete(self, list):
564 def undelete(self, list):
565 p = self.dirstate.parents()[0]
565 p = self.dirstate.parents()[0]
566 mn = self.changelog.read(p)[0]
566 mn = self.changelog.read(p)[0]
567 mf = self.manifest.readflags(mn)
567 mf = self.manifest.readflags(mn)
568 m = self.manifest.read(mn)
568 m = self.manifest.read(mn)
569 for f in list:
569 for f in list:
570 if self.dirstate.state(f) not in "r":
570 if self.dirstate.state(f) not in "r":
571 self.ui.warn("%s not removed!\n" % f)
571 self.ui.warn("%s not removed!\n" % f)
572 else:
572 else:
573 t = self.file(f).read(m[f])
573 t = self.file(f).read(m[f])
574 self.wwrite(f, t)
574 self.wwrite(f, t)
575 util.set_exec(self.wjoin(f), mf[f])
575 util.set_exec(self.wjoin(f), mf[f])
576 self.dirstate.update([f], "n")
576 self.dirstate.update([f], "n")
577
577
578 def copy(self, source, dest):
578 def copy(self, source, dest):
579 p = self.wjoin(dest)
579 p = self.wjoin(dest)
580 if not os.path.exists(p):
580 if not os.path.exists(p):
581 self.ui.warn(_("%s does not exist!\n") % dest)
581 self.ui.warn(_("%s does not exist!\n") % dest)
582 elif not os.path.isfile(p):
582 elif not os.path.isfile(p):
583 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
583 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
584 else:
584 else:
585 if self.dirstate.state(dest) == '?':
585 if self.dirstate.state(dest) == '?':
586 self.dirstate.update([dest], "a")
586 self.dirstate.update([dest], "a")
587 self.dirstate.copy(source, dest)
587 self.dirstate.copy(source, dest)
588
588
589 def heads(self):
589 def heads(self):
590 return self.changelog.heads()
590 return self.changelog.heads()
591
591
592 # branchlookup returns a dict giving a list of branches for
592 # branchlookup returns a dict giving a list of branches for
593 # each head. A branch is defined as the tag of a node or
593 # each head. A branch is defined as the tag of a node or
594 # the branch of the node's parents. If a node has multiple
594 # the branch of the node's parents. If a node has multiple
595 # branch tags, tags are eliminated if they are visible from other
595 # branch tags, tags are eliminated if they are visible from other
596 # branch tags.
596 # branch tags.
597 #
597 #
598 # So, for this graph: a->b->c->d->e
598 # So, for this graph: a->b->c->d->e
599 # \ /
599 # \ /
600 # aa -----/
600 # aa -----/
601 # a has tag 2.6.12
601 # a has tag 2.6.12
602 # d has tag 2.6.13
602 # d has tag 2.6.13
603 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
603 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
604 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
604 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
605 # from the list.
605 # from the list.
606 #
606 #
607 # It is possible that more than one head will have the same branch tag.
607 # It is possible that more than one head will have the same branch tag.
608 # callers need to check the result for multiple heads under the same
608 # callers need to check the result for multiple heads under the same
609 # branch tag if that is a problem for them (ie checkout of a specific
609 # branch tag if that is a problem for them (ie checkout of a specific
610 # branch).
610 # branch).
611 #
611 #
612 # passing in a specific branch will limit the depth of the search
612 # passing in a specific branch will limit the depth of the search
613 # through the parents. It won't limit the branches returned in the
613 # through the parents. It won't limit the branches returned in the
614 # result though.
614 # result though.
def branchlookup(self, heads=None, branch=None):
    """Return a dict mapping each head node to a list of branch tags.

    Phase 1 walks the ancestry of every head (following merges via a
    work queue) and records, per head, every tagged node reachable from
    it.  Phase 2 prunes tags that are shadowed: a tag visible from
    another of the same head's tags is dropped.  Passing ``branch``
    limits the depth of the phase-1 walk (the walk stops descending
    past a node carrying that tag); it does not filter the result.
    heads defaults to self.heads().
    """
    if not heads:
        heads = self.heads()
    headt = [ h for h in heads ]
    chlog = self.changelog
    branches = {}
    merges = []
    seenmerge = {}

    # traverse the tree once for each head, recording in the branches
    # dict which tags are visible from this head. The branches
    # dict also records which tags are visible from each tag
    # while we traverse.
    while headt or merges:
        if merges:
            # resume a second-parent branch queued below; 'found' is the
            # list of tagged nodes collected on the path that queued it
            n, found = merges.pop()
            visit = [n]
        else:
            h = headt.pop()
            visit = [h]
            found = [h]
            seen = {}
        while visit:
            n = visit.pop()
            if n in seen:
                continue
            pp = chlog.parents(n)
            tags = self.nodetags(n)
            if tags:
                for x in tags:
                    if x == 'tip':
                        # 'tip' is synthetic, never a branch tag
                        continue
                    for f in found:
                        branches.setdefault(f, {})[n] = 1
                    branches.setdefault(n, {})[n] = 1
                    break
                if n not in found:
                    found.append(n)
                if branch in tags:
                    # depth limit reached for the requested branch
                    continue
            seen[n] = 1
            if pp[1] != nullid and n not in seenmerge:
                # queue the merge's second parent for a later pass,
                # carrying a snapshot of the tags found so far
                merges.append((pp[1], [x for x in found]))
                seenmerge[n] = 1
            if pp[0] != nullid:
                visit.append(pp[0])
    # traverse the branches dict, eliminating branch tags from each
    # head that are visible from another branch tag for that head.
    out = {}
    viscache = {}
    for h in heads:
        def visible(node):
            # memoized transitive closure of 'branches' from node
            if node in viscache:
                return viscache[node]
            ret = {}
            visit = [node]
            while visit:
                x = visit.pop()
                if x in viscache:
                    ret.update(viscache[x])
                elif x not in ret:
                    ret[x] = 1
                    if x in branches:
                        visit[len(visit):] = branches[x].keys()
            viscache[node] = ret
            return ret
        if h not in branches:
            continue
        # O(n^2), but somewhat limited. This only searches the
        # tags visible from a specific head, not all the tags in the
        # whole repo.
        for b in branches[h]:
            vis = False
            for bb in branches[h].keys():
                if b != bb:
                    if b in visible(bb):
                        vis = True
                        break
            if not vis:
                l = out.setdefault(h, [])
                l[len(l):] = self.nodetags(b)
    return out
697
697
def branches(self, nodes):
    """Describe the linear branch segment below each given node.

    For every node, first parents are followed until a merge or the
    repository root is reached; the result is a list of tuples
    (head, root, parent1, parent2).  nodes defaults to [tip].
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    result = []
    for tip in nodes:
        node = tip
        # walk first parents until the segment bottoms out at a merge
        # (two real parents) or at the root (null first parent)
        while node:
            parents = self.changelog.parents(node)
            if parents[1] != nullid or parents[0] == nullid:
                result.append((tip, node, parents[0], parents[1]))
                break
            node = parents[0]
    return result
710
710
def between(self, pairs):
    """Sample the first-parent path between node pairs.

    For each (top, bottom) pair, walk first parents from top toward
    bottom and collect the nodes at exponentially growing distances
    (1, 3, 7, ...) from top — the probe points used by the discovery
    protocol's binary search.  Returns one list per pair.
    """
    out = []
    for top, bottom in pairs:
        node, sample, dist = top, [], 0
        step = 1
        while node != bottom:
            parent = self.changelog.parents(node)[0]
            # record a node each time the distance doubles
            if dist == step:
                sample.append(node)
                step *= 2
            node = parent
            dist += 1
        out.append(sample)
    return out
729
729
def findincoming(self, remote, base=None, heads=None):
    """Discover changesets present in remote but missing locally.

    Returns the list of earliest-unknown nodes (roots of the missing
    set), or None when every remote head is already known.  If the
    caller supplies a ``base`` dict it is filled in place with nodes
    known to both sides.  ``heads`` limits discovery to the given
    remote heads (defaults to remote.heads()).
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}
    seen = {}
    seenbranch = {}
    if base is None:
        base = {}

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    if not heads:
        heads = remote.heads()

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        # every remote head is already in our changelog
        return None

    rep = {}
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
            if n[0] == nullid:
                break
            if n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            if n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                        base[n[2]] = 1 # latest known
                        continue

                # both parents unknown: ask the remote about them next
                for a in n[2:4]:
                    if a not in rep:
                        r.append(a)
                        rep[a] = 1

            seen[n[0]] = 1

        if r:
            # batch follow-up 'branches' queries, ten nodes at a time
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    if b[0] in m:
                        self.ui.debug(_("found base node %s\n") % short(b[0]))
                        base[b[0]] = 1
                    elif b[0] not in seen:
                        unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    # adjacent probe points: p is the earliest unknown
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    # boundary lies between p and i: narrow further
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            # BUG FIX: was short(f[:4]), which hexified only the first
            # 4 bytes of the 20-byte node and printed a garbled id
            raise repo.RepoError(_("already have changeset ") + short(f))

    if base.keys() == [nullid]:
        self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
849
849
def findoutgoing(self, remote, base=None, heads=None):
    """Return the roots of local changesets missing from remote.

    ``base`` maps nodes known to both sides; when it is None it is
    computed here by running findincoming() first.
    """
    if base is None:
        base = {}
        self.findincoming(remote, base, heads)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every local node, then prune everything the remote
    # already has: the common nodes and all of their ancestors
    remain = dict.fromkeys(self.changelog.nodemap)
    del remain[nullid]
    queue = list(base.keys())
    while queue:
        node = queue.pop(0)
        if node in remain:
            del remain[node]
            queue.extend(self.changelog.parents(node))

    # whatever is left must be pushed; its roots are the nodes whose
    # parents were both pruned
    roots = []
    for node in remain:
        p1, p2 = self.changelog.parents(node)
        if p1 not in remain and p2 not in remain:
            roots.append(node)
    return roots
879
879
def pull(self, remote, heads=None):
    """Fetch changesets missing locally from remote and add them.

    Returns 1 when there is nothing to pull; otherwise returns the
    result of addchangegroup().  ``heads`` limits the pull to the
    ancestors of the given remote heads.
    """
    lock = self.lock()   # held (by reference) for the whole pull

    # an empty local repo simply wants the remote's entire history
    if self.changelog.tip() == nullid:
        self.ui.status(_("requesting all changes\n"))
        roots = [nullid]
    else:
        roots = self.findincoming(remote)

    if not roots:
        self.ui.status(_("no changes found\n"))
        return 1

    if heads is None:
        cg = remote.changegroup(roots)
    else:
        cg = remote.changegroupsubset(roots, heads)
    return self.addchangegroup(cg)
899
899
def push(self, remote, force=False):
    """Send changesets missing from remote over to remote.

    Unless ``force`` is set, refuses (returning 1) when the remote has
    unsynced changes or when the push would create new remote heads.
    Returns the result of the remote's addchangegroup() on success.
    """
    lock = remote.lock()   # held (by reference) while updating remote

    base = {}
    heads = remote.heads()
    inc = self.findincoming(remote, base, heads)
    if inc and not force:
        self.ui.warn(_("abort: unsynced remote changes!\n"))
        self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
        return 1

    update = self.findoutgoing(remote, base)
    if not update:
        self.ui.status(_("no changes found\n"))
        return 1
    # fewer remote heads than local heads means we would add a head
    if not force and len(heads) < len(self.changelog.heads()):
        self.ui.warn(_("abort: push creates new remote branches!\n"))
        self.ui.status(_("(did you forget to merge?"
                         " use push -f to force)\n"))
        return 1

    cg = self.changegroup(update)
    return remote.addchangegroup(cg)
924
924
925 def changegroupsubset(self, bases, heads):
925 def changegroupsubset(self, bases, heads):
926 """This function generates a changegroup consisting of all the nodes
926 """This function generates a changegroup consisting of all the nodes
927 that are descendents of any of the bases, and ancestors of any of
927 that are descendents of any of the bases, and ancestors of any of
928 the heads.
928 the heads.
929
929
930 It is fairly complex as determining which filenodes and which
930 It is fairly complex as determining which filenodes and which
931 manifest nodes need to be included for the changeset to be complete
931 manifest nodes need to be included for the changeset to be complete
932 is non-trivial.
932 is non-trivial.
933
933
934 Another wrinkle is doing the reverse, figuring out which changeset in
934 Another wrinkle is doing the reverse, figuring out which changeset in
935 the changegroup a particular filenode or manifestnode belongs to."""
935 the changegroup a particular filenode or manifestnode belongs to."""
936
936
937 # Set up some initial variables
937 # Set up some initial variables
938 # Make it easy to refer to self.changelog
938 # Make it easy to refer to self.changelog
939 cl = self.changelog
939 cl = self.changelog
940 # msng is short for missing - compute the list of changesets in this
940 # msng is short for missing - compute the list of changesets in this
941 # changegroup.
941 # changegroup.
942 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
942 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
943 # Some bases may turn out to be superfluous, and some heads may be
943 # Some bases may turn out to be superfluous, and some heads may be
944 # too. nodesbetween will return the minimal set of bases and heads
944 # too. nodesbetween will return the minimal set of bases and heads
945 # necessary to re-create the changegroup.
945 # necessary to re-create the changegroup.
946
946
947 # Known heads are the list of heads that it is assumed the recipient
947 # Known heads are the list of heads that it is assumed the recipient
948 # of this changegroup will know about.
948 # of this changegroup will know about.
949 knownheads = {}
949 knownheads = {}
950 # We assume that all parents of bases are known heads.
950 # We assume that all parents of bases are known heads.
951 for n in bases:
951 for n in bases:
952 for p in cl.parents(n):
952 for p in cl.parents(n):
953 if p != nullid:
953 if p != nullid:
954 knownheads[p] = 1
954 knownheads[p] = 1
955 knownheads = knownheads.keys()
955 knownheads = knownheads.keys()
956 if knownheads:
956 if knownheads:
957 # Now that we know what heads are known, we can compute which
957 # Now that we know what heads are known, we can compute which
958 # changesets are known. The recipient must know about all
958 # changesets are known. The recipient must know about all
959 # changesets required to reach the known heads from the null
959 # changesets required to reach the known heads from the null
960 # changeset.
960 # changeset.
961 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
961 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
962 junk = None
962 junk = None
963 # Transform the list into an ersatz set.
963 # Transform the list into an ersatz set.
964 has_cl_set = dict.fromkeys(has_cl_set)
964 has_cl_set = dict.fromkeys(has_cl_set)
965 else:
965 else:
966 # If there were no known heads, the recipient cannot be assumed to
966 # If there were no known heads, the recipient cannot be assumed to
967 # know about any changesets.
967 # know about any changesets.
968 has_cl_set = {}
968 has_cl_set = {}
969
969
970 # Make it easy to refer to self.manifest
970 # Make it easy to refer to self.manifest
971 mnfst = self.manifest
971 mnfst = self.manifest
972 # We don't know which manifests are missing yet
972 # We don't know which manifests are missing yet
973 msng_mnfst_set = {}
973 msng_mnfst_set = {}
974 # Nor do we know which filenodes are missing.
974 # Nor do we know which filenodes are missing.
975 msng_filenode_set = {}
975 msng_filenode_set = {}
976
976
977 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
977 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
978 junk = None
978 junk = None
979
979
980 # A changeset always belongs to itself, so the changenode lookup
980 # A changeset always belongs to itself, so the changenode lookup
981 # function for a changenode is identity.
981 # function for a changenode is identity.
982 def identity(x):
982 def identity(x):
983 return x
983 return x
984
984
985 # A function generating function. Sets up an environment for the
985 # A function generating function. Sets up an environment for the
986 # inner function.
986 # inner function.
987 def cmp_by_rev_func(revlog):
987 def cmp_by_rev_func(revlog):
988 # Compare two nodes by their revision number in the environment's
988 # Compare two nodes by their revision number in the environment's
989 # revision history. Since the revision number both represents the
989 # revision history. Since the revision number both represents the
990 # most efficient order to read the nodes in, and represents a
990 # most efficient order to read the nodes in, and represents a
991 # topological sorting of the nodes, this function is often useful.
991 # topological sorting of the nodes, this function is often useful.
992 def cmp_by_rev(a, b):
992 def cmp_by_rev(a, b):
993 return cmp(revlog.rev(a), revlog.rev(b))
993 return cmp(revlog.rev(a), revlog.rev(b))
994 return cmp_by_rev
994 return cmp_by_rev
995
995
996 # If we determine that a particular file or manifest node must be a
996 # If we determine that a particular file or manifest node must be a
997 # node that the recipient of the changegroup will already have, we can
997 # node that the recipient of the changegroup will already have, we can
998 # also assume the recipient will have all the parents. This function
998 # also assume the recipient will have all the parents. This function
999 # prunes them from the set of missing nodes.
999 # prunes them from the set of missing nodes.
1000 def prune_parents(revlog, hasset, msngset):
1000 def prune_parents(revlog, hasset, msngset):
1001 haslst = hasset.keys()
1001 haslst = hasset.keys()
1002 haslst.sort(cmp_by_rev_func(revlog))
1002 haslst.sort(cmp_by_rev_func(revlog))
1003 for node in haslst:
1003 for node in haslst:
1004 parentlst = [p for p in revlog.parents(node) if p != nullid]
1004 parentlst = [p for p in revlog.parents(node) if p != nullid]
1005 while parentlst:
1005 while parentlst:
1006 n = parentlst.pop()
1006 n = parentlst.pop()
1007 if n not in hasset:
1007 if n not in hasset:
1008 hasset[n] = 1
1008 hasset[n] = 1
1009 p = [p for p in revlog.parents(n) if p != nullid]
1009 p = [p for p in revlog.parents(n) if p != nullid]
1010 parentlst.extend(p)
1010 parentlst.extend(p)
1011 for n in hasset:
1011 for n in hasset:
1012 msngset.pop(n, None)
1012 msngset.pop(n, None)
1013
1013
1014 # This is a function generating function used to set up an environment
1014 # This is a function generating function used to set up an environment
1015 # for the inner function to execute in.
1015 # for the inner function to execute in.
1016 def manifest_and_file_collector(changedfileset):
1016 def manifest_and_file_collector(changedfileset):
1017 # This is an information gathering function that gathers
1017 # This is an information gathering function that gathers
1018 # information from each changeset node that goes out as part of
1018 # information from each changeset node that goes out as part of
1019 # the changegroup. The information gathered is a list of which
1019 # the changegroup. The information gathered is a list of which
1020 # manifest nodes are potentially required (the recipient may
1020 # manifest nodes are potentially required (the recipient may
1021 # already have them) and total list of all files which were
1021 # already have them) and total list of all files which were
1022 # changed in any changeset in the changegroup.
1022 # changed in any changeset in the changegroup.
1023 #
1023 #
1024 # We also remember the first changenode we saw any manifest
1024 # We also remember the first changenode we saw any manifest
1025 # referenced by so we can later determine which changenode 'owns'
1025 # referenced by so we can later determine which changenode 'owns'
1026 # the manifest.
1026 # the manifest.
1027 def collect_manifests_and_files(clnode):
1027 def collect_manifests_and_files(clnode):
1028 c = cl.read(clnode)
1028 c = cl.read(clnode)
1029 for f in c[3]:
1029 for f in c[3]:
1030 # This is to make sure we only have one instance of each
1030 # This is to make sure we only have one instance of each
1031 # filename string for each filename.
1031 # filename string for each filename.
1032 changedfileset.setdefault(f, f)
1032 changedfileset.setdefault(f, f)
1033 msng_mnfst_set.setdefault(c[0], clnode)
1033 msng_mnfst_set.setdefault(c[0], clnode)
1034 return collect_manifests_and_files
1034 return collect_manifests_and_files
1035
1035
1036 # Figure out which manifest nodes (of the ones we think might be part
1036 # Figure out which manifest nodes (of the ones we think might be part
1037 # of the changegroup) the recipient must know about and remove them
1037 # of the changegroup) the recipient must know about and remove them
1038 # from the changegroup.
1038 # from the changegroup.
1039 def prune_manifests():
1039 def prune_manifests():
1040 has_mnfst_set = {}
1040 has_mnfst_set = {}
1041 for n in msng_mnfst_set:
1041 for n in msng_mnfst_set:
1042 # If a 'missing' manifest thinks it belongs to a changenode
1042 # If a 'missing' manifest thinks it belongs to a changenode
1043 # the recipient is assumed to have, obviously the recipient
1043 # the recipient is assumed to have, obviously the recipient
1044 # must have that manifest.
1044 # must have that manifest.
1045 linknode = cl.node(mnfst.linkrev(n))
1045 linknode = cl.node(mnfst.linkrev(n))
1046 if linknode in has_cl_set:
1046 if linknode in has_cl_set:
1047 has_mnfst_set[n] = 1
1047 has_mnfst_set[n] = 1
1048 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1048 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1049
1049
1050 # Use the information collected in collect_manifests_and_files to say
1050 # Use the information collected in collect_manifests_and_files to say
1051 # which changenode any manifestnode belongs to.
1051 # which changenode any manifestnode belongs to.
1052 def lookup_manifest_link(mnfstnode):
1052 def lookup_manifest_link(mnfstnode):
1053 return msng_mnfst_set[mnfstnode]
1053 return msng_mnfst_set[mnfstnode]
1054
1054
1055 # A function generating function that sets up the initial environment
1055 # A function generating function that sets up the initial environment
1056 # the inner function.
1056 # the inner function.
1057 def filenode_collector(changedfiles):
1057 def filenode_collector(changedfiles):
1058 next_rev = [0]
1058 next_rev = [0]
1059 # This gathers information from each manifestnode included in the
1059 # This gathers information from each manifestnode included in the
1060 # changegroup about which filenodes the manifest node references
1060 # changegroup about which filenodes the manifest node references
1061 # so we can include those in the changegroup too.
1061 # so we can include those in the changegroup too.
1062 #
1062 #
1063 # It also remembers which changenode each filenode belongs to. It
1063 # It also remembers which changenode each filenode belongs to. It
1064 # does this by assuming the a filenode belongs to the changenode
1064 # does this by assuming the a filenode belongs to the changenode
1065 # the first manifest that references it belongs to.
1065 # the first manifest that references it belongs to.
1066 def collect_msng_filenodes(mnfstnode):
1066 def collect_msng_filenodes(mnfstnode):
1067 r = mnfst.rev(mnfstnode)
1067 r = mnfst.rev(mnfstnode)
1068 if r == next_rev[0]:
1068 if r == next_rev[0]:
1069 # If the last rev we looked at was the one just previous,
1069 # If the last rev we looked at was the one just previous,
1070 # we only need to see a diff.
1070 # we only need to see a diff.
1071 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1071 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1072 # For each line in the delta
1072 # For each line in the delta
1073 for dline in delta.splitlines():
1073 for dline in delta.splitlines():
1074 # get the filename and filenode for that line
1074 # get the filename and filenode for that line
1075 f, fnode = dline.split('\0')
1075 f, fnode = dline.split('\0')
1076 fnode = bin(fnode[:40])
1076 fnode = bin(fnode[:40])
1077 f = changedfiles.get(f, None)
1077 f = changedfiles.get(f, None)
1078 # And if the file is in the list of files we care
1078 # And if the file is in the list of files we care
1079 # about.
1079 # about.
1080 if f is not None:
1080 if f is not None:
1081 # Get the changenode this manifest belongs to
1081 # Get the changenode this manifest belongs to
1082 clnode = msng_mnfst_set[mnfstnode]
1082 clnode = msng_mnfst_set[mnfstnode]
1083 # Create the set of filenodes for the file if
1083 # Create the set of filenodes for the file if
1084 # there isn't one already.
1084 # there isn't one already.
1085 ndset = msng_filenode_set.setdefault(f, {})
1085 ndset = msng_filenode_set.setdefault(f, {})
1086 # And set the filenode's changelog node to the
1086 # And set the filenode's changelog node to the
1087 # manifest's if it hasn't been set already.
1087 # manifest's if it hasn't been set already.
1088 ndset.setdefault(fnode, clnode)
1088 ndset.setdefault(fnode, clnode)
1089 else:
1089 else:
1090 # Otherwise we need a full manifest.
1090 # Otherwise we need a full manifest.
1091 m = mnfst.read(mnfstnode)
1091 m = mnfst.read(mnfstnode)
1092 # For every file in we care about.
1092 # For every file in we care about.
1093 for f in changedfiles:
1093 for f in changedfiles:
1094 fnode = m.get(f, None)
1094 fnode = m.get(f, None)
1095 # If it's in the manifest
1095 # If it's in the manifest
1096 if fnode is not None:
1096 if fnode is not None:
1097 # See comments above.
1097 # See comments above.
1098 clnode = msng_mnfst_set[mnfstnode]
1098 clnode = msng_mnfst_set[mnfstnode]
1099 ndset = msng_filenode_set.setdefault(f, {})
1099 ndset = msng_filenode_set.setdefault(f, {})
1100 ndset.setdefault(fnode, clnode)
1100 ndset.setdefault(fnode, clnode)
1101 # Remember the revision we hope to see next.
1101 # Remember the revision we hope to see next.
1102 next_rev[0] = r + 1
1102 next_rev[0] = r + 1
1103 return collect_msng_filenodes
1103 return collect_msng_filenodes
1104
1104
1105 # We have a list of filenodes we think we need for a file, lets remove
1105 # We have a list of filenodes we think we need for a file, lets remove
1106 # all those we now the recipient must have.
1106 # all those we now the recipient must have.
1107 def prune_filenodes(f, filerevlog):
1107 def prune_filenodes(f, filerevlog):
1108 msngset = msng_filenode_set[f]
1108 msngset = msng_filenode_set[f]
1109 hasset = {}
1109 hasset = {}
1110 # If a 'missing' filenode thinks it belongs to a changenode we
1110 # If a 'missing' filenode thinks it belongs to a changenode we
1111 # assume the recipient must have, then the recipient must have
1111 # assume the recipient must have, then the recipient must have
1112 # that filenode.
1112 # that filenode.
1113 for n in msngset:
1113 for n in msngset:
1114 clnode = cl.node(filerevlog.linkrev(n))
1114 clnode = cl.node(filerevlog.linkrev(n))
1115 if clnode in has_cl_set:
1115 if clnode in has_cl_set:
1116 hasset[n] = 1
1116 hasset[n] = 1
1117 prune_parents(filerevlog, hasset, msngset)
1117 prune_parents(filerevlog, hasset, msngset)
1118
1118
1119 # A function generator function that sets up the a context for the
1119 # A function generator function that sets up the a context for the
1120 # inner function.
1120 # inner function.
1121 def lookup_filenode_link_func(fname):
1121 def lookup_filenode_link_func(fname):
1122 msngset = msng_filenode_set[fname]
1122 msngset = msng_filenode_set[fname]
1123 # Lookup the changenode the filenode belongs to.
1123 # Lookup the changenode the filenode belongs to.
1124 def lookup_filenode_link(fnode):
1124 def lookup_filenode_link(fnode):
1125 return msngset[fnode]
1125 return msngset[fnode]
1126 return lookup_filenode_link
1126 return lookup_filenode_link
1127
1127
1128 # Now that we have all theses utility functions to help out and
1128 # Now that we have all theses utility functions to help out and
1129 # logically divide up the task, generate the group.
1129 # logically divide up the task, generate the group.
1130 def gengroup():
1130 def gengroup():
1131 # The set of changed files starts empty.
1131 # The set of changed files starts empty.
1132 changedfiles = {}
1132 changedfiles = {}
1133 # Create a changenode group generator that will call our functions
1133 # Create a changenode group generator that will call our functions
1134 # back to lookup the owning changenode and collect information.
1134 # back to lookup the owning changenode and collect information.
1135 group = cl.group(msng_cl_lst, identity,
1135 group = cl.group(msng_cl_lst, identity,
1136 manifest_and_file_collector(changedfiles))
1136 manifest_and_file_collector(changedfiles))
1137 for chnk in group:
1137 for chnk in group:
1138 yield chnk
1138 yield chnk
1139
1139
1140 # The list of manifests has been collected by the generator
1140 # The list of manifests has been collected by the generator
1141 # calling our functions back.
1141 # calling our functions back.
1142 prune_manifests()
1142 prune_manifests()
1143 msng_mnfst_lst = msng_mnfst_set.keys()
1143 msng_mnfst_lst = msng_mnfst_set.keys()
1144 # Sort the manifestnodes by revision number.
1144 # Sort the manifestnodes by revision number.
1145 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1145 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1146 # Create a generator for the manifestnodes that calls our lookup
1146 # Create a generator for the manifestnodes that calls our lookup
1147 # and data collection functions back.
1147 # and data collection functions back.
1148 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1148 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1149 filenode_collector(changedfiles))
1149 filenode_collector(changedfiles))
1150 for chnk in group:
1150 for chnk in group:
1151 yield chnk
1151 yield chnk
1152
1152
1153 # These are no longer needed, dereference and toss the memory for
1153 # These are no longer needed, dereference and toss the memory for
1154 # them.
1154 # them.
1155 msng_mnfst_lst = None
1155 msng_mnfst_lst = None
1156 msng_mnfst_set.clear()
1156 msng_mnfst_set.clear()
1157
1157
1158 changedfiles = changedfiles.keys()
1158 changedfiles = changedfiles.keys()
1159 changedfiles.sort()
1159 changedfiles.sort()
1160 # Go through all our files in order sorted by name.
1160 # Go through all our files in order sorted by name.
1161 for fname in changedfiles:
1161 for fname in changedfiles:
1162 filerevlog = self.file(fname)
1162 filerevlog = self.file(fname)
1163 # Toss out the filenodes that the recipient isn't really
1163 # Toss out the filenodes that the recipient isn't really
1164 # missing.
1164 # missing.
1165 prune_filenodes(fname, filerevlog)
1165 prune_filenodes(fname, filerevlog)
1166 msng_filenode_lst = msng_filenode_set[fname].keys()
1166 msng_filenode_lst = msng_filenode_set[fname].keys()
1167 # If any filenodes are left, generate the group for them,
1167 # If any filenodes are left, generate the group for them,
1168 # otherwise don't bother.
1168 # otherwise don't bother.
1169 if len(msng_filenode_lst) > 0:
1169 if len(msng_filenode_lst) > 0:
1170 yield struct.pack(">l", len(fname) + 4) + fname
1170 yield struct.pack(">l", len(fname) + 4) + fname
1171 # Sort the filenodes by their revision #
1171 # Sort the filenodes by their revision #
1172 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1172 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1173 # Create a group generator and only pass in a changenode
1173 # Create a group generator and only pass in a changenode
1174 # lookup function as we need to collect no information
1174 # lookup function as we need to collect no information
1175 # from filenodes.
1175 # from filenodes.
1176 group = filerevlog.group(msng_filenode_lst,
1176 group = filerevlog.group(msng_filenode_lst,
1177 lookup_filenode_link_func(fname))
1177 lookup_filenode_link_func(fname))
1178 for chnk in group:
1178 for chnk in group:
1179 yield chnk
1179 yield chnk
1180 # Don't need this anymore, toss it to free memory.
1180 # Don't need this anymore, toss it to free memory.
1181 del msng_filenode_set[fname]
1181 del msng_filenode_set[fname]
1182 # Signal that no more groups are left.
1182 # Signal that no more groups are left.
1183 yield struct.pack(">l", 0)
1183 yield struct.pack(">l", 0)
1184
1184
1185 return util.chunkbuffer(gengroup())
1185 return util.chunkbuffer(gengroup())
1186
1186
1187 def changegroup(self, basenodes):
1187 def changegroup(self, basenodes):
1188 """Generate a changegroup of all nodes that we have that a recipient
1188 """Generate a changegroup of all nodes that we have that a recipient
1189 doesn't.
1189 doesn't.
1190
1190
1191 This is much easier than the previous function as we can assume that
1191 This is much easier than the previous function as we can assume that
1192 the recipient has any changenode we aren't sending them."""
1192 the recipient has any changenode we aren't sending them."""
1193 cl = self.changelog
1193 cl = self.changelog
1194 nodes = cl.nodesbetween(basenodes, None)[0]
1194 nodes = cl.nodesbetween(basenodes, None)[0]
1195 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1195 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1196
1196
1197 def identity(x):
1197 def identity(x):
1198 return x
1198 return x
1199
1199
1200 def gennodelst(revlog):
1200 def gennodelst(revlog):
1201 for r in xrange(0, revlog.count()):
1201 for r in xrange(0, revlog.count()):
1202 n = revlog.node(r)
1202 n = revlog.node(r)
1203 if revlog.linkrev(n) in revset:
1203 if revlog.linkrev(n) in revset:
1204 yield n
1204 yield n
1205
1205
1206 def changed_file_collector(changedfileset):
1206 def changed_file_collector(changedfileset):
1207 def collect_changed_files(clnode):
1207 def collect_changed_files(clnode):
1208 c = cl.read(clnode)
1208 c = cl.read(clnode)
1209 for fname in c[3]:
1209 for fname in c[3]:
1210 changedfileset[fname] = 1
1210 changedfileset[fname] = 1
1211 return collect_changed_files
1211 return collect_changed_files
1212
1212
1213 def lookuprevlink_func(revlog):
1213 def lookuprevlink_func(revlog):
1214 def lookuprevlink(n):
1214 def lookuprevlink(n):
1215 return cl.node(revlog.linkrev(n))
1215 return cl.node(revlog.linkrev(n))
1216 return lookuprevlink
1216 return lookuprevlink
1217
1217
1218 def gengroup():
1218 def gengroup():
1219 # construct a list of all changed files
1219 # construct a list of all changed files
1220 changedfiles = {}
1220 changedfiles = {}
1221
1221
1222 for chnk in cl.group(nodes, identity,
1222 for chnk in cl.group(nodes, identity,
1223 changed_file_collector(changedfiles)):
1223 changed_file_collector(changedfiles)):
1224 yield chnk
1224 yield chnk
1225 changedfiles = changedfiles.keys()
1225 changedfiles = changedfiles.keys()
1226 changedfiles.sort()
1226 changedfiles.sort()
1227
1227
1228 mnfst = self.manifest
1228 mnfst = self.manifest
1229 nodeiter = gennodelst(mnfst)
1229 nodeiter = gennodelst(mnfst)
1230 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1230 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1231 yield chnk
1231 yield chnk
1232
1232
1233 for fname in changedfiles:
1233 for fname in changedfiles:
1234 filerevlog = self.file(fname)
1234 filerevlog = self.file(fname)
1235 nodeiter = gennodelst(filerevlog)
1235 nodeiter = gennodelst(filerevlog)
1236 nodeiter = list(nodeiter)
1236 nodeiter = list(nodeiter)
1237 if nodeiter:
1237 if nodeiter:
1238 yield struct.pack(">l", len(fname) + 4) + fname
1238 yield struct.pack(">l", len(fname) + 4) + fname
1239 lookup = lookuprevlink_func(filerevlog)
1239 lookup = lookuprevlink_func(filerevlog)
1240 for chnk in filerevlog.group(nodeiter, lookup):
1240 for chnk in filerevlog.group(nodeiter, lookup):
1241 yield chnk
1241 yield chnk
1242
1242
1243 yield struct.pack(">l", 0)
1243 yield struct.pack(">l", 0)
1244
1244
1245 return util.chunkbuffer(gengroup())
1245 return util.chunkbuffer(gengroup())
1246
1246
1247 def addchangegroup(self, source):
1247 def addchangegroup(self, source):
1248
1248
1249 def getchunk():
1249 def getchunk():
1250 d = source.read(4)
1250 d = source.read(4)
1251 if not d: return ""
1251 if not d: return ""
1252 l = struct.unpack(">l", d)[0]
1252 l = struct.unpack(">l", d)[0]
1253 if l <= 4: return ""
1253 if l <= 4: return ""
1254 d = source.read(l - 4)
1254 d = source.read(l - 4)
1255 if len(d) < l - 4:
1255 if len(d) < l - 4:
1256 raise repo.RepoError(_("premature EOF reading chunk"
1256 raise repo.RepoError(_("premature EOF reading chunk"
1257 " (got %d bytes, expected %d)")
1257 " (got %d bytes, expected %d)")
1258 % (len(d), l - 4))
1258 % (len(d), l - 4))
1259 return d
1259 return d
1260
1260
1261 def getgroup():
1261 def getgroup():
1262 while 1:
1262 while 1:
1263 c = getchunk()
1263 c = getchunk()
1264 if not c: break
1264 if not c: break
1265 yield c
1265 yield c
1266
1266
1267 def csmap(x):
1267 def csmap(x):
1268 self.ui.debug(_("add changeset %s\n") % short(x))
1268 self.ui.debug(_("add changeset %s\n") % short(x))
1269 return self.changelog.count()
1269 return self.changelog.count()
1270
1270
1271 def revmap(x):
1271 def revmap(x):
1272 return self.changelog.rev(x)
1272 return self.changelog.rev(x)
1273
1273
1274 if not source: return
1274 if not source: return
1275 changesets = files = revisions = 0
1275 changesets = files = revisions = 0
1276
1276
1277 tr = self.transaction()
1277 tr = self.transaction()
1278
1278
1279 oldheads = len(self.changelog.heads())
1279 oldheads = len(self.changelog.heads())
1280
1280
1281 # pull off the changeset group
1281 # pull off the changeset group
1282 self.ui.status(_("adding changesets\n"))
1282 self.ui.status(_("adding changesets\n"))
1283 co = self.changelog.tip()
1283 co = self.changelog.tip()
1284 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1284 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1285 cnr, cor = map(self.changelog.rev, (cn, co))
1285 cnr, cor = map(self.changelog.rev, (cn, co))
1286 if cn == nullid:
1286 if cn == nullid:
1287 cnr = cor
1287 cnr = cor
1288 changesets = cnr - cor
1288 changesets = cnr - cor
1289
1289
1290 # pull off the manifest group
1290 # pull off the manifest group
1291 self.ui.status(_("adding manifests\n"))
1291 self.ui.status(_("adding manifests\n"))
1292 mm = self.manifest.tip()
1292 mm = self.manifest.tip()
1293 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1293 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1294
1294
1295 # process the files
1295 # process the files
1296 self.ui.status(_("adding file changes\n"))
1296 self.ui.status(_("adding file changes\n"))
1297 while 1:
1297 while 1:
1298 f = getchunk()
1298 f = getchunk()
1299 if not f: break
1299 if not f: break
1300 self.ui.debug(_("adding %s revisions\n") % f)
1300 self.ui.debug(_("adding %s revisions\n") % f)
1301 fl = self.file(f)
1301 fl = self.file(f)
1302 o = fl.count()
1302 o = fl.count()
1303 n = fl.addgroup(getgroup(), revmap, tr)
1303 n = fl.addgroup(getgroup(), revmap, tr)
1304 revisions += fl.count() - o
1304 revisions += fl.count() - o
1305 files += 1
1305 files += 1
1306
1306
1307 newheads = len(self.changelog.heads())
1307 newheads = len(self.changelog.heads())
1308 heads = ""
1308 heads = ""
1309 if oldheads and newheads > oldheads:
1309 if oldheads and newheads > oldheads:
1310 heads = _(" (+%d heads)") % (newheads - oldheads)
1310 heads = _(" (+%d heads)") % (newheads - oldheads)
1311
1311
1312 self.ui.status(_("added %d changesets"
1312 self.ui.status(_("added %d changesets"
1313 " with %d changes to %d files%s\n")
1313 " with %d changes to %d files%s\n")
1314 % (changesets, revisions, files, heads))
1314 % (changesets, revisions, files, heads))
1315
1315
1316 tr.close()
1316 tr.close()
1317
1317
1318 if changesets > 0:
1318 if changesets > 0:
1319 if not self.hook("changegroup",
1319 if not self.hook("changegroup",
1320 node=hex(self.changelog.node(cor+1))):
1320 node=hex(self.changelog.node(cor+1))):
1321 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1321 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1322 return 1
1322 return 1
1323
1323
1324 for i in range(cor + 1, cnr + 1):
1324 for i in range(cor + 1, cnr + 1):
1325 self.hook("commit", node=hex(self.changelog.node(i)))
1325 self.hook("commit", node=hex(self.changelog.node(i)))
1326
1326
1327 return
1327 return
1328
1328
1329 def update(self, node, allow=False, force=False, choose=None,
1329 def update(self, node, allow=False, force=False, choose=None,
1330 moddirstate=True):
1330 moddirstate=True):
1331 pl = self.dirstate.parents()
1331 pl = self.dirstate.parents()
1332 if not force and pl[1] != nullid:
1332 if not force and pl[1] != nullid:
1333 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1333 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1334 return 1
1334 return 1
1335
1335
1336 p1, p2 = pl[0], node
1336 p1, p2 = pl[0], node
1337 pa = self.changelog.ancestor(p1, p2)
1337 pa = self.changelog.ancestor(p1, p2)
1338 m1n = self.changelog.read(p1)[0]
1338 m1n = self.changelog.read(p1)[0]
1339 m2n = self.changelog.read(p2)[0]
1339 m2n = self.changelog.read(p2)[0]
1340 man = self.manifest.ancestor(m1n, m2n)
1340 man = self.manifest.ancestor(m1n, m2n)
1341 m1 = self.manifest.read(m1n)
1341 m1 = self.manifest.read(m1n)
1342 mf1 = self.manifest.readflags(m1n)
1342 mf1 = self.manifest.readflags(m1n)
1343 m2 = self.manifest.read(m2n)
1343 m2 = self.manifest.read(m2n)
1344 mf2 = self.manifest.readflags(m2n)
1344 mf2 = self.manifest.readflags(m2n)
1345 ma = self.manifest.read(man)
1345 ma = self.manifest.read(man)
1346 mfa = self.manifest.readflags(man)
1346 mfa = self.manifest.readflags(man)
1347
1347
1348 (c, a, d, u) = self.changes()
1348 (c, a, d, u) = self.changes()
1349
1349
1350 # is this a jump, or a merge? i.e. is there a linear path
1350 # is this a jump, or a merge? i.e. is there a linear path
1351 # from p1 to p2?
1351 # from p1 to p2?
1352 linear_path = (pa == p1 or pa == p2)
1352 linear_path = (pa == p1 or pa == p2)
1353
1353
1354 # resolve the manifest to determine which files
1354 # resolve the manifest to determine which files
1355 # we care about merging
1355 # we care about merging
1356 self.ui.note(_("resolving manifests\n"))
1356 self.ui.note(_("resolving manifests\n"))
1357 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1357 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1358 (force, allow, moddirstate, linear_path))
1358 (force, allow, moddirstate, linear_path))
1359 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1359 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1360 (short(man), short(m1n), short(m2n)))
1360 (short(man), short(m1n), short(m2n)))
1361
1361
1362 merge = {}
1362 merge = {}
1363 get = {}
1363 get = {}
1364 remove = []
1364 remove = []
1365
1365
1366 # construct a working dir manifest
1366 # construct a working dir manifest
1367 mw = m1.copy()
1367 mw = m1.copy()
1368 mfw = mf1.copy()
1368 mfw = mf1.copy()
1369 umap = dict.fromkeys(u)
1369 umap = dict.fromkeys(u)
1370
1370
1371 for f in a + c + u:
1371 for f in a + c + u:
1372 mw[f] = ""
1372 mw[f] = ""
1373 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1373 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1374
1374
1375 for f in d:
1375 for f in d:
1376 if f in mw: del mw[f]
1376 if f in mw: del mw[f]
1377
1377
1378 # If we're jumping between revisions (as opposed to merging),
1378 # If we're jumping between revisions (as opposed to merging),
1379 # and if neither the working directory nor the target rev has
1379 # and if neither the working directory nor the target rev has
1380 # the file, then we need to remove it from the dirstate, to
1380 # the file, then we need to remove it from the dirstate, to
1381 # prevent the dirstate from listing the file when it is no
1381 # prevent the dirstate from listing the file when it is no
1382 # longer in the manifest.
1382 # longer in the manifest.
1383 if moddirstate and linear_path and f not in m2:
1383 if moddirstate and linear_path and f not in m2:
1384 self.dirstate.forget((f,))
1384 self.dirstate.forget((f,))
1385
1385
1386 # Compare manifests
1386 # Compare manifests
1387 for f, n in mw.iteritems():
1387 for f, n in mw.iteritems():
1388 if choose and not choose(f): continue
1388 if choose and not choose(f): continue
1389 if f in m2:
1389 if f in m2:
1390 s = 0
1390 s = 0
1391
1391
1392 # is the wfile new since m1, and match m2?
1392 # is the wfile new since m1, and match m2?
1393 if f not in m1:
1393 if f not in m1:
1394 t1 = self.wread(f)
1394 t1 = self.wread(f)
1395 t2 = self.file(f).read(m2[f])
1395 t2 = self.file(f).read(m2[f])
1396 if cmp(t1, t2) == 0:
1396 if cmp(t1, t2) == 0:
1397 n = m2[f]
1397 n = m2[f]
1398 del t1, t2
1398 del t1, t2
1399
1399
1400 # are files different?
1400 # are files different?
1401 if n != m2[f]:
1401 if n != m2[f]:
1402 a = ma.get(f, nullid)
1402 a = ma.get(f, nullid)
1403 # are both different from the ancestor?
1403 # are both different from the ancestor?
1404 if n != a and m2[f] != a:
1404 if n != a and m2[f] != a:
1405 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1405 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1406 # merge executable bits
1406 # merge executable bits
1407 # "if we changed or they changed, change in merge"
1407 # "if we changed or they changed, change in merge"
1408 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1408 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1409 mode = ((a^b) | (a^c)) ^ a
1409 mode = ((a^b) | (a^c)) ^ a
1410 merge[f] = (m1.get(f, nullid), m2[f], mode)
1410 merge[f] = (m1.get(f, nullid), m2[f], mode)
1411 s = 1
1411 s = 1
1412 # are we clobbering?
1412 # are we clobbering?
1413 # is remote's version newer?
1413 # is remote's version newer?
1414 # or are we going back in time?
1414 # or are we going back in time?
1415 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1415 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1416 self.ui.debug(_(" remote %s is newer, get\n") % f)
1416 self.ui.debug(_(" remote %s is newer, get\n") % f)
1417 get[f] = m2[f]
1417 get[f] = m2[f]
1418 s = 1
1418 s = 1
1419 elif f in umap:
1419 elif f in umap:
1420 # this unknown file is the same as the checkout
1420 # this unknown file is the same as the checkout
1421 get[f] = m2[f]
1421 get[f] = m2[f]
1422
1422
1423 if not s and mfw[f] != mf2[f]:
1423 if not s and mfw[f] != mf2[f]:
1424 if force:
1424 if force:
1425 self.ui.debug(_(" updating permissions for %s\n") % f)
1425 self.ui.debug(_(" updating permissions for %s\n") % f)
1426 util.set_exec(self.wjoin(f), mf2[f])
1426 util.set_exec(self.wjoin(f), mf2[f])
1427 else:
1427 else:
1428 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1428 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1429 mode = ((a^b) | (a^c)) ^ a
1429 mode = ((a^b) | (a^c)) ^ a
1430 if mode != b:
1430 if mode != b:
1431 self.ui.debug(_(" updating permissions for %s\n") % f)
1431 self.ui.debug(_(" updating permissions for %s\n") % f)
1432 util.set_exec(self.wjoin(f), mode)
1432 util.set_exec(self.wjoin(f), mode)
1433 del m2[f]
1433 del m2[f]
1434 elif f in ma:
1434 elif f in ma:
1435 if n != ma[f]:
1435 if n != ma[f]:
1436 r = _("d")
1436 r = _("d")
1437 if not force and (linear_path or allow):
1437 if not force and (linear_path or allow):
1438 r = self.ui.prompt(
1438 r = self.ui.prompt(
1439 (_(" local changed %s which remote deleted\n") % f) +
1439 (_(" local changed %s which remote deleted\n") % f) +
1440 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1440 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1441 if r == _("d"):
1441 if r == _("d"):
1442 remove.append(f)
1442 remove.append(f)
1443 else:
1443 else:
1444 self.ui.debug(_("other deleted %s\n") % f)
1444 self.ui.debug(_("other deleted %s\n") % f)
1445 remove.append(f) # other deleted it
1445 remove.append(f) # other deleted it
1446 else:
1446 else:
1447 # file is created on branch or in working directory
1447 # file is created on branch or in working directory
1448 if force and f not in umap:
1448 if force and f not in umap:
1449 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1449 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1450 remove.append(f)
1450 remove.append(f)
1451 elif n == m1.get(f, nullid): # same as parent
1451 elif n == m1.get(f, nullid): # same as parent
1452 if p2 == pa: # going backwards?
1452 if p2 == pa: # going backwards?
1453 self.ui.debug(_("remote deleted %s\n") % f)
1453 self.ui.debug(_("remote deleted %s\n") % f)
1454 remove.append(f)
1454 remove.append(f)
1455 else:
1455 else:
1456 self.ui.debug(_("local modified %s, keeping\n") % f)
1456 self.ui.debug(_("local modified %s, keeping\n") % f)
1457 else:
1457 else:
1458 self.ui.debug(_("working dir created %s, keeping\n") % f)
1458 self.ui.debug(_("working dir created %s, keeping\n") % f)
1459
1459
1460 for f, n in m2.iteritems():
1460 for f, n in m2.iteritems():
1461 if choose and not choose(f): continue
1461 if choose and not choose(f): continue
1462 if f[0] == "/": continue
1462 if f[0] == "/": continue
1463 if f in ma and n != ma[f]:
1463 if f in ma and n != ma[f]:
1464 r = _("k")
1464 r = _("k")
1465 if not force and (linear_path or allow):
1465 if not force and (linear_path or allow):
1466 r = self.ui.prompt(
1466 r = self.ui.prompt(
1467 (_("remote changed %s which local deleted\n") % f) +
1467 (_("remote changed %s which local deleted\n") % f) +
1468 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1468 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1469 if r == _("k"): get[f] = n
1469 if r == _("k"): get[f] = n
1470 elif f not in ma:
1470 elif f not in ma:
1471 self.ui.debug(_("remote created %s\n") % f)
1471 self.ui.debug(_("remote created %s\n") % f)
1472 get[f] = n
1472 get[f] = n
1473 else:
1473 else:
1474 if force or p2 == pa: # going backwards?
1474 if force or p2 == pa: # going backwards?
1475 self.ui.debug(_("local deleted %s, recreating\n") % f)
1475 self.ui.debug(_("local deleted %s, recreating\n") % f)
1476 get[f] = n
1476 get[f] = n
1477 else:
1477 else:
1478 self.ui.debug(_("local deleted %s\n") % f)
1478 self.ui.debug(_("local deleted %s\n") % f)
1479
1479
1480 del mw, m1, m2, ma
1480 del mw, m1, m2, ma
1481
1481
1482 if force:
1482 if force:
1483 for f in merge:
1483 for f in merge:
1484 get[f] = merge[f][1]
1484 get[f] = merge[f][1]
1485 merge = {}
1485 merge = {}
1486
1486
1487 if linear_path or force:
1487 if linear_path or force:
1488 # we don't need to do any magic, just jump to the new rev
1488 # we don't need to do any magic, just jump to the new rev
1489 branch_merge = False
1489 branch_merge = False
1490 p1, p2 = p2, nullid
1490 p1, p2 = p2, nullid
1491 else:
1491 else:
1492 if not allow:
1492 if not allow:
1493 self.ui.status(_("this update spans a branch"
1493 self.ui.status(_("this update spans a branch"
1494 " affecting the following files:\n"))
1494 " affecting the following files:\n"))
1495 fl = merge.keys() + get.keys()
1495 fl = merge.keys() + get.keys()
1496 fl.sort()
1496 fl.sort()
1497 for f in fl:
1497 for f in fl:
1498 cf = ""
1498 cf = ""
1499 if f in merge: cf = _(" (resolve)")
1499 if f in merge: cf = _(" (resolve)")
1500 self.ui.status(" %s%s\n" % (f, cf))
1500 self.ui.status(" %s%s\n" % (f, cf))
1501 self.ui.warn(_("aborting update spanning branches!\n"))
1501 self.ui.warn(_("aborting update spanning branches!\n"))
1502 self.ui.status(_("(use update -m to merge across branches"
1502 self.ui.status(_("(use update -m to merge across branches"
1503 " or -C to lose changes)\n"))
1503 " or -C to lose changes)\n"))
1504 return 1
1504 return 1
1505 branch_merge = True
1505 branch_merge = True
1506
1506
1507 if moddirstate:
1508 self.dirstate.setparents(p1, p2)
1509
1510 # get the files we don't need to change
1507 # get the files we don't need to change
1511 files = get.keys()
1508 files = get.keys()
1512 files.sort()
1509 files.sort()
1513 for f in files:
1510 for f in files:
1514 if f[0] == "/": continue
1511 if f[0] == "/": continue
1515 self.ui.note(_("getting %s\n") % f)
1512 self.ui.note(_("getting %s\n") % f)
1516 t = self.file(f).read(get[f])
1513 t = self.file(f).read(get[f])
1517 self.wwrite(f, t)
1514 self.wwrite(f, t)
1518 util.set_exec(self.wjoin(f), mf2[f])
1515 util.set_exec(self.wjoin(f), mf2[f])
1519 if moddirstate:
1516 if moddirstate:
1520 if branch_merge:
1517 if branch_merge:
1521 self.dirstate.update([f], 'n', st_mtime=-1)
1518 self.dirstate.update([f], 'n', st_mtime=-1)
1522 else:
1519 else:
1523 self.dirstate.update([f], 'n')
1520 self.dirstate.update([f], 'n')
1524
1521
1525 # merge the tricky bits
1522 # merge the tricky bits
1526 files = merge.keys()
1523 files = merge.keys()
1527 files.sort()
1524 files.sort()
1528 for f in files:
1525 for f in files:
1529 self.ui.status(_("merging %s\n") % f)
1526 self.ui.status(_("merging %s\n") % f)
1530 my, other, flag = merge[f]
1527 my, other, flag = merge[f]
1531 self.merge3(f, my, other)
1528 self.merge3(f, my, other)
1532 util.set_exec(self.wjoin(f), flag)
1529 util.set_exec(self.wjoin(f), flag)
1533 if moddirstate:
1530 if moddirstate:
1534 if branch_merge:
1531 if branch_merge:
1535 # We've done a branch merge, mark this file as merged
1532 # We've done a branch merge, mark this file as merged
1536 # so that we properly record the merger later
1533 # so that we properly record the merger later
1537 self.dirstate.update([f], 'm')
1534 self.dirstate.update([f], 'm')
1538 else:
1535 else:
1539 # We've update-merged a locally modified file, so
1536 # We've update-merged a locally modified file, so
1540 # we set the dirstate to emulate a normal checkout
1537 # we set the dirstate to emulate a normal checkout
1541 # of that file some time in the past. Thus our
1538 # of that file some time in the past. Thus our
1542 # merge will appear as a normal local file
1539 # merge will appear as a normal local file
1543 # modification.
1540 # modification.
1544 f_len = len(self.file(f).read(other))
1541 f_len = len(self.file(f).read(other))
1545 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1542 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1546
1543
1547 remove.sort()
1544 remove.sort()
1548 for f in remove:
1545 for f in remove:
1549 self.ui.note(_("removing %s\n") % f)
1546 self.ui.note(_("removing %s\n") % f)
1550 try:
1547 try:
1551 util.unlink(self.wjoin(f))
1548 util.unlink(self.wjoin(f))
1552 except OSError, inst:
1549 except OSError, inst:
1553 if inst.errno != errno.ENOENT:
1550 if inst.errno != errno.ENOENT:
1554 self.ui.warn(_("update failed to remove %s: %s!\n") %
1551 self.ui.warn(_("update failed to remove %s: %s!\n") %
1555 (f, inst.strerror))
1552 (f, inst.strerror))
1556 if moddirstate:
1553 if moddirstate:
1557 if branch_merge:
1554 if branch_merge:
1558 self.dirstate.update(remove, 'r')
1555 self.dirstate.update(remove, 'r')
1559 else:
1556 else:
1560 self.dirstate.forget(remove)
1557 self.dirstate.forget(remove)
1561
1558
1559 if moddirstate:
1560 self.dirstate.setparents(p1, p2)
1561
1562 def merge3(self, fn, my, other):
1562 def merge3(self, fn, my, other):
1563 """perform a 3-way merge in the working directory"""
1563 """perform a 3-way merge in the working directory"""
1564
1564
1565 def temp(prefix, node):
1565 def temp(prefix, node):
1566 pre = "%s~%s." % (os.path.basename(fn), prefix)
1566 pre = "%s~%s." % (os.path.basename(fn), prefix)
1567 (fd, name) = tempfile.mkstemp("", pre)
1567 (fd, name) = tempfile.mkstemp("", pre)
1568 f = os.fdopen(fd, "wb")
1568 f = os.fdopen(fd, "wb")
1569 self.wwrite(fn, fl.read(node), f)
1569 self.wwrite(fn, fl.read(node), f)
1570 f.close()
1570 f.close()
1571 return name
1571 return name
1572
1572
1573 fl = self.file(fn)
1573 fl = self.file(fn)
1574 base = fl.ancestor(my, other)
1574 base = fl.ancestor(my, other)
1575 a = self.wjoin(fn)
1575 a = self.wjoin(fn)
1576 b = temp("base", base)
1576 b = temp("base", base)
1577 c = temp("other", other)
1577 c = temp("other", other)
1578
1578
1579 self.ui.note(_("resolving %s\n") % fn)
1579 self.ui.note(_("resolving %s\n") % fn)
1580 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1580 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1581 (fn, short(my), short(other), short(base)))
1581 (fn, short(my), short(other), short(base)))
1582
1582
1583 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1583 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1584 or "hgmerge")
1584 or "hgmerge")
1585 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1585 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1586 if r:
1586 if r:
1587 self.ui.warn(_("merging %s failed!\n") % fn)
1587 self.ui.warn(_("merging %s failed!\n") % fn)
1588
1588
1589 os.unlink(b)
1589 os.unlink(b)
1590 os.unlink(c)
1590 os.unlink(c)
1591
1591
1592 def verify(self):
1592 def verify(self):
1593 filelinkrevs = {}
1593 filelinkrevs = {}
1594 filenodes = {}
1594 filenodes = {}
1595 changesets = revisions = files = 0
1595 changesets = revisions = files = 0
1596 errors = [0]
1596 errors = [0]
1597 neededmanifests = {}
1597 neededmanifests = {}
1598
1598
1599 def err(msg):
1599 def err(msg):
1600 self.ui.warn(msg + "\n")
1600 self.ui.warn(msg + "\n")
1601 errors[0] += 1
1601 errors[0] += 1
1602
1602
1603 seen = {}
1603 seen = {}
1604 self.ui.status(_("checking changesets\n"))
1604 self.ui.status(_("checking changesets\n"))
1605 d = self.changelog.checksize()
1605 d = self.changelog.checksize()
1606 if d:
1606 if d:
1607 err(_("changeset data short %d bytes") % d)
1607 err(_("changeset data short %d bytes") % d)
1608 for i in range(self.changelog.count()):
1608 for i in range(self.changelog.count()):
1609 changesets += 1
1609 changesets += 1
1610 n = self.changelog.node(i)
1610 n = self.changelog.node(i)
1611 l = self.changelog.linkrev(n)
1611 l = self.changelog.linkrev(n)
1612 if l != i:
1612 if l != i:
1613 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1613 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1614 if n in seen:
1614 if n in seen:
1615 err(_("duplicate changeset at revision %d") % i)
1615 err(_("duplicate changeset at revision %d") % i)
1616 seen[n] = 1
1616 seen[n] = 1
1617
1617
1618 for p in self.changelog.parents(n):
1618 for p in self.changelog.parents(n):
1619 if p not in self.changelog.nodemap:
1619 if p not in self.changelog.nodemap:
1620 err(_("changeset %s has unknown parent %s") %
1620 err(_("changeset %s has unknown parent %s") %
1621 (short(n), short(p)))
1621 (short(n), short(p)))
1622 try:
1622 try:
1623 changes = self.changelog.read(n)
1623 changes = self.changelog.read(n)
1624 except KeyboardInterrupt:
1624 except KeyboardInterrupt:
1625 self.ui.warn(_("interrupted"))
1625 self.ui.warn(_("interrupted"))
1626 raise
1626 raise
1627 except Exception, inst:
1627 except Exception, inst:
1628 err(_("unpacking changeset %s: %s") % (short(n), inst))
1628 err(_("unpacking changeset %s: %s") % (short(n), inst))
1629
1629
1630 neededmanifests[changes[0]] = n
1630 neededmanifests[changes[0]] = n
1631
1631
1632 for f in changes[3]:
1632 for f in changes[3]:
1633 filelinkrevs.setdefault(f, []).append(i)
1633 filelinkrevs.setdefault(f, []).append(i)
1634
1634
1635 seen = {}
1635 seen = {}
1636 self.ui.status(_("checking manifests\n"))
1636 self.ui.status(_("checking manifests\n"))
1637 d = self.manifest.checksize()
1637 d = self.manifest.checksize()
1638 if d:
1638 if d:
1639 err(_("manifest data short %d bytes") % d)
1639 err(_("manifest data short %d bytes") % d)
1640 for i in range(self.manifest.count()):
1640 for i in range(self.manifest.count()):
1641 n = self.manifest.node(i)
1641 n = self.manifest.node(i)
1642 l = self.manifest.linkrev(n)
1642 l = self.manifest.linkrev(n)
1643
1643
1644 if l < 0 or l >= self.changelog.count():
1644 if l < 0 or l >= self.changelog.count():
1645 err(_("bad manifest link (%d) at revision %d") % (l, i))
1645 err(_("bad manifest link (%d) at revision %d") % (l, i))
1646
1646
1647 if n in neededmanifests:
1647 if n in neededmanifests:
1648 del neededmanifests[n]
1648 del neededmanifests[n]
1649
1649
1650 if n in seen:
1650 if n in seen:
1651 err(_("duplicate manifest at revision %d") % i)
1651 err(_("duplicate manifest at revision %d") % i)
1652
1652
1653 seen[n] = 1
1653 seen[n] = 1
1654
1654
1655 for p in self.manifest.parents(n):
1655 for p in self.manifest.parents(n):
1656 if p not in self.manifest.nodemap:
1656 if p not in self.manifest.nodemap:
1657 err(_("manifest %s has unknown parent %s") %
1657 err(_("manifest %s has unknown parent %s") %
1658 (short(n), short(p)))
1658 (short(n), short(p)))
1659
1659
1660 try:
1660 try:
1661 delta = mdiff.patchtext(self.manifest.delta(n))
1661 delta = mdiff.patchtext(self.manifest.delta(n))
1662 except KeyboardInterrupt:
1662 except KeyboardInterrupt:
1663 self.ui.warn(_("interrupted"))
1663 self.ui.warn(_("interrupted"))
1664 raise
1664 raise
1665 except Exception, inst:
1665 except Exception, inst:
1666 err(_("unpacking manifest %s: %s") % (short(n), inst))
1666 err(_("unpacking manifest %s: %s") % (short(n), inst))
1667
1667
1668 ff = [ l.split('\0') for l in delta.splitlines() ]
1668 ff = [ l.split('\0') for l in delta.splitlines() ]
1669 for f, fn in ff:
1669 for f, fn in ff:
1670 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1670 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1671
1671
1672 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1672 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1673
1673
1674 for m,c in neededmanifests.items():
1674 for m,c in neededmanifests.items():
1675 err(_("Changeset %s refers to unknown manifest %s") %
1675 err(_("Changeset %s refers to unknown manifest %s") %
1676 (short(m), short(c)))
1676 (short(m), short(c)))
1677 del neededmanifests
1677 del neededmanifests
1678
1678
1679 for f in filenodes:
1679 for f in filenodes:
1680 if f not in filelinkrevs:
1680 if f not in filelinkrevs:
1681 err(_("file %s in manifest but not in changesets") % f)
1681 err(_("file %s in manifest but not in changesets") % f)
1682
1682
1683 for f in filelinkrevs:
1683 for f in filelinkrevs:
1684 if f not in filenodes:
1684 if f not in filenodes:
1685 err(_("file %s in changeset but not in manifest") % f)
1685 err(_("file %s in changeset but not in manifest") % f)
1686
1686
1687 self.ui.status(_("checking files\n"))
1687 self.ui.status(_("checking files\n"))
1688 ff = filenodes.keys()
1688 ff = filenodes.keys()
1689 ff.sort()
1689 ff.sort()
1690 for f in ff:
1690 for f in ff:
1691 if f == "/dev/null": continue
1691 if f == "/dev/null": continue
1692 files += 1
1692 files += 1
1693 fl = self.file(f)
1693 fl = self.file(f)
1694 d = fl.checksize()
1694 d = fl.checksize()
1695 if d:
1695 if d:
1696 err(_("%s file data short %d bytes") % (f, d))
1696 err(_("%s file data short %d bytes") % (f, d))
1697
1697
1698 nodes = { nullid: 1 }
1698 nodes = { nullid: 1 }
1699 seen = {}
1699 seen = {}
1700 for i in range(fl.count()):
1700 for i in range(fl.count()):
1701 revisions += 1
1701 revisions += 1
1702 n = fl.node(i)
1702 n = fl.node(i)
1703
1703
1704 if n in seen:
1704 if n in seen:
1705 err(_("%s: duplicate revision %d") % (f, i))
1705 err(_("%s: duplicate revision %d") % (f, i))
1706 if n not in filenodes[f]:
1706 if n not in filenodes[f]:
1707 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1707 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1708 else:
1708 else:
1709 del filenodes[f][n]
1709 del filenodes[f][n]
1710
1710
1711 flr = fl.linkrev(n)
1711 flr = fl.linkrev(n)
1712 if flr not in filelinkrevs[f]:
1712 if flr not in filelinkrevs[f]:
1713 err(_("%s:%s points to unexpected changeset %d")
1713 err(_("%s:%s points to unexpected changeset %d")
1714 % (f, short(n), flr))
1714 % (f, short(n), flr))
1715 else:
1715 else:
1716 filelinkrevs[f].remove(flr)
1716 filelinkrevs[f].remove(flr)
1717
1717
1718 # verify contents
1718 # verify contents
1719 try:
1719 try:
1720 t = fl.read(n)
1720 t = fl.read(n)
1721 except KeyboardInterrupt:
1721 except KeyboardInterrupt:
1722 self.ui.warn(_("interrupted"))
1722 self.ui.warn(_("interrupted"))
1723 raise
1723 raise
1724 except Exception, inst:
1724 except Exception, inst:
1725 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1725 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1726
1726
1727 # verify parents
1727 # verify parents
1728 (p1, p2) = fl.parents(n)
1728 (p1, p2) = fl.parents(n)
1729 if p1 not in nodes:
1729 if p1 not in nodes:
1730 err(_("file %s:%s unknown parent 1 %s") %
1730 err(_("file %s:%s unknown parent 1 %s") %
1731 (f, short(n), short(p1)))
1731 (f, short(n), short(p1)))
1732 if p2 not in nodes:
1732 if p2 not in nodes:
1733 err(_("file %s:%s unknown parent 2 %s") %
1733 err(_("file %s:%s unknown parent 2 %s") %
1734 (f, short(n), short(p1)))
1734 (f, short(n), short(p1)))
1735 nodes[n] = 1
1735 nodes[n] = 1
1736
1736
1737 # cross-check
1737 # cross-check
1738 for node in filenodes[f]:
1738 for node in filenodes[f]:
1739 err(_("node %s in manifests not in %s") % (hex(node), f))
1739 err(_("node %s in manifests not in %s") % (hex(node), f))
1740
1740
1741 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1741 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1742 (files, changesets, revisions))
1742 (files, changesets, revisions))
1743
1743
1744 if errors[0]:
1744 if errors[0]:
1745 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1745 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1746 return 1
1746 return 1
General Comments 0
You need to be logged in to leave comments. Login now