##// END OF EJS Templates
Restored old behaviour for 'hg co' and 'hg co -C' for removed files.
Thomas Arendsen Hein -
r1621:ee16f061 default
parent child Browse files
Show More
@@ -1,1815 +1,1815 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository(object):
15 class localrepository(object):
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp:
22 if p == oldp:
23 raise repo.RepoError(_("no repo found"))
23 raise repo.RepoError(_("no repo found"))
24 path = p
24 path = p
25 self.path = os.path.join(path, ".hg")
25 self.path = os.path.join(path, ".hg")
26
26
27 if not create and not os.path.isdir(self.path):
27 if not create and not os.path.isdir(self.path):
28 raise repo.RepoError(_("repository %s not found") % path)
28 raise repo.RepoError(_("repository %s not found") % path)
29
29
30 self.root = os.path.abspath(path)
30 self.root = os.path.abspath(path)
31 self.ui = ui
31 self.ui = ui
32 self.opener = util.opener(self.path)
32 self.opener = util.opener(self.path)
33 self.wopener = util.opener(self.root)
33 self.wopener = util.opener(self.root)
34 self.manifest = manifest.manifest(self.opener)
34 self.manifest = manifest.manifest(self.opener)
35 self.changelog = changelog.changelog(self.opener)
35 self.changelog = changelog.changelog(self.opener)
36 self.tagscache = None
36 self.tagscache = None
37 self.nodetagscache = None
37 self.nodetagscache = None
38 self.encodepats = None
38 self.encodepats = None
39 self.decodepats = None
39 self.decodepats = None
40
40
41 if create:
41 if create:
42 os.mkdir(self.path)
42 os.mkdir(self.path)
43 os.mkdir(self.join("data"))
43 os.mkdir(self.join("data"))
44
44
45 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
46 try:
46 try:
47 self.ui.readconfig(self.join("hgrc"))
47 self.ui.readconfig(self.join("hgrc"))
48 except IOError:
48 except IOError:
49 pass
49 pass
50
50
51 def hook(self, name, **args):
51 def hook(self, name, **args):
52 def runhook(name, cmd):
52 def runhook(name, cmd):
53 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
53 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
54 old = {}
54 old = {}
55 for k, v in args.items():
55 for k, v in args.items():
56 k = k.upper()
56 k = k.upper()
57 old[k] = os.environ.get(k, None)
57 old[k] = os.environ.get(k, None)
58 os.environ[k] = v
58 os.environ[k] = v
59
59
60 # Hooks run in the repository root
60 # Hooks run in the repository root
61 olddir = os.getcwd()
61 olddir = os.getcwd()
62 os.chdir(self.root)
62 os.chdir(self.root)
63 r = os.system(cmd)
63 r = os.system(cmd)
64 os.chdir(olddir)
64 os.chdir(olddir)
65
65
66 for k, v in old.items():
66 for k, v in old.items():
67 if v != None:
67 if v != None:
68 os.environ[k] = v
68 os.environ[k] = v
69 else:
69 else:
70 del os.environ[k]
70 del os.environ[k]
71
71
72 if r:
72 if r:
73 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
74 (name, r))
74 (name, r))
75 return False
75 return False
76 return True
76 return True
77
77
78 r = True
78 r = True
79 for hname, cmd in self.ui.configitems("hooks"):
79 for hname, cmd in self.ui.configitems("hooks"):
80 s = hname.split(".")
80 s = hname.split(".")
81 if s[0] == name and cmd:
81 if s[0] == name and cmd:
82 r = runhook(hname, cmd) and r
82 r = runhook(hname, cmd) and r
83 return r
83 return r
84
84
85 def tags(self):
85 def tags(self):
86 '''return a mapping of tag to node'''
86 '''return a mapping of tag to node'''
87 if not self.tagscache:
87 if not self.tagscache:
88 self.tagscache = {}
88 self.tagscache = {}
89 def addtag(self, k, n):
89 def addtag(self, k, n):
90 try:
90 try:
91 bin_n = bin(n)
91 bin_n = bin(n)
92 except TypeError:
92 except TypeError:
93 bin_n = ''
93 bin_n = ''
94 self.tagscache[k.strip()] = bin_n
94 self.tagscache[k.strip()] = bin_n
95
95
96 try:
96 try:
97 # read each head of the tags file, ending with the tip
97 # read each head of the tags file, ending with the tip
98 # and add each tag found to the map, with "newer" ones
98 # and add each tag found to the map, with "newer" ones
99 # taking precedence
99 # taking precedence
100 fl = self.file(".hgtags")
100 fl = self.file(".hgtags")
101 h = fl.heads()
101 h = fl.heads()
102 h.reverse()
102 h.reverse()
103 for r in h:
103 for r in h:
104 for l in fl.read(r).splitlines():
104 for l in fl.read(r).splitlines():
105 if l:
105 if l:
106 n, k = l.split(" ", 1)
106 n, k = l.split(" ", 1)
107 addtag(self, k, n)
107 addtag(self, k, n)
108 except KeyError:
108 except KeyError:
109 pass
109 pass
110
110
111 try:
111 try:
112 f = self.opener("localtags")
112 f = self.opener("localtags")
113 for l in f:
113 for l in f:
114 n, k = l.split(" ", 1)
114 n, k = l.split(" ", 1)
115 addtag(self, k, n)
115 addtag(self, k, n)
116 except IOError:
116 except IOError:
117 pass
117 pass
118
118
119 self.tagscache['tip'] = self.changelog.tip()
119 self.tagscache['tip'] = self.changelog.tip()
120
120
121 return self.tagscache
121 return self.tagscache
122
122
123 def tagslist(self):
123 def tagslist(self):
124 '''return a list of tags ordered by revision'''
124 '''return a list of tags ordered by revision'''
125 l = []
125 l = []
126 for t, n in self.tags().items():
126 for t, n in self.tags().items():
127 try:
127 try:
128 r = self.changelog.rev(n)
128 r = self.changelog.rev(n)
129 except:
129 except:
130 r = -2 # sort to the beginning of the list if unknown
130 r = -2 # sort to the beginning of the list if unknown
131 l.append((r, t, n))
131 l.append((r, t, n))
132 l.sort()
132 l.sort()
133 return [(t, n) for r, t, n in l]
133 return [(t, n) for r, t, n in l]
134
134
135 def nodetags(self, node):
135 def nodetags(self, node):
136 '''return the tags associated with a node'''
136 '''return the tags associated with a node'''
137 if not self.nodetagscache:
137 if not self.nodetagscache:
138 self.nodetagscache = {}
138 self.nodetagscache = {}
139 for t, n in self.tags().items():
139 for t, n in self.tags().items():
140 self.nodetagscache.setdefault(n, []).append(t)
140 self.nodetagscache.setdefault(n, []).append(t)
141 return self.nodetagscache.get(node, [])
141 return self.nodetagscache.get(node, [])
142
142
143 def lookup(self, key):
143 def lookup(self, key):
144 try:
144 try:
145 return self.tags()[key]
145 return self.tags()[key]
146 except KeyError:
146 except KeyError:
147 try:
147 try:
148 return self.changelog.lookup(key)
148 return self.changelog.lookup(key)
149 except:
149 except:
150 raise repo.RepoError(_("unknown revision '%s'") % key)
150 raise repo.RepoError(_("unknown revision '%s'") % key)
151
151
152 def dev(self):
152 def dev(self):
153 return os.stat(self.path).st_dev
153 return os.stat(self.path).st_dev
154
154
155 def local(self):
155 def local(self):
156 return True
156 return True
157
157
158 def join(self, f):
158 def join(self, f):
159 return os.path.join(self.path, f)
159 return os.path.join(self.path, f)
160
160
161 def wjoin(self, f):
161 def wjoin(self, f):
162 return os.path.join(self.root, f)
162 return os.path.join(self.root, f)
163
163
164 def file(self, f):
164 def file(self, f):
165 if f[0] == '/':
165 if f[0] == '/':
166 f = f[1:]
166 f = f[1:]
167 return filelog.filelog(self.opener, f)
167 return filelog.filelog(self.opener, f)
168
168
169 def getcwd(self):
169 def getcwd(self):
170 return self.dirstate.getcwd()
170 return self.dirstate.getcwd()
171
171
172 def wfile(self, f, mode='r'):
172 def wfile(self, f, mode='r'):
173 return self.wopener(f, mode)
173 return self.wopener(f, mode)
174
174
175 def wread(self, filename):
175 def wread(self, filename):
176 if self.encodepats == None:
176 if self.encodepats == None:
177 l = []
177 l = []
178 for pat, cmd in self.ui.configitems("encode"):
178 for pat, cmd in self.ui.configitems("encode"):
179 mf = util.matcher("", "/", [pat], [], [])[1]
179 mf = util.matcher("", "/", [pat], [], [])[1]
180 l.append((mf, cmd))
180 l.append((mf, cmd))
181 self.encodepats = l
181 self.encodepats = l
182
182
183 data = self.wopener(filename, 'r').read()
183 data = self.wopener(filename, 'r').read()
184
184
185 for mf, cmd in self.encodepats:
185 for mf, cmd in self.encodepats:
186 if mf(filename):
186 if mf(filename):
187 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
187 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
188 data = util.filter(data, cmd)
188 data = util.filter(data, cmd)
189 break
189 break
190
190
191 return data
191 return data
192
192
193 def wwrite(self, filename, data, fd=None):
193 def wwrite(self, filename, data, fd=None):
194 if self.decodepats == None:
194 if self.decodepats == None:
195 l = []
195 l = []
196 for pat, cmd in self.ui.configitems("decode"):
196 for pat, cmd in self.ui.configitems("decode"):
197 mf = util.matcher("", "/", [pat], [], [])[1]
197 mf = util.matcher("", "/", [pat], [], [])[1]
198 l.append((mf, cmd))
198 l.append((mf, cmd))
199 self.decodepats = l
199 self.decodepats = l
200
200
201 for mf, cmd in self.decodepats:
201 for mf, cmd in self.decodepats:
202 if mf(filename):
202 if mf(filename):
203 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
203 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
204 data = util.filter(data, cmd)
204 data = util.filter(data, cmd)
205 break
205 break
206
206
207 if fd:
207 if fd:
208 return fd.write(data)
208 return fd.write(data)
209 return self.wopener(filename, 'w').write(data)
209 return self.wopener(filename, 'w').write(data)
210
210
211 def transaction(self):
211 def transaction(self):
212 # save dirstate for undo
212 # save dirstate for undo
213 try:
213 try:
214 ds = self.opener("dirstate").read()
214 ds = self.opener("dirstate").read()
215 except IOError:
215 except IOError:
216 ds = ""
216 ds = ""
217 self.opener("journal.dirstate", "w").write(ds)
217 self.opener("journal.dirstate", "w").write(ds)
218
218
219 def after():
219 def after():
220 util.rename(self.join("journal"), self.join("undo"))
220 util.rename(self.join("journal"), self.join("undo"))
221 util.rename(self.join("journal.dirstate"),
221 util.rename(self.join("journal.dirstate"),
222 self.join("undo.dirstate"))
222 self.join("undo.dirstate"))
223
223
224 return transaction.transaction(self.ui.warn, self.opener,
224 return transaction.transaction(self.ui.warn, self.opener,
225 self.join("journal"), after)
225 self.join("journal"), after)
226
226
227 def recover(self):
227 def recover(self):
228 lock = self.lock()
228 lock = self.lock()
229 if os.path.exists(self.join("journal")):
229 if os.path.exists(self.join("journal")):
230 self.ui.status(_("rolling back interrupted transaction\n"))
230 self.ui.status(_("rolling back interrupted transaction\n"))
231 transaction.rollback(self.opener, self.join("journal"))
231 transaction.rollback(self.opener, self.join("journal"))
232 self.manifest = manifest.manifest(self.opener)
232 self.manifest = manifest.manifest(self.opener)
233 self.changelog = changelog.changelog(self.opener)
233 self.changelog = changelog.changelog(self.opener)
234 return True
234 return True
235 else:
235 else:
236 self.ui.warn(_("no interrupted transaction available\n"))
236 self.ui.warn(_("no interrupted transaction available\n"))
237 return False
237 return False
238
238
239 def undo(self):
239 def undo(self):
240 wlock = self.wlock()
240 wlock = self.wlock()
241 lock = self.lock()
241 lock = self.lock()
242 if os.path.exists(self.join("undo")):
242 if os.path.exists(self.join("undo")):
243 self.ui.status(_("rolling back last transaction\n"))
243 self.ui.status(_("rolling back last transaction\n"))
244 transaction.rollback(self.opener, self.join("undo"))
244 transaction.rollback(self.opener, self.join("undo"))
245 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
245 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
246 self.dirstate.read()
246 self.dirstate.read()
247 else:
247 else:
248 self.ui.warn(_("no undo information available\n"))
248 self.ui.warn(_("no undo information available\n"))
249
249
250 def lock(self, wait=1):
250 def lock(self, wait=1):
251 try:
251 try:
252 return lock.lock(self.join("lock"), 0)
252 return lock.lock(self.join("lock"), 0)
253 except lock.LockHeld, inst:
253 except lock.LockHeld, inst:
254 if wait:
254 if wait:
255 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
255 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
256 return lock.lock(self.join("lock"), wait)
256 return lock.lock(self.join("lock"), wait)
257 raise inst
257 raise inst
258
258
259 def wlock(self, wait=1):
259 def wlock(self, wait=1):
260 try:
260 try:
261 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
261 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
262 except lock.LockHeld, inst:
262 except lock.LockHeld, inst:
263 if not wait:
263 if not wait:
264 raise inst
264 raise inst
265 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
265 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
266 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
266 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
267 self.dirstate.read()
267 self.dirstate.read()
268 return wlock
268 return wlock
269
269
270 def rawcommit(self, files, text, user, date, p1=None, p2=None):
270 def rawcommit(self, files, text, user, date, p1=None, p2=None):
271 orig_parent = self.dirstate.parents()[0] or nullid
271 orig_parent = self.dirstate.parents()[0] or nullid
272 p1 = p1 or self.dirstate.parents()[0] or nullid
272 p1 = p1 or self.dirstate.parents()[0] or nullid
273 p2 = p2 or self.dirstate.parents()[1] or nullid
273 p2 = p2 or self.dirstate.parents()[1] or nullid
274 c1 = self.changelog.read(p1)
274 c1 = self.changelog.read(p1)
275 c2 = self.changelog.read(p2)
275 c2 = self.changelog.read(p2)
276 m1 = self.manifest.read(c1[0])
276 m1 = self.manifest.read(c1[0])
277 mf1 = self.manifest.readflags(c1[0])
277 mf1 = self.manifest.readflags(c1[0])
278 m2 = self.manifest.read(c2[0])
278 m2 = self.manifest.read(c2[0])
279 changed = []
279 changed = []
280
280
281 if orig_parent == p1:
281 if orig_parent == p1:
282 update_dirstate = 1
282 update_dirstate = 1
283 else:
283 else:
284 update_dirstate = 0
284 update_dirstate = 0
285
285
286 wlock = self.wlock()
286 wlock = self.wlock()
287 lock = self.lock()
287 lock = self.lock()
288 tr = self.transaction()
288 tr = self.transaction()
289 mm = m1.copy()
289 mm = m1.copy()
290 mfm = mf1.copy()
290 mfm = mf1.copy()
291 linkrev = self.changelog.count()
291 linkrev = self.changelog.count()
292 for f in files:
292 for f in files:
293 try:
293 try:
294 t = self.wread(f)
294 t = self.wread(f)
295 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
295 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
296 r = self.file(f)
296 r = self.file(f)
297 mfm[f] = tm
297 mfm[f] = tm
298
298
299 fp1 = m1.get(f, nullid)
299 fp1 = m1.get(f, nullid)
300 fp2 = m2.get(f, nullid)
300 fp2 = m2.get(f, nullid)
301
301
302 # is the same revision on two branches of a merge?
302 # is the same revision on two branches of a merge?
303 if fp2 == fp1:
303 if fp2 == fp1:
304 fp2 = nullid
304 fp2 = nullid
305
305
306 if fp2 != nullid:
306 if fp2 != nullid:
307 # is one parent an ancestor of the other?
307 # is one parent an ancestor of the other?
308 fpa = r.ancestor(fp1, fp2)
308 fpa = r.ancestor(fp1, fp2)
309 if fpa == fp1:
309 if fpa == fp1:
310 fp1, fp2 = fp2, nullid
310 fp1, fp2 = fp2, nullid
311 elif fpa == fp2:
311 elif fpa == fp2:
312 fp2 = nullid
312 fp2 = nullid
313
313
314 # is the file unmodified from the parent?
314 # is the file unmodified from the parent?
315 if t == r.read(fp1):
315 if t == r.read(fp1):
316 # record the proper existing parent in manifest
316 # record the proper existing parent in manifest
317 # no need to add a revision
317 # no need to add a revision
318 mm[f] = fp1
318 mm[f] = fp1
319 continue
319 continue
320
320
321 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
321 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
322 changed.append(f)
322 changed.append(f)
323 if update_dirstate:
323 if update_dirstate:
324 self.dirstate.update([f], "n")
324 self.dirstate.update([f], "n")
325 except IOError:
325 except IOError:
326 try:
326 try:
327 del mm[f]
327 del mm[f]
328 del mfm[f]
328 del mfm[f]
329 if update_dirstate:
329 if update_dirstate:
330 self.dirstate.forget([f])
330 self.dirstate.forget([f])
331 except:
331 except:
332 # deleted from p2?
332 # deleted from p2?
333 pass
333 pass
334
334
335 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
335 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
336 user = user or self.ui.username()
336 user = user or self.ui.username()
337 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
337 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
338 tr.close()
338 tr.close()
339 if update_dirstate:
339 if update_dirstate:
340 self.dirstate.setparents(n, nullid)
340 self.dirstate.setparents(n, nullid)
341
341
342 def commit(self, files=None, text="", user=None, date=None,
342 def commit(self, files=None, text="", user=None, date=None,
343 match=util.always, force=False):
343 match=util.always, force=False):
344 commit = []
344 commit = []
345 remove = []
345 remove = []
346 changed = []
346 changed = []
347
347
348 if files:
348 if files:
349 for f in files:
349 for f in files:
350 s = self.dirstate.state(f)
350 s = self.dirstate.state(f)
351 if s in 'nmai':
351 if s in 'nmai':
352 commit.append(f)
352 commit.append(f)
353 elif s == 'r':
353 elif s == 'r':
354 remove.append(f)
354 remove.append(f)
355 else:
355 else:
356 self.ui.warn(_("%s not tracked!\n") % f)
356 self.ui.warn(_("%s not tracked!\n") % f)
357 else:
357 else:
358 modified, added, removed, deleted, unknown = self.changes(match=match)
358 modified, added, removed, deleted, unknown = self.changes(match=match)
359 commit = modified + added
359 commit = modified + added
360 remove = removed
360 remove = removed
361
361
362 p1, p2 = self.dirstate.parents()
362 p1, p2 = self.dirstate.parents()
363 c1 = self.changelog.read(p1)
363 c1 = self.changelog.read(p1)
364 c2 = self.changelog.read(p2)
364 c2 = self.changelog.read(p2)
365 m1 = self.manifest.read(c1[0])
365 m1 = self.manifest.read(c1[0])
366 mf1 = self.manifest.readflags(c1[0])
366 mf1 = self.manifest.readflags(c1[0])
367 m2 = self.manifest.read(c2[0])
367 m2 = self.manifest.read(c2[0])
368
368
369 if not commit and not remove and not force and p2 == nullid:
369 if not commit and not remove and not force and p2 == nullid:
370 self.ui.status(_("nothing changed\n"))
370 self.ui.status(_("nothing changed\n"))
371 return None
371 return None
372
372
373 if not self.hook("precommit"):
373 if not self.hook("precommit"):
374 return None
374 return None
375
375
376 wlock = self.wlock()
376 wlock = self.wlock()
377 lock = self.lock()
377 lock = self.lock()
378 tr = self.transaction()
378 tr = self.transaction()
379
379
380 # check in files
380 # check in files
381 new = {}
381 new = {}
382 linkrev = self.changelog.count()
382 linkrev = self.changelog.count()
383 commit.sort()
383 commit.sort()
384 for f in commit:
384 for f in commit:
385 self.ui.note(f + "\n")
385 self.ui.note(f + "\n")
386 try:
386 try:
387 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
387 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
388 t = self.wread(f)
388 t = self.wread(f)
389 except IOError:
389 except IOError:
390 self.ui.warn(_("trouble committing %s!\n") % f)
390 self.ui.warn(_("trouble committing %s!\n") % f)
391 raise
391 raise
392
392
393 r = self.file(f)
393 r = self.file(f)
394
394
395 meta = {}
395 meta = {}
396 cp = self.dirstate.copied(f)
396 cp = self.dirstate.copied(f)
397 if cp:
397 if cp:
398 meta["copy"] = cp
398 meta["copy"] = cp
399 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
399 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
400 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
400 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
401 fp1, fp2 = nullid, nullid
401 fp1, fp2 = nullid, nullid
402 else:
402 else:
403 fp1 = m1.get(f, nullid)
403 fp1 = m1.get(f, nullid)
404 fp2 = m2.get(f, nullid)
404 fp2 = m2.get(f, nullid)
405
405
406 if fp2 != nullid:
406 if fp2 != nullid:
407 # is one parent an ancestor of the other?
407 # is one parent an ancestor of the other?
408 fpa = r.ancestor(fp1, fp2)
408 fpa = r.ancestor(fp1, fp2)
409 if fpa == fp1:
409 if fpa == fp1:
410 fp1, fp2 = fp2, nullid
410 fp1, fp2 = fp2, nullid
411 elif fpa == fp2:
411 elif fpa == fp2:
412 fp2 = nullid
412 fp2 = nullid
413
413
414 # is the file unmodified from the parent?
414 # is the file unmodified from the parent?
415 if not meta and t == r.read(fp1) and fp2 == nullid:
415 if not meta and t == r.read(fp1) and fp2 == nullid:
416 # record the proper existing parent in manifest
416 # record the proper existing parent in manifest
417 # no need to add a revision
417 # no need to add a revision
418 new[f] = fp1
418 new[f] = fp1
419 continue
419 continue
420
420
421 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
421 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
422 # remember what we've added so that we can later calculate
422 # remember what we've added so that we can later calculate
423 # the files to pull from a set of changesets
423 # the files to pull from a set of changesets
424 changed.append(f)
424 changed.append(f)
425
425
426 # update manifest
426 # update manifest
427 m1.update(new)
427 m1.update(new)
428 for f in remove:
428 for f in remove:
429 if f in m1:
429 if f in m1:
430 del m1[f]
430 del m1[f]
431 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
431 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
432 (new, remove))
432 (new, remove))
433
433
434 # add changeset
434 # add changeset
435 new = new.keys()
435 new = new.keys()
436 new.sort()
436 new.sort()
437
437
438 if not text:
438 if not text:
439 edittext = ""
439 edittext = ""
440 if p2 != nullid:
440 if p2 != nullid:
441 edittext += "HG: branch merge\n"
441 edittext += "HG: branch merge\n"
442 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
442 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
443 edittext += "".join(["HG: changed %s\n" % f for f in changed])
443 edittext += "".join(["HG: changed %s\n" % f for f in changed])
444 edittext += "".join(["HG: removed %s\n" % f for f in remove])
444 edittext += "".join(["HG: removed %s\n" % f for f in remove])
445 if not changed and not remove:
445 if not changed and not remove:
446 edittext += "HG: no files changed\n"
446 edittext += "HG: no files changed\n"
447 edittext = self.ui.edit(edittext)
447 edittext = self.ui.edit(edittext)
448 if not edittext.rstrip():
448 if not edittext.rstrip():
449 return None
449 return None
450 text = edittext
450 text = edittext
451
451
452 user = user or self.ui.username()
452 user = user or self.ui.username()
453 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
453 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
454 tr.close()
454 tr.close()
455
455
456 self.dirstate.setparents(n)
456 self.dirstate.setparents(n)
457 self.dirstate.update(new, "n")
457 self.dirstate.update(new, "n")
458 self.dirstate.forget(remove)
458 self.dirstate.forget(remove)
459
459
460 if not self.hook("commit", node=hex(n)):
460 if not self.hook("commit", node=hex(n)):
461 return None
461 return None
462 return n
462 return n
463
463
464 def walk(self, node=None, files=[], match=util.always):
464 def walk(self, node=None, files=[], match=util.always):
465 if node:
465 if node:
466 fdict = dict.fromkeys(files)
466 fdict = dict.fromkeys(files)
467 for fn in self.manifest.read(self.changelog.read(node)[0]):
467 for fn in self.manifest.read(self.changelog.read(node)[0]):
468 fdict.pop(fn, None)
468 fdict.pop(fn, None)
469 if match(fn):
469 if match(fn):
470 yield 'm', fn
470 yield 'm', fn
471 for fn in fdict:
471 for fn in fdict:
472 self.ui.warn(_('%s: No such file in rev %s\n') % (
472 self.ui.warn(_('%s: No such file in rev %s\n') % (
473 util.pathto(self.getcwd(), fn), short(node)))
473 util.pathto(self.getcwd(), fn), short(node)))
474 else:
474 else:
475 for src, fn in self.dirstate.walk(files, match):
475 for src, fn in self.dirstate.walk(files, match):
476 yield src, fn
476 yield src, fn
477
477
478 def changes(self, node1=None, node2=None, files=[], match=util.always):
478 def changes(self, node1=None, node2=None, files=[], match=util.always):
479 """return changes between two nodes or node and working directory
479 """return changes between two nodes or node and working directory
480
480
481 If node1 is None, use the first dirstate parent instead.
481 If node1 is None, use the first dirstate parent instead.
482 If node2 is None, compare node1 with working directory.
482 If node2 is None, compare node1 with working directory.
483 """
483 """
484
484
485 def fcmp(fn, mf):
485 def fcmp(fn, mf):
486 t1 = self.wread(fn)
486 t1 = self.wread(fn)
487 t2 = self.file(fn).read(mf.get(fn, nullid))
487 t2 = self.file(fn).read(mf.get(fn, nullid))
488 return cmp(t1, t2)
488 return cmp(t1, t2)
489
489
490 def mfmatches(node):
490 def mfmatches(node):
491 change = self.changelog.read(node)
491 change = self.changelog.read(node)
492 mf = dict(self.manifest.read(change[0]))
492 mf = dict(self.manifest.read(change[0]))
493 for fn in mf.keys():
493 for fn in mf.keys():
494 if not match(fn):
494 if not match(fn):
495 del mf[fn]
495 del mf[fn]
496 return mf
496 return mf
497
497
498 # are we comparing the working directory?
498 # are we comparing the working directory?
499 if not node2:
499 if not node2:
500 try:
500 try:
501 wlock = self.wlock(wait=0)
501 wlock = self.wlock(wait=0)
502 except lock.LockHeld:
502 except lock.LockHeld:
503 wlock = None
503 wlock = None
504 lookup, modified, added, removed, deleted, unknown = (
504 lookup, modified, added, removed, deleted, unknown = (
505 self.dirstate.changes(files, match))
505 self.dirstate.changes(files, match))
506
506
507 # are we comparing working dir against its parent?
507 # are we comparing working dir against its parent?
508 if not node1:
508 if not node1:
509 if lookup:
509 if lookup:
510 # do a full compare of any files that might have changed
510 # do a full compare of any files that might have changed
511 mf2 = mfmatches(self.dirstate.parents()[0])
511 mf2 = mfmatches(self.dirstate.parents()[0])
512 for f in lookup:
512 for f in lookup:
513 if fcmp(f, mf2):
513 if fcmp(f, mf2):
514 modified.append(f)
514 modified.append(f)
515 elif wlock is not None:
515 elif wlock is not None:
516 self.dirstate.update([f], "n")
516 self.dirstate.update([f], "n")
517 else:
517 else:
518 # we are comparing working dir against non-parent
518 # we are comparing working dir against non-parent
519 # generate a pseudo-manifest for the working dir
519 # generate a pseudo-manifest for the working dir
520 mf2 = mfmatches(self.dirstate.parents()[0])
520 mf2 = mfmatches(self.dirstate.parents()[0])
521 for f in lookup + modified + added:
521 for f in lookup + modified + added:
522 mf2[f] = ""
522 mf2[f] = ""
523 for f in removed:
523 for f in removed:
524 if f in mf2:
524 if f in mf2:
525 del mf2[f]
525 del mf2[f]
526 else:
526 else:
527 # we are comparing two revisions
527 # we are comparing two revisions
528 deleted, unknown = [], []
528 deleted, unknown = [], []
529 mf2 = mfmatches(node2)
529 mf2 = mfmatches(node2)
530
530
531 if node1:
531 if node1:
532 # flush lists from dirstate before comparing manifests
532 # flush lists from dirstate before comparing manifests
533 modified, added = [], []
533 modified, added = [], []
534
534
535 mf1 = mfmatches(node1)
535 mf1 = mfmatches(node1)
536
536
537 for fn in mf2:
537 for fn in mf2:
538 if mf1.has_key(fn):
538 if mf1.has_key(fn):
539 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
539 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
540 modified.append(fn)
540 modified.append(fn)
541 del mf1[fn]
541 del mf1[fn]
542 else:
542 else:
543 added.append(fn)
543 added.append(fn)
544
544
545 removed = mf1.keys()
545 removed = mf1.keys()
546
546
547 # sort and return results:
547 # sort and return results:
548 for l in modified, added, removed, deleted, unknown:
548 for l in modified, added, removed, deleted, unknown:
549 l.sort()
549 l.sort()
550 return (modified, added, removed, deleted, unknown)
550 return (modified, added, removed, deleted, unknown)
551
551
552 def add(self, list):
552 def add(self, list):
553 wlock = self.wlock()
553 wlock = self.wlock()
554 for f in list:
554 for f in list:
555 p = self.wjoin(f)
555 p = self.wjoin(f)
556 if not os.path.exists(p):
556 if not os.path.exists(p):
557 self.ui.warn(_("%s does not exist!\n") % f)
557 self.ui.warn(_("%s does not exist!\n") % f)
558 elif not os.path.isfile(p):
558 elif not os.path.isfile(p):
559 self.ui.warn(_("%s not added: only files supported currently\n")
559 self.ui.warn(_("%s not added: only files supported currently\n")
560 % f)
560 % f)
561 elif self.dirstate.state(f) in 'an':
561 elif self.dirstate.state(f) in 'an':
562 self.ui.warn(_("%s already tracked!\n") % f)
562 self.ui.warn(_("%s already tracked!\n") % f)
563 else:
563 else:
564 self.dirstate.update([f], "a")
564 self.dirstate.update([f], "a")
565
565
566 def forget(self, list):
566 def forget(self, list):
567 wlock = self.wlock()
567 wlock = self.wlock()
568 for f in list:
568 for f in list:
569 if self.dirstate.state(f) not in 'ai':
569 if self.dirstate.state(f) not in 'ai':
570 self.ui.warn(_("%s not added!\n") % f)
570 self.ui.warn(_("%s not added!\n") % f)
571 else:
571 else:
572 self.dirstate.forget([f])
572 self.dirstate.forget([f])
573
573
574 def remove(self, list, unlink=False):
574 def remove(self, list, unlink=False):
575 if unlink:
575 if unlink:
576 for f in list:
576 for f in list:
577 try:
577 try:
578 util.unlink(self.wjoin(f))
578 util.unlink(self.wjoin(f))
579 except OSError, inst:
579 except OSError, inst:
580 if inst.errno != errno.ENOENT:
580 if inst.errno != errno.ENOENT:
581 raise
581 raise
582 wlock = self.wlock()
582 wlock = self.wlock()
583 for f in list:
583 for f in list:
584 p = self.wjoin(f)
584 p = self.wjoin(f)
585 if os.path.exists(p):
585 if os.path.exists(p):
586 self.ui.warn(_("%s still exists!\n") % f)
586 self.ui.warn(_("%s still exists!\n") % f)
587 elif self.dirstate.state(f) == 'a':
587 elif self.dirstate.state(f) == 'a':
588 self.ui.warn(_("%s never committed!\n") % f)
588 self.ui.warn(_("%s never committed!\n") % f)
589 self.dirstate.forget([f])
589 self.dirstate.forget([f])
590 elif f not in self.dirstate:
590 elif f not in self.dirstate:
591 self.ui.warn(_("%s not tracked!\n") % f)
591 self.ui.warn(_("%s not tracked!\n") % f)
592 else:
592 else:
593 self.dirstate.update([f], "r")
593 self.dirstate.update([f], "r")
594
594
595 def undelete(self, list):
595 def undelete(self, list):
596 p = self.dirstate.parents()[0]
596 p = self.dirstate.parents()[0]
597 mn = self.changelog.read(p)[0]
597 mn = self.changelog.read(p)[0]
598 mf = self.manifest.readflags(mn)
598 mf = self.manifest.readflags(mn)
599 m = self.manifest.read(mn)
599 m = self.manifest.read(mn)
600 wlock = self.wlock()
600 wlock = self.wlock()
601 for f in list:
601 for f in list:
602 if self.dirstate.state(f) not in "r":
602 if self.dirstate.state(f) not in "r":
603 self.ui.warn("%s not removed!\n" % f)
603 self.ui.warn("%s not removed!\n" % f)
604 else:
604 else:
605 t = self.file(f).read(m[f])
605 t = self.file(f).read(m[f])
606 self.wwrite(f, t)
606 self.wwrite(f, t)
607 util.set_exec(self.wjoin(f), mf[f])
607 util.set_exec(self.wjoin(f), mf[f])
608 self.dirstate.update([f], "n")
608 self.dirstate.update([f], "n")
609
609
610 def copy(self, source, dest):
610 def copy(self, source, dest):
611 p = self.wjoin(dest)
611 p = self.wjoin(dest)
612 if not os.path.exists(p):
612 if not os.path.exists(p):
613 self.ui.warn(_("%s does not exist!\n") % dest)
613 self.ui.warn(_("%s does not exist!\n") % dest)
614 elif not os.path.isfile(p):
614 elif not os.path.isfile(p):
615 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
615 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
616 else:
616 else:
617 wlock = self.wlock()
617 wlock = self.wlock()
618 if self.dirstate.state(dest) == '?':
618 if self.dirstate.state(dest) == '?':
619 self.dirstate.update([dest], "a")
619 self.dirstate.update([dest], "a")
620 self.dirstate.copy(source, dest)
620 self.dirstate.copy(source, dest)
621
621
622 def heads(self, start=None):
622 def heads(self, start=None):
623 heads = self.changelog.heads(start)
623 heads = self.changelog.heads(start)
624 # sort the output in rev descending order
624 # sort the output in rev descending order
625 heads = [(-self.changelog.rev(h), h) for h in heads]
625 heads = [(-self.changelog.rev(h), h) for h in heads]
626 heads.sort()
626 heads.sort()
627 return [n for (r, n) in heads]
627 return [n for (r, n) in heads]
628
628
629 # branchlookup returns a dict giving a list of branches for
629 # branchlookup returns a dict giving a list of branches for
630 # each head. A branch is defined as the tag of a node or
630 # each head. A branch is defined as the tag of a node or
631 # the branch of the node's parents. If a node has multiple
631 # the branch of the node's parents. If a node has multiple
632 # branch tags, tags are eliminated if they are visible from other
632 # branch tags, tags are eliminated if they are visible from other
633 # branch tags.
633 # branch tags.
634 #
634 #
635 # So, for this graph: a->b->c->d->e
635 # So, for this graph: a->b->c->d->e
636 # \ /
636 # \ /
637 # aa -----/
637 # aa -----/
638 # a has tag 2.6.12
638 # a has tag 2.6.12
639 # d has tag 2.6.13
639 # d has tag 2.6.13
640 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
640 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
641 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
641 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
642 # from the list.
642 # from the list.
643 #
643 #
644 # It is possible that more than one head will have the same branch tag.
644 # It is possible that more than one head will have the same branch tag.
645 # callers need to check the result for multiple heads under the same
645 # callers need to check the result for multiple heads under the same
646 # branch tag if that is a problem for them (ie checkout of a specific
646 # branch tag if that is a problem for them (ie checkout of a specific
647 # branch).
647 # branch).
648 #
648 #
649 # passing in a specific branch will limit the depth of the search
649 # passing in a specific branch will limit the depth of the search
650 # through the parents. It won't limit the branches returned in the
650 # through the parents. It won't limit the branches returned in the
651 # result though.
651 # result though.
    def branchlookup(self, heads=None, branch=None):
        """Return a dict mapping each head node to the list of branch
        tags visible from it.

        heads defaults to all repository heads.  branch, when given,
        stops the ancestor walk once a node carrying that tag is seen,
        limiting the search depth (it does not filter the result).
        """
        if not heads:
            heads = self.heads()
        # work list of heads still to be traversed
        headt = [ h for h in heads ]
        chlog = self.changelog
        # branches: node -> {tagged ancestor node: 1} visibility map
        branches = {}
        # merges: (second-parent node, found-list snapshot) to resume later
        merges = []
        seenmerge = {}

        # phase 1: walk ancestors of every head, recording in 'branches'
        # which tagged nodes are visible from each head (and from each
        # tagged node encountered along the way)
        while headt or merges:
            if merges:
                # resume a deferred merge branch with its saved context
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                # NOTE(review): 'seen' is only reset when starting from a
                # fresh head, not when resuming a merge — presumably
                # intentional (merges share the originating walk); confirm
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        # 'tip' is not a branch tag
                        if x == 'tip':
                            continue
                        # this tagged node is visible from everything
                        # collected so far, and from itself
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch: stop descending here
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # defer the second parent, with a copy of 'found'
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # phase 2: for each head, eliminate branch tags that are visible
        # from another branch tag of the same head (i.e. shadowed tags)
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                # transitive closure of 'branches' from node, memoized
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited.  This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    # b is not shadowed: report its tags for this head
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
734
734
735 def branches(self, nodes):
735 def branches(self, nodes):
736 if not nodes:
736 if not nodes:
737 nodes = [self.changelog.tip()]
737 nodes = [self.changelog.tip()]
738 b = []
738 b = []
739 for n in nodes:
739 for n in nodes:
740 t = n
740 t = n
741 while n:
741 while n:
742 p = self.changelog.parents(n)
742 p = self.changelog.parents(n)
743 if p[1] != nullid or p[0] == nullid:
743 if p[1] != nullid or p[0] == nullid:
744 b.append((t, n, p[0], p[1]))
744 b.append((t, n, p[0], p[1]))
745 break
745 break
746 n = p[0]
746 n = p[0]
747 return b
747 return b
748
748
749 def between(self, pairs):
749 def between(self, pairs):
750 r = []
750 r = []
751
751
752 for top, bottom in pairs:
752 for top, bottom in pairs:
753 n, l, i = top, [], 0
753 n, l, i = top, [], 0
754 f = 1
754 f = 1
755
755
756 while n != bottom:
756 while n != bottom:
757 p = self.changelog.parents(n)[0]
757 p = self.changelog.parents(n)[0]
758 if i == f:
758 if i == f:
759 l.append(n)
759 l.append(n)
760 f = f * 2
760 f = f * 2
761 n = p
761 n = p
762 i += 1
762 i += 1
763
763
764 r.append(l)
764 r.append(l)
765
765
766 return r
766 return r
767
767
    def findincoming(self, remote, base=None, heads=None):
        """Discover which changesets remote has that we are missing.

        Returns a list of the earliest missing changeset nodes (the
        roots of the incoming set), or None when remote has nothing new.
        When base is a dict it is filled in, as a side effect, with the
        latest common nodes found during discovery.  heads defaults to
        remote.heads().
        """
        # local changelog node map, used as the "do we know it?" oracle
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        # NOTE(review): '== None' rather than 'is None' — historical style
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        if not heads:
            heads = remote.heads()

        # split the remote heads into known (common) and unknown
        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            # every remote head is already known: nothing incoming
            return None

        rep = {}
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            # r accumulates parent nodes to ask the remote about in bulk
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # branch root's parents are both known: the
                            # root itself is the earliest missing node
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    # queue unexamined parents for the next bulk request
                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                            (reqcnt, " ".join(map(short, r))))
                # ask about parents in batches of 10 to bound request size
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        if b[0] in m:
                            self.ui.debug(_("found base node %s\n")
                                          % short(b[0]))
                            base[b[0]] = 1
                        elif b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            # sample points between head n[0] and known base n[1]
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most one node: p is the first unknown
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        # narrow the range and search again
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                # NOTE(review): short(f[:4]) truncates the node to 4 bytes
                # before abbreviating — looks unintended, confirm
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

        self.ui.note(_("found new changesets starting at ") +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
889
889
890 def findoutgoing(self, remote, base=None, heads=None):
890 def findoutgoing(self, remote, base=None, heads=None):
891 if base == None:
891 if base == None:
892 base = {}
892 base = {}
893 self.findincoming(remote, base, heads)
893 self.findincoming(remote, base, heads)
894
894
895 self.ui.debug(_("common changesets up to ")
895 self.ui.debug(_("common changesets up to ")
896 + " ".join(map(short, base.keys())) + "\n")
896 + " ".join(map(short, base.keys())) + "\n")
897
897
898 remain = dict.fromkeys(self.changelog.nodemap)
898 remain = dict.fromkeys(self.changelog.nodemap)
899
899
900 # prune everything remote has from the tree
900 # prune everything remote has from the tree
901 del remain[nullid]
901 del remain[nullid]
902 remove = base.keys()
902 remove = base.keys()
903 while remove:
903 while remove:
904 n = remove.pop(0)
904 n = remove.pop(0)
905 if n in remain:
905 if n in remain:
906 del remain[n]
906 del remain[n]
907 for p in self.changelog.parents(n):
907 for p in self.changelog.parents(n):
908 remove.append(p)
908 remove.append(p)
909
909
910 # find every node whose parents have been pruned
910 # find every node whose parents have been pruned
911 subset = []
911 subset = []
912 for n in remain:
912 for n in remain:
913 p1, p2 = self.changelog.parents(n)
913 p1, p2 = self.changelog.parents(n)
914 if p1 not in remain and p2 not in remain:
914 if p1 not in remain and p2 not in remain:
915 subset.append(n)
915 subset.append(n)
916
916
917 # this is the set of all roots we have to push
917 # this is the set of all roots we have to push
918 return subset
918 return subset
919
919
920 def pull(self, remote, heads=None):
920 def pull(self, remote, heads=None):
921 lock = self.lock()
921 lock = self.lock()
922
922
923 # if we have an empty repo, fetch everything
923 # if we have an empty repo, fetch everything
924 if self.changelog.tip() == nullid:
924 if self.changelog.tip() == nullid:
925 self.ui.status(_("requesting all changes\n"))
925 self.ui.status(_("requesting all changes\n"))
926 fetch = [nullid]
926 fetch = [nullid]
927 else:
927 else:
928 fetch = self.findincoming(remote)
928 fetch = self.findincoming(remote)
929
929
930 if not fetch:
930 if not fetch:
931 self.ui.status(_("no changes found\n"))
931 self.ui.status(_("no changes found\n"))
932 return 1
932 return 1
933
933
934 if heads is None:
934 if heads is None:
935 cg = remote.changegroup(fetch)
935 cg = remote.changegroup(fetch)
936 else:
936 else:
937 cg = remote.changegroupsubset(fetch, heads)
937 cg = remote.changegroupsubset(fetch, heads)
938 return self.addchangegroup(cg)
938 return self.addchangegroup(cg)
939
939
940 def push(self, remote, force=False):
940 def push(self, remote, force=False):
941 lock = remote.lock()
941 lock = remote.lock()
942
942
943 base = {}
943 base = {}
944 heads = remote.heads()
944 heads = remote.heads()
945 inc = self.findincoming(remote, base, heads)
945 inc = self.findincoming(remote, base, heads)
946 if not force and inc:
946 if not force and inc:
947 self.ui.warn(_("abort: unsynced remote changes!\n"))
947 self.ui.warn(_("abort: unsynced remote changes!\n"))
948 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
948 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
949 return 1
949 return 1
950
950
951 update = self.findoutgoing(remote, base)
951 update = self.findoutgoing(remote, base)
952 if not update:
952 if not update:
953 self.ui.status(_("no changes found\n"))
953 self.ui.status(_("no changes found\n"))
954 return 1
954 return 1
955 elif not force:
955 elif not force:
956 if len(heads) < len(self.changelog.heads()):
956 if len(heads) < len(self.changelog.heads()):
957 self.ui.warn(_("abort: push creates new remote branches!\n"))
957 self.ui.warn(_("abort: push creates new remote branches!\n"))
958 self.ui.status(_("(did you forget to merge?"
958 self.ui.status(_("(did you forget to merge?"
959 " use push -f to force)\n"))
959 " use push -f to force)\n"))
960 return 1
960 return 1
961
961
962 cg = self.changegroup(update)
962 cg = self.changegroup(update)
963 return remote.addchangegroup(cg)
963 return remote.addchangegroup(cg)
964
964
965 def changegroupsubset(self, bases, heads):
965 def changegroupsubset(self, bases, heads):
966 """This function generates a changegroup consisting of all the nodes
966 """This function generates a changegroup consisting of all the nodes
967 that are descendents of any of the bases, and ancestors of any of
967 that are descendents of any of the bases, and ancestors of any of
968 the heads.
968 the heads.
969
969
970 It is fairly complex as determining which filenodes and which
970 It is fairly complex as determining which filenodes and which
971 manifest nodes need to be included for the changeset to be complete
971 manifest nodes need to be included for the changeset to be complete
972 is non-trivial.
972 is non-trivial.
973
973
974 Another wrinkle is doing the reverse, figuring out which changeset in
974 Another wrinkle is doing the reverse, figuring out which changeset in
975 the changegroup a particular filenode or manifestnode belongs to."""
975 the changegroup a particular filenode or manifestnode belongs to."""
976
976
977 # Set up some initial variables
977 # Set up some initial variables
978 # Make it easy to refer to self.changelog
978 # Make it easy to refer to self.changelog
979 cl = self.changelog
979 cl = self.changelog
980 # msng is short for missing - compute the list of changesets in this
980 # msng is short for missing - compute the list of changesets in this
981 # changegroup.
981 # changegroup.
982 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
982 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
983 # Some bases may turn out to be superfluous, and some heads may be
983 # Some bases may turn out to be superfluous, and some heads may be
984 # too. nodesbetween will return the minimal set of bases and heads
984 # too. nodesbetween will return the minimal set of bases and heads
985 # necessary to re-create the changegroup.
985 # necessary to re-create the changegroup.
986
986
987 # Known heads are the list of heads that it is assumed the recipient
987 # Known heads are the list of heads that it is assumed the recipient
988 # of this changegroup will know about.
988 # of this changegroup will know about.
989 knownheads = {}
989 knownheads = {}
990 # We assume that all parents of bases are known heads.
990 # We assume that all parents of bases are known heads.
991 for n in bases:
991 for n in bases:
992 for p in cl.parents(n):
992 for p in cl.parents(n):
993 if p != nullid:
993 if p != nullid:
994 knownheads[p] = 1
994 knownheads[p] = 1
995 knownheads = knownheads.keys()
995 knownheads = knownheads.keys()
996 if knownheads:
996 if knownheads:
997 # Now that we know what heads are known, we can compute which
997 # Now that we know what heads are known, we can compute which
998 # changesets are known. The recipient must know about all
998 # changesets are known. The recipient must know about all
999 # changesets required to reach the known heads from the null
999 # changesets required to reach the known heads from the null
1000 # changeset.
1000 # changeset.
1001 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1001 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1002 junk = None
1002 junk = None
1003 # Transform the list into an ersatz set.
1003 # Transform the list into an ersatz set.
1004 has_cl_set = dict.fromkeys(has_cl_set)
1004 has_cl_set = dict.fromkeys(has_cl_set)
1005 else:
1005 else:
1006 # If there were no known heads, the recipient cannot be assumed to
1006 # If there were no known heads, the recipient cannot be assumed to
1007 # know about any changesets.
1007 # know about any changesets.
1008 has_cl_set = {}
1008 has_cl_set = {}
1009
1009
1010 # Make it easy to refer to self.manifest
1010 # Make it easy to refer to self.manifest
1011 mnfst = self.manifest
1011 mnfst = self.manifest
1012 # We don't know which manifests are missing yet
1012 # We don't know which manifests are missing yet
1013 msng_mnfst_set = {}
1013 msng_mnfst_set = {}
1014 # Nor do we know which filenodes are missing.
1014 # Nor do we know which filenodes are missing.
1015 msng_filenode_set = {}
1015 msng_filenode_set = {}
1016
1016
1017 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1017 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1018 junk = None
1018 junk = None
1019
1019
1020 # A changeset always belongs to itself, so the changenode lookup
1020 # A changeset always belongs to itself, so the changenode lookup
1021 # function for a changenode is identity.
1021 # function for a changenode is identity.
1022 def identity(x):
1022 def identity(x):
1023 return x
1023 return x
1024
1024
1025 # A function generating function. Sets up an environment for the
1025 # A function generating function. Sets up an environment for the
1026 # inner function.
1026 # inner function.
1027 def cmp_by_rev_func(revlog):
1027 def cmp_by_rev_func(revlog):
1028 # Compare two nodes by their revision number in the environment's
1028 # Compare two nodes by their revision number in the environment's
1029 # revision history. Since the revision number both represents the
1029 # revision history. Since the revision number both represents the
1030 # most efficient order to read the nodes in, and represents a
1030 # most efficient order to read the nodes in, and represents a
1031 # topological sorting of the nodes, this function is often useful.
1031 # topological sorting of the nodes, this function is often useful.
1032 def cmp_by_rev(a, b):
1032 def cmp_by_rev(a, b):
1033 return cmp(revlog.rev(a), revlog.rev(b))
1033 return cmp(revlog.rev(a), revlog.rev(b))
1034 return cmp_by_rev
1034 return cmp_by_rev
1035
1035
1036 # If we determine that a particular file or manifest node must be a
1036 # If we determine that a particular file or manifest node must be a
1037 # node that the recipient of the changegroup will already have, we can
1037 # node that the recipient of the changegroup will already have, we can
1038 # also assume the recipient will have all the parents. This function
1038 # also assume the recipient will have all the parents. This function
1039 # prunes them from the set of missing nodes.
1039 # prunes them from the set of missing nodes.
1040 def prune_parents(revlog, hasset, msngset):
1040 def prune_parents(revlog, hasset, msngset):
1041 haslst = hasset.keys()
1041 haslst = hasset.keys()
1042 haslst.sort(cmp_by_rev_func(revlog))
1042 haslst.sort(cmp_by_rev_func(revlog))
1043 for node in haslst:
1043 for node in haslst:
1044 parentlst = [p for p in revlog.parents(node) if p != nullid]
1044 parentlst = [p for p in revlog.parents(node) if p != nullid]
1045 while parentlst:
1045 while parentlst:
1046 n = parentlst.pop()
1046 n = parentlst.pop()
1047 if n not in hasset:
1047 if n not in hasset:
1048 hasset[n] = 1
1048 hasset[n] = 1
1049 p = [p for p in revlog.parents(n) if p != nullid]
1049 p = [p for p in revlog.parents(n) if p != nullid]
1050 parentlst.extend(p)
1050 parentlst.extend(p)
1051 for n in hasset:
1051 for n in hasset:
1052 msngset.pop(n, None)
1052 msngset.pop(n, None)
1053
1053
1054 # This is a function generating function used to set up an environment
1054 # This is a function generating function used to set up an environment
1055 # for the inner function to execute in.
1055 # for the inner function to execute in.
1056 def manifest_and_file_collector(changedfileset):
1056 def manifest_and_file_collector(changedfileset):
1057 # This is an information gathering function that gathers
1057 # This is an information gathering function that gathers
1058 # information from each changeset node that goes out as part of
1058 # information from each changeset node that goes out as part of
1059 # the changegroup. The information gathered is a list of which
1059 # the changegroup. The information gathered is a list of which
1060 # manifest nodes are potentially required (the recipient may
1060 # manifest nodes are potentially required (the recipient may
1061 # already have them) and total list of all files which were
1061 # already have them) and total list of all files which were
1062 # changed in any changeset in the changegroup.
1062 # changed in any changeset in the changegroup.
1063 #
1063 #
1064 # We also remember the first changenode we saw any manifest
1064 # We also remember the first changenode we saw any manifest
1065 # referenced by so we can later determine which changenode 'owns'
1065 # referenced by so we can later determine which changenode 'owns'
1066 # the manifest.
1066 # the manifest.
1067 def collect_manifests_and_files(clnode):
1067 def collect_manifests_and_files(clnode):
1068 c = cl.read(clnode)
1068 c = cl.read(clnode)
1069 for f in c[3]:
1069 for f in c[3]:
1070 # This is to make sure we only have one instance of each
1070 # This is to make sure we only have one instance of each
1071 # filename string for each filename.
1071 # filename string for each filename.
1072 changedfileset.setdefault(f, f)
1072 changedfileset.setdefault(f, f)
1073 msng_mnfst_set.setdefault(c[0], clnode)
1073 msng_mnfst_set.setdefault(c[0], clnode)
1074 return collect_manifests_and_files
1074 return collect_manifests_and_files
1075
1075
1076 # Figure out which manifest nodes (of the ones we think might be part
1076 # Figure out which manifest nodes (of the ones we think might be part
1077 # of the changegroup) the recipient must know about and remove them
1077 # of the changegroup) the recipient must know about and remove them
1078 # from the changegroup.
1078 # from the changegroup.
1079 def prune_manifests():
1079 def prune_manifests():
1080 has_mnfst_set = {}
1080 has_mnfst_set = {}
1081 for n in msng_mnfst_set:
1081 for n in msng_mnfst_set:
1082 # If a 'missing' manifest thinks it belongs to a changenode
1082 # If a 'missing' manifest thinks it belongs to a changenode
1083 # the recipient is assumed to have, obviously the recipient
1083 # the recipient is assumed to have, obviously the recipient
1084 # must have that manifest.
1084 # must have that manifest.
1085 linknode = cl.node(mnfst.linkrev(n))
1085 linknode = cl.node(mnfst.linkrev(n))
1086 if linknode in has_cl_set:
1086 if linknode in has_cl_set:
1087 has_mnfst_set[n] = 1
1087 has_mnfst_set[n] = 1
1088 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1088 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1089
1089
1090 # Use the information collected in collect_manifests_and_files to say
1090 # Use the information collected in collect_manifests_and_files to say
1091 # which changenode any manifestnode belongs to.
1091 # which changenode any manifestnode belongs to.
1092 def lookup_manifest_link(mnfstnode):
1092 def lookup_manifest_link(mnfstnode):
1093 return msng_mnfst_set[mnfstnode]
1093 return msng_mnfst_set[mnfstnode]
1094
1094
        # A function generating function that sets up the initial environment
        # for the inner function: binds next_rev so the collector can tell
        # whether it is walking manifest revisions sequentially.
        def filenode_collector(changedfiles):
            # Mutable one-element list: rev number we expect to see next.
            # (A list is used so the nested function can rebind the value.)
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        # (manifest lines are "<file>\0<hex filenode>")
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes
1144
1144
        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            # msngset: this file's candidate missing filenodes (closure state,
            # filled in by collect_msng_filenodes); pruned in place.
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            # Also drop ancestors of known-present nodes from msngset.
            prune_parents(filerevlog, hasset, msngset)
1158
1158
1159 # A function generator function that sets up the a context for the
1159 # A function generator function that sets up the a context for the
1160 # inner function.
1160 # inner function.
1161 def lookup_filenode_link_func(fname):
1161 def lookup_filenode_link_func(fname):
1162 msngset = msng_filenode_set[fname]
1162 msngset = msng_filenode_set[fname]
1163 # Lookup the changenode the filenode belongs to.
1163 # Lookup the changenode the filenode belongs to.
1164 def lookup_filenode_link(fnode):
1164 def lookup_filenode_link(fnode):
1165 return msngset[fnode]
1165 return msngset[fnode]
1166 return lookup_filenode_link
1166 return lookup_filenode_link
1167
1167
        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        # Emits, in order: the changeset group, the manifest group, then one
        # group per changed file (name-prefixed), and a zero-length terminator.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                prune_filenodes(fname, filerevlog)
                msng_filenode_lst = msng_filenode_set[fname].keys()
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    # File groups are announced with a length-prefixed name.
                    yield struct.pack(">l", len(fname) + 4) + fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                # Don't need this anymore, toss it to free memory.
                del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield struct.pack(">l", 0)
1224
1224
1225 return util.chunkbuffer(gengroup())
1225 return util.chunkbuffer(gengroup())
1226
1226
1227 def changegroup(self, basenodes):
1227 def changegroup(self, basenodes):
1228 """Generate a changegroup of all nodes that we have that a recipient
1228 """Generate a changegroup of all nodes that we have that a recipient
1229 doesn't.
1229 doesn't.
1230
1230
1231 This is much easier than the previous function as we can assume that
1231 This is much easier than the previous function as we can assume that
1232 the recipient has any changenode we aren't sending them."""
1232 the recipient has any changenode we aren't sending them."""
1233 cl = self.changelog
1233 cl = self.changelog
1234 nodes = cl.nodesbetween(basenodes, None)[0]
1234 nodes = cl.nodesbetween(basenodes, None)[0]
1235 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1235 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1236
1236
1237 def identity(x):
1237 def identity(x):
1238 return x
1238 return x
1239
1239
1240 def gennodelst(revlog):
1240 def gennodelst(revlog):
1241 for r in xrange(0, revlog.count()):
1241 for r in xrange(0, revlog.count()):
1242 n = revlog.node(r)
1242 n = revlog.node(r)
1243 if revlog.linkrev(n) in revset:
1243 if revlog.linkrev(n) in revset:
1244 yield n
1244 yield n
1245
1245
1246 def changed_file_collector(changedfileset):
1246 def changed_file_collector(changedfileset):
1247 def collect_changed_files(clnode):
1247 def collect_changed_files(clnode):
1248 c = cl.read(clnode)
1248 c = cl.read(clnode)
1249 for fname in c[3]:
1249 for fname in c[3]:
1250 changedfileset[fname] = 1
1250 changedfileset[fname] = 1
1251 return collect_changed_files
1251 return collect_changed_files
1252
1252
1253 def lookuprevlink_func(revlog):
1253 def lookuprevlink_func(revlog):
1254 def lookuprevlink(n):
1254 def lookuprevlink(n):
1255 return cl.node(revlog.linkrev(n))
1255 return cl.node(revlog.linkrev(n))
1256 return lookuprevlink
1256 return lookuprevlink
1257
1257
1258 def gengroup():
1258 def gengroup():
1259 # construct a list of all changed files
1259 # construct a list of all changed files
1260 changedfiles = {}
1260 changedfiles = {}
1261
1261
1262 for chnk in cl.group(nodes, identity,
1262 for chnk in cl.group(nodes, identity,
1263 changed_file_collector(changedfiles)):
1263 changed_file_collector(changedfiles)):
1264 yield chnk
1264 yield chnk
1265 changedfiles = changedfiles.keys()
1265 changedfiles = changedfiles.keys()
1266 changedfiles.sort()
1266 changedfiles.sort()
1267
1267
1268 mnfst = self.manifest
1268 mnfst = self.manifest
1269 nodeiter = gennodelst(mnfst)
1269 nodeiter = gennodelst(mnfst)
1270 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1270 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1271 yield chnk
1271 yield chnk
1272
1272
1273 for fname in changedfiles:
1273 for fname in changedfiles:
1274 filerevlog = self.file(fname)
1274 filerevlog = self.file(fname)
1275 nodeiter = gennodelst(filerevlog)
1275 nodeiter = gennodelst(filerevlog)
1276 nodeiter = list(nodeiter)
1276 nodeiter = list(nodeiter)
1277 if nodeiter:
1277 if nodeiter:
1278 yield struct.pack(">l", len(fname) + 4) + fname
1278 yield struct.pack(">l", len(fname) + 4) + fname
1279 lookup = lookuprevlink_func(filerevlog)
1279 lookup = lookuprevlink_func(filerevlog)
1280 for chnk in filerevlog.group(nodeiter, lookup):
1280 for chnk in filerevlog.group(nodeiter, lookup):
1281 yield chnk
1281 yield chnk
1282
1282
1283 yield struct.pack(">l", 0)
1283 yield struct.pack(">l", 0)
1284
1284
1285 return util.chunkbuffer(gengroup())
1285 return util.chunkbuffer(gengroup())
1286
1286
1287 def addchangegroup(self, source):
1287 def addchangegroup(self, source):
1288
1288
1289 def getchunk():
1289 def getchunk():
1290 d = source.read(4)
1290 d = source.read(4)
1291 if not d:
1291 if not d:
1292 return ""
1292 return ""
1293 l = struct.unpack(">l", d)[0]
1293 l = struct.unpack(">l", d)[0]
1294 if l <= 4:
1294 if l <= 4:
1295 return ""
1295 return ""
1296 d = source.read(l - 4)
1296 d = source.read(l - 4)
1297 if len(d) < l - 4:
1297 if len(d) < l - 4:
1298 raise repo.RepoError(_("premature EOF reading chunk"
1298 raise repo.RepoError(_("premature EOF reading chunk"
1299 " (got %d bytes, expected %d)")
1299 " (got %d bytes, expected %d)")
1300 % (len(d), l - 4))
1300 % (len(d), l - 4))
1301 return d
1301 return d
1302
1302
1303 def getgroup():
1303 def getgroup():
1304 while 1:
1304 while 1:
1305 c = getchunk()
1305 c = getchunk()
1306 if not c:
1306 if not c:
1307 break
1307 break
1308 yield c
1308 yield c
1309
1309
1310 def csmap(x):
1310 def csmap(x):
1311 self.ui.debug(_("add changeset %s\n") % short(x))
1311 self.ui.debug(_("add changeset %s\n") % short(x))
1312 return self.changelog.count()
1312 return self.changelog.count()
1313
1313
1314 def revmap(x):
1314 def revmap(x):
1315 return self.changelog.rev(x)
1315 return self.changelog.rev(x)
1316
1316
1317 if not source:
1317 if not source:
1318 return
1318 return
1319 changesets = files = revisions = 0
1319 changesets = files = revisions = 0
1320
1320
1321 tr = self.transaction()
1321 tr = self.transaction()
1322
1322
1323 oldheads = len(self.changelog.heads())
1323 oldheads = len(self.changelog.heads())
1324
1324
1325 # pull off the changeset group
1325 # pull off the changeset group
1326 self.ui.status(_("adding changesets\n"))
1326 self.ui.status(_("adding changesets\n"))
1327 co = self.changelog.tip()
1327 co = self.changelog.tip()
1328 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1328 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1329 cnr, cor = map(self.changelog.rev, (cn, co))
1329 cnr, cor = map(self.changelog.rev, (cn, co))
1330 if cn == nullid:
1330 if cn == nullid:
1331 cnr = cor
1331 cnr = cor
1332 changesets = cnr - cor
1332 changesets = cnr - cor
1333
1333
1334 # pull off the manifest group
1334 # pull off the manifest group
1335 self.ui.status(_("adding manifests\n"))
1335 self.ui.status(_("adding manifests\n"))
1336 mm = self.manifest.tip()
1336 mm = self.manifest.tip()
1337 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1337 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1338
1338
1339 # process the files
1339 # process the files
1340 self.ui.status(_("adding file changes\n"))
1340 self.ui.status(_("adding file changes\n"))
1341 while 1:
1341 while 1:
1342 f = getchunk()
1342 f = getchunk()
1343 if not f:
1343 if not f:
1344 break
1344 break
1345 self.ui.debug(_("adding %s revisions\n") % f)
1345 self.ui.debug(_("adding %s revisions\n") % f)
1346 fl = self.file(f)
1346 fl = self.file(f)
1347 o = fl.count()
1347 o = fl.count()
1348 n = fl.addgroup(getgroup(), revmap, tr)
1348 n = fl.addgroup(getgroup(), revmap, tr)
1349 revisions += fl.count() - o
1349 revisions += fl.count() - o
1350 files += 1
1350 files += 1
1351
1351
1352 newheads = len(self.changelog.heads())
1352 newheads = len(self.changelog.heads())
1353 heads = ""
1353 heads = ""
1354 if oldheads and newheads > oldheads:
1354 if oldheads and newheads > oldheads:
1355 heads = _(" (+%d heads)") % (newheads - oldheads)
1355 heads = _(" (+%d heads)") % (newheads - oldheads)
1356
1356
1357 self.ui.status(_("added %d changesets"
1357 self.ui.status(_("added %d changesets"
1358 " with %d changes to %d files%s\n")
1358 " with %d changes to %d files%s\n")
1359 % (changesets, revisions, files, heads))
1359 % (changesets, revisions, files, heads))
1360
1360
1361 tr.close()
1361 tr.close()
1362
1362
1363 if changesets > 0:
1363 if changesets > 0:
1364 if not self.hook("changegroup",
1364 if not self.hook("changegroup",
1365 node=hex(self.changelog.node(cor+1))):
1365 node=hex(self.changelog.node(cor+1))):
1366 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1366 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1367 return 1
1367 return 1
1368
1368
1369 for i in range(cor + 1, cnr + 1):
1369 for i in range(cor + 1, cnr + 1):
1370 self.hook("commit", node=hex(self.changelog.node(i)))
1370 self.hook("commit", node=hex(self.changelog.node(i)))
1371
1371
1372 return
1372 return
1373
1373
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True, forcemerge=False):
        """Update the working directory to changeset `node`.

        allow      -- permit an update that spans branches (a merge)
        force      -- clobber local changes / skip safety checks
        choose     -- optional predicate limiting which files are touched
        moddirstate -- if False, leave the dirstate untouched
        forcemerge -- skip the outstanding-changes check when merging

        Returns 1 on refusal (uncommitted merge, branch span without
        allow), None otherwise.
        """
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
            return 1

        # p1 = current working dir parent, p2 = update target
        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        modified, added, removed, deleted, unknown = self.changes()

        if allow and not forcemerge:
            if modified or added or removed:
                raise util.Abort(_("outstanding uncommited changes"))
        if not forcemerge and not force:
            # refuse to clobber an unknown working file that differs from
            # the target revision's version
            for f in unknown:
                if f in m2:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) != 0:
                        raise util.Abort(_("'%s' already exists in the working"
                                           " dir and differs from remote") % f)

        # is this a jump, or a merge?  i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note(_("resolving manifests\n"))
        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
                      (short(man), short(m1n), short(m2n)))

        merge = {}   # files needing a 3-way merge: f -> (my, other, mode)
        get = {}     # files to fetch from the target: f -> filenode
        remove = []  # files to delete from the working dir

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(unknown)

        for f in added + modified + unknown:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        if moddirstate:
            # NOTE(review): wlock is never released explicitly here —
            # presumably freed when the object goes out of scope; confirm.
            wlock = self.wlock()

        for f in deleted + removed:
            if f in mw:
                del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f):
                continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(_(" remote %s is newer, get\n") % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(_(" updating permissions for %s\n") % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(_(" updating permissions for %s\n")
                                          % f)
                            util.set_exec(self.wjoin(f), mode)
                # handled: drop from m2 so the second loop only sees files
                # present remotely but not locally
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = _("d")
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (_(" local changed %s which remote deleted\n") % f) +
                            _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                    if r == _("d"):
                        remove.append(f)
                else:
                    self.ui.debug(_("other deleted %s\n") % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug(_("remote deleted %s\n") % f)
                        remove.append(f)
                    else:
                        self.ui.debug(_("local modified %s, keeping\n") % f)
                else:
                    self.ui.debug(_("working dir created %s, keeping\n") % f)

        # everything left in m2 exists remotely but not in the working dir
        for f, n in m2.iteritems():
            if choose and not choose(f):
                continue
            if f[0] == "/":
                continue
            if f in ma and n != ma[f]:
                r = _("k")
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        (_("remote changed %s which local deleted\n") % f) +
                        _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                if r == _("k"):
                    get[f] = n
            elif f not in ma:
                self.ui.debug(_("remote created %s\n") % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug(_("local deleted %s, recreating\n") % f)
                    get[f] = n
                else:
                    self.ui.debug(_("local deleted %s\n") % f)

        del mw, m1, m2, ma

        if force:
            # forced update: take the remote side of every would-be merge
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status(_("this update spans a branch"
                                 " affecting the following files:\n"))
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge:
                        cf = _(" (resolve)")
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn(_("aborting update spanning branches!\n"))
                self.ui.status(_("(use update -m to merge across branches"
                                 " or -C to lose changes)\n"))
                return 1
            branch_merge = True

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/":
                continue
            self.ui.note(_("getting %s\n") % f)
            t = self.file(f).read(get[f])
            self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status(_("merging %s\n") % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note(_("removing %s\n") % f)
            try:
                util.unlink(self.wjoin(f))
            except OSError, inst:
                # a missing file is fine (already gone); anything else is
                # reported but does not abort the update
                if inst.errno != errno.ENOENT:
                    self.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

        if moddirstate:
            self.dirstate.setparents(p1, p2)
1629
1629
1630 def merge3(self, fn, my, other):
1630 def merge3(self, fn, my, other):
1631 """perform a 3-way merge in the working directory"""
1631 """perform a 3-way merge in the working directory"""
1632
1632
1633 def temp(prefix, node):
1633 def temp(prefix, node):
1634 pre = "%s~%s." % (os.path.basename(fn), prefix)
1634 pre = "%s~%s." % (os.path.basename(fn), prefix)
1635 (fd, name) = tempfile.mkstemp("", pre)
1635 (fd, name) = tempfile.mkstemp("", pre)
1636 f = os.fdopen(fd, "wb")
1636 f = os.fdopen(fd, "wb")
1637 self.wwrite(fn, fl.read(node), f)
1637 self.wwrite(fn, fl.read(node), f)
1638 f.close()
1638 f.close()
1639 return name
1639 return name
1640
1640
1641 fl = self.file(fn)
1641 fl = self.file(fn)
1642 base = fl.ancestor(my, other)
1642 base = fl.ancestor(my, other)
1643 a = self.wjoin(fn)
1643 a = self.wjoin(fn)
1644 b = temp("base", base)
1644 b = temp("base", base)
1645 c = temp("other", other)
1645 c = temp("other", other)
1646
1646
1647 self.ui.note(_("resolving %s\n") % fn)
1647 self.ui.note(_("resolving %s\n") % fn)
1648 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1648 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1649 (fn, short(my), short(other), short(base)))
1649 (fn, short(my), short(other), short(base)))
1650
1650
1651 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1651 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1652 or "hgmerge")
1652 or "hgmerge")
1653 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1653 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1654 if r:
1654 if r:
1655 self.ui.warn(_("merging %s failed!\n") % fn)
1655 self.ui.warn(_("merging %s failed!\n") % fn)
1656
1656
1657 os.unlink(b)
1657 os.unlink(b)
1658 os.unlink(c)
1658 os.unlink(c)
1659
1659
1660 def verify(self):
1660 def verify(self):
1661 filelinkrevs = {}
1661 filelinkrevs = {}
1662 filenodes = {}
1662 filenodes = {}
1663 changesets = revisions = files = 0
1663 changesets = revisions = files = 0
1664 errors = [0]
1664 errors = [0]
1665 neededmanifests = {}
1665 neededmanifests = {}
1666
1666
1667 def err(msg):
1667 def err(msg):
1668 self.ui.warn(msg + "\n")
1668 self.ui.warn(msg + "\n")
1669 errors[0] += 1
1669 errors[0] += 1
1670
1670
1671 seen = {}
1671 seen = {}
1672 self.ui.status(_("checking changesets\n"))
1672 self.ui.status(_("checking changesets\n"))
1673 d = self.changelog.checksize()
1673 d = self.changelog.checksize()
1674 if d:
1674 if d:
1675 err(_("changeset data short %d bytes") % d)
1675 err(_("changeset data short %d bytes") % d)
1676 for i in range(self.changelog.count()):
1676 for i in range(self.changelog.count()):
1677 changesets += 1
1677 changesets += 1
1678 n = self.changelog.node(i)
1678 n = self.changelog.node(i)
1679 l = self.changelog.linkrev(n)
1679 l = self.changelog.linkrev(n)
1680 if l != i:
1680 if l != i:
1681 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1681 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1682 if n in seen:
1682 if n in seen:
1683 err(_("duplicate changeset at revision %d") % i)
1683 err(_("duplicate changeset at revision %d") % i)
1684 seen[n] = 1
1684 seen[n] = 1
1685
1685
1686 for p in self.changelog.parents(n):
1686 for p in self.changelog.parents(n):
1687 if p not in self.changelog.nodemap:
1687 if p not in self.changelog.nodemap:
1688 err(_("changeset %s has unknown parent %s") %
1688 err(_("changeset %s has unknown parent %s") %
1689 (short(n), short(p)))
1689 (short(n), short(p)))
1690 try:
1690 try:
1691 changes = self.changelog.read(n)
1691 changes = self.changelog.read(n)
1692 except KeyboardInterrupt:
1692 except KeyboardInterrupt:
1693 self.ui.warn(_("interrupted"))
1693 self.ui.warn(_("interrupted"))
1694 raise
1694 raise
1695 except Exception, inst:
1695 except Exception, inst:
1696 err(_("unpacking changeset %s: %s") % (short(n), inst))
1696 err(_("unpacking changeset %s: %s") % (short(n), inst))
1697
1697
1698 neededmanifests[changes[0]] = n
1698 neededmanifests[changes[0]] = n
1699
1699
1700 for f in changes[3]:
1700 for f in changes[3]:
1701 filelinkrevs.setdefault(f, []).append(i)
1701 filelinkrevs.setdefault(f, []).append(i)
1702
1702
1703 seen = {}
1703 seen = {}
1704 self.ui.status(_("checking manifests\n"))
1704 self.ui.status(_("checking manifests\n"))
1705 d = self.manifest.checksize()
1705 d = self.manifest.checksize()
1706 if d:
1706 if d:
1707 err(_("manifest data short %d bytes") % d)
1707 err(_("manifest data short %d bytes") % d)
1708 for i in range(self.manifest.count()):
1708 for i in range(self.manifest.count()):
1709 n = self.manifest.node(i)
1709 n = self.manifest.node(i)
1710 l = self.manifest.linkrev(n)
1710 l = self.manifest.linkrev(n)
1711
1711
1712 if l < 0 or l >= self.changelog.count():
1712 if l < 0 or l >= self.changelog.count():
1713 err(_("bad manifest link (%d) at revision %d") % (l, i))
1713 err(_("bad manifest link (%d) at revision %d") % (l, i))
1714
1714
1715 if n in neededmanifests:
1715 if n in neededmanifests:
1716 del neededmanifests[n]
1716 del neededmanifests[n]
1717
1717
1718 if n in seen:
1718 if n in seen:
1719 err(_("duplicate manifest at revision %d") % i)
1719 err(_("duplicate manifest at revision %d") % i)
1720
1720
1721 seen[n] = 1
1721 seen[n] = 1
1722
1722
1723 for p in self.manifest.parents(n):
1723 for p in self.manifest.parents(n):
1724 if p not in self.manifest.nodemap:
1724 if p not in self.manifest.nodemap:
1725 err(_("manifest %s has unknown parent %s") %
1725 err(_("manifest %s has unknown parent %s") %
1726 (short(n), short(p)))
1726 (short(n), short(p)))
1727
1727
1728 try:
1728 try:
1729 delta = mdiff.patchtext(self.manifest.delta(n))
1729 delta = mdiff.patchtext(self.manifest.delta(n))
1730 except KeyboardInterrupt:
1730 except KeyboardInterrupt:
1731 self.ui.warn(_("interrupted"))
1731 self.ui.warn(_("interrupted"))
1732 raise
1732 raise
1733 except Exception, inst:
1733 except Exception, inst:
1734 err(_("unpacking manifest %s: %s") % (short(n), inst))
1734 err(_("unpacking manifest %s: %s") % (short(n), inst))
1735
1735
1736 ff = [ l.split('\0') for l in delta.splitlines() ]
1736 ff = [ l.split('\0') for l in delta.splitlines() ]
1737 for f, fn in ff:
1737 for f, fn in ff:
1738 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1738 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1739
1739
1740 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1740 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1741
1741
1742 for m, c in neededmanifests.items():
1742 for m, c in neededmanifests.items():
1743 err(_("Changeset %s refers to unknown manifest %s") %
1743 err(_("Changeset %s refers to unknown manifest %s") %
1744 (short(m), short(c)))
1744 (short(m), short(c)))
1745 del neededmanifests
1745 del neededmanifests
1746
1746
1747 for f in filenodes:
1747 for f in filenodes:
1748 if f not in filelinkrevs:
1748 if f not in filelinkrevs:
1749 err(_("file %s in manifest but not in changesets") % f)
1749 err(_("file %s in manifest but not in changesets") % f)
1750
1750
1751 for f in filelinkrevs:
1751 for f in filelinkrevs:
1752 if f not in filenodes:
1752 if f not in filenodes:
1753 err(_("file %s in changeset but not in manifest") % f)
1753 err(_("file %s in changeset but not in manifest") % f)
1754
1754
1755 self.ui.status(_("checking files\n"))
1755 self.ui.status(_("checking files\n"))
1756 ff = filenodes.keys()
1756 ff = filenodes.keys()
1757 ff.sort()
1757 ff.sort()
1758 for f in ff:
1758 for f in ff:
1759 if f == "/dev/null":
1759 if f == "/dev/null":
1760 continue
1760 continue
1761 files += 1
1761 files += 1
1762 fl = self.file(f)
1762 fl = self.file(f)
1763 d = fl.checksize()
1763 d = fl.checksize()
1764 if d:
1764 if d:
1765 err(_("%s file data short %d bytes") % (f, d))
1765 err(_("%s file data short %d bytes") % (f, d))
1766
1766
1767 nodes = {nullid: 1}
1767 nodes = {nullid: 1}
1768 seen = {}
1768 seen = {}
1769 for i in range(fl.count()):
1769 for i in range(fl.count()):
1770 revisions += 1
1770 revisions += 1
1771 n = fl.node(i)
1771 n = fl.node(i)
1772
1772
1773 if n in seen:
1773 if n in seen:
1774 err(_("%s: duplicate revision %d") % (f, i))
1774 err(_("%s: duplicate revision %d") % (f, i))
1775 if n not in filenodes[f]:
1775 if n not in filenodes[f]:
1776 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1776 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1777 else:
1777 else:
1778 del filenodes[f][n]
1778 del filenodes[f][n]
1779
1779
1780 flr = fl.linkrev(n)
1780 flr = fl.linkrev(n)
1781 if flr not in filelinkrevs[f]:
1781 if flr not in filelinkrevs[f]:
1782 err(_("%s:%s points to unexpected changeset %d")
1782 err(_("%s:%s points to unexpected changeset %d")
1783 % (f, short(n), flr))
1783 % (f, short(n), flr))
1784 else:
1784 else:
1785 filelinkrevs[f].remove(flr)
1785 filelinkrevs[f].remove(flr)
1786
1786
1787 # verify contents
1787 # verify contents
1788 try:
1788 try:
1789 t = fl.read(n)
1789 t = fl.read(n)
1790 except KeyboardInterrupt:
1790 except KeyboardInterrupt:
1791 self.ui.warn(_("interrupted"))
1791 self.ui.warn(_("interrupted"))
1792 raise
1792 raise
1793 except Exception, inst:
1793 except Exception, inst:
1794 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1794 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1795
1795
1796 # verify parents
1796 # verify parents
1797 (p1, p2) = fl.parents(n)
1797 (p1, p2) = fl.parents(n)
1798 if p1 not in nodes:
1798 if p1 not in nodes:
1799 err(_("file %s:%s unknown parent 1 %s") %
1799 err(_("file %s:%s unknown parent 1 %s") %
1800 (f, short(n), short(p1)))
1800 (f, short(n), short(p1)))
1801 if p2 not in nodes:
1801 if p2 not in nodes:
1802 err(_("file %s:%s unknown parent 2 %s") %
1802 err(_("file %s:%s unknown parent 2 %s") %
1803 (f, short(n), short(p1)))
1803 (f, short(n), short(p1)))
1804 nodes[n] = 1
1804 nodes[n] = 1
1805
1805
1806 # cross-check
1806 # cross-check
1807 for node in filenodes[f]:
1807 for node in filenodes[f]:
1808 err(_("node %s in manifests not in %s") % (hex(node), f))
1808 err(_("node %s in manifests not in %s") % (hex(node), f))
1809
1809
1810 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1810 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1811 (files, changesets, revisions))
1811 (files, changesets, revisions))
1812
1812
1813 if errors[0]:
1813 if errors[0]:
1814 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1814 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1815 return 1
1815 return 1
General Comments 0
You need to be logged in to leave comments. Login now