Allow reverting a deleted file with two parents...
Matt Mackall
r1448:182879d7 default
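For context, a minimal before/after sketch of the undelete() preamble this changeset touches, reconstructed from the hunk below (the enclosing localrepository class and the rest of the method are unchanged; the ellipses stand for the untouched remainder of the function):

    # before: refuse to revert a deleted file while a merge is in progress
    def undelete(self, list):
        pl = self.dirstate.parents()
        if pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1
        p = pl[0]
        mn = self.changelog.read(p)[0]
        ...

    # after: always restore from the first dirstate parent, even when two parents exist
    def undelete(self, list):
        p = self.dirstate.parents()[0]
        mn = self.changelog.read(p)[0]
        ...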
@@ -1,1476 +1,1472 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository:
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError(_("no repo found"))
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError(_("repository %s not found") % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.opener("hgrc"))
46 self.ui.readconfig(self.opener("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 s = self.ui.config("hooks", name)
50 s = self.ui.config("hooks", name)
51 if s:
51 if s:
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 old = {}
53 old = {}
54 for k, v in args.items():
54 for k, v in args.items():
55 k = k.upper()
55 k = k.upper()
56 old[k] = os.environ.get(k, None)
56 old[k] = os.environ.get(k, None)
57 os.environ[k] = v
57 os.environ[k] = v
58
58
59 # Hooks run in the repository root
59 # Hooks run in the repository root
60 olddir = os.getcwd()
60 olddir = os.getcwd()
61 os.chdir(self.root)
61 os.chdir(self.root)
62 r = os.system(s)
62 r = os.system(s)
63 os.chdir(olddir)
63 os.chdir(olddir)
64
64
65 for k, v in old.items():
65 for k, v in old.items():
66 if v != None:
66 if v != None:
67 os.environ[k] = v
67 os.environ[k] = v
68 else:
68 else:
69 del os.environ[k]
69 del os.environ[k]
70
70
71 if r:
71 if r:
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 (name, r))
73 (name, r))
74 return False
74 return False
75 return True
75 return True
76
76
77 def tags(self):
77 def tags(self):
78 '''return a mapping of tag to node'''
78 '''return a mapping of tag to node'''
79 if not self.tagscache:
79 if not self.tagscache:
80 self.tagscache = {}
80 self.tagscache = {}
81 def addtag(self, k, n):
81 def addtag(self, k, n):
82 try:
82 try:
83 bin_n = bin(n)
83 bin_n = bin(n)
84 except TypeError:
84 except TypeError:
85 bin_n = ''
85 bin_n = ''
86 self.tagscache[k.strip()] = bin_n
86 self.tagscache[k.strip()] = bin_n
87
87
88 try:
88 try:
89 # read each head of the tags file, ending with the tip
89 # read each head of the tags file, ending with the tip
90 # and add each tag found to the map, with "newer" ones
90 # and add each tag found to the map, with "newer" ones
91 # taking precedence
91 # taking precedence
92 fl = self.file(".hgtags")
92 fl = self.file(".hgtags")
93 h = fl.heads()
93 h = fl.heads()
94 h.reverse()
94 h.reverse()
95 for r in h:
95 for r in h:
96 for l in fl.read(r).splitlines():
96 for l in fl.read(r).splitlines():
97 if l:
97 if l:
98 n, k = l.split(" ", 1)
98 n, k = l.split(" ", 1)
99 addtag(self, k, n)
99 addtag(self, k, n)
100 except KeyError:
100 except KeyError:
101 pass
101 pass
102
102
103 try:
103 try:
104 f = self.opener("localtags")
104 f = self.opener("localtags")
105 for l in f:
105 for l in f:
106 n, k = l.split(" ", 1)
106 n, k = l.split(" ", 1)
107 addtag(self, k, n)
107 addtag(self, k, n)
108 except IOError:
108 except IOError:
109 pass
109 pass
110
110
111 self.tagscache['tip'] = self.changelog.tip()
111 self.tagscache['tip'] = self.changelog.tip()
112
112
113 return self.tagscache
113 return self.tagscache
114
114
115 def tagslist(self):
115 def tagslist(self):
116 '''return a list of tags ordered by revision'''
116 '''return a list of tags ordered by revision'''
117 l = []
117 l = []
118 for t, n in self.tags().items():
118 for t, n in self.tags().items():
119 try:
119 try:
120 r = self.changelog.rev(n)
120 r = self.changelog.rev(n)
121 except:
121 except:
122 r = -2 # sort to the beginning of the list if unknown
122 r = -2 # sort to the beginning of the list if unknown
123 l.append((r,t,n))
123 l.append((r,t,n))
124 l.sort()
124 l.sort()
125 return [(t,n) for r,t,n in l]
125 return [(t,n) for r,t,n in l]
126
126
127 def nodetags(self, node):
127 def nodetags(self, node):
128 '''return the tags associated with a node'''
128 '''return the tags associated with a node'''
129 if not self.nodetagscache:
129 if not self.nodetagscache:
130 self.nodetagscache = {}
130 self.nodetagscache = {}
131 for t,n in self.tags().items():
131 for t,n in self.tags().items():
132 self.nodetagscache.setdefault(n,[]).append(t)
132 self.nodetagscache.setdefault(n,[]).append(t)
133 return self.nodetagscache.get(node, [])
133 return self.nodetagscache.get(node, [])
134
134
135 def lookup(self, key):
135 def lookup(self, key):
136 try:
136 try:
137 return self.tags()[key]
137 return self.tags()[key]
138 except KeyError:
138 except KeyError:
139 try:
139 try:
140 return self.changelog.lookup(key)
140 return self.changelog.lookup(key)
141 except:
141 except:
142 raise repo.RepoError(_("unknown revision '%s'") % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
143
143
144 def dev(self):
144 def dev(self):
145 return os.stat(self.path).st_dev
145 return os.stat(self.path).st_dev
146
146
147 def local(self):
147 def local(self):
148 return True
148 return True
149
149
150 def join(self, f):
150 def join(self, f):
151 return os.path.join(self.path, f)
151 return os.path.join(self.path, f)
152
152
153 def wjoin(self, f):
153 def wjoin(self, f):
154 return os.path.join(self.root, f)
154 return os.path.join(self.root, f)
155
155
156 def file(self, f):
156 def file(self, f):
157 if f[0] == '/': f = f[1:]
157 if f[0] == '/': f = f[1:]
158 return filelog.filelog(self.opener, f)
158 return filelog.filelog(self.opener, f)
159
159
160 def getcwd(self):
160 def getcwd(self):
161 return self.dirstate.getcwd()
161 return self.dirstate.getcwd()
162
162
163 def wfile(self, f, mode='r'):
163 def wfile(self, f, mode='r'):
164 return self.wopener(f, mode)
164 return self.wopener(f, mode)
165
165
166 def wread(self, filename):
166 def wread(self, filename):
167 if self.encodepats == None:
167 if self.encodepats == None:
168 l = []
168 l = []
169 for pat, cmd in self.ui.configitems("encode"):
169 for pat, cmd in self.ui.configitems("encode"):
170 mf = util.matcher("", "/", [pat], [], [])[1]
170 mf = util.matcher("", "/", [pat], [], [])[1]
171 l.append((mf, cmd))
171 l.append((mf, cmd))
172 self.encodepats = l
172 self.encodepats = l
173
173
174 data = self.wopener(filename, 'r').read()
174 data = self.wopener(filename, 'r').read()
175
175
176 for mf, cmd in self.encodepats:
176 for mf, cmd in self.encodepats:
177 if mf(filename):
177 if mf(filename):
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 data = util.filter(data, cmd)
179 data = util.filter(data, cmd)
180 break
180 break
181
181
182 return data
182 return data
183
183
184 def wwrite(self, filename, data, fd=None):
184 def wwrite(self, filename, data, fd=None):
185 if self.decodepats == None:
185 if self.decodepats == None:
186 l = []
186 l = []
187 for pat, cmd in self.ui.configitems("decode"):
187 for pat, cmd in self.ui.configitems("decode"):
188 mf = util.matcher("", "/", [pat], [], [])[1]
188 mf = util.matcher("", "/", [pat], [], [])[1]
189 l.append((mf, cmd))
189 l.append((mf, cmd))
190 self.decodepats = l
190 self.decodepats = l
191
191
192 for mf, cmd in self.decodepats:
192 for mf, cmd in self.decodepats:
193 if mf(filename):
193 if mf(filename):
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 data = util.filter(data, cmd)
195 data = util.filter(data, cmd)
196 break
196 break
197
197
198 if fd:
198 if fd:
199 return fd.write(data)
199 return fd.write(data)
200 return self.wopener(filename, 'w').write(data)
200 return self.wopener(filename, 'w').write(data)
201
201
202 def transaction(self):
202 def transaction(self):
203 # save dirstate for undo
203 # save dirstate for undo
204 try:
204 try:
205 ds = self.opener("dirstate").read()
205 ds = self.opener("dirstate").read()
206 except IOError:
206 except IOError:
207 ds = ""
207 ds = ""
208 self.opener("journal.dirstate", "w").write(ds)
208 self.opener("journal.dirstate", "w").write(ds)
209
209
210 def after():
210 def after():
211 util.rename(self.join("journal"), self.join("undo"))
211 util.rename(self.join("journal"), self.join("undo"))
212 util.rename(self.join("journal.dirstate"),
212 util.rename(self.join("journal.dirstate"),
213 self.join("undo.dirstate"))
213 self.join("undo.dirstate"))
214
214
215 return transaction.transaction(self.ui.warn, self.opener,
215 return transaction.transaction(self.ui.warn, self.opener,
216 self.join("journal"), after)
216 self.join("journal"), after)
217
217
218 def recover(self):
218 def recover(self):
219 lock = self.lock()
219 lock = self.lock()
220 if os.path.exists(self.join("journal")):
220 if os.path.exists(self.join("journal")):
221 self.ui.status(_("rolling back interrupted transaction\n"))
221 self.ui.status(_("rolling back interrupted transaction\n"))
222 return transaction.rollback(self.opener, self.join("journal"))
222 return transaction.rollback(self.opener, self.join("journal"))
223 else:
223 else:
224 self.ui.warn(_("no interrupted transaction available\n"))
224 self.ui.warn(_("no interrupted transaction available\n"))
225
225
226 def undo(self):
226 def undo(self):
227 lock = self.lock()
227 lock = self.lock()
228 if os.path.exists(self.join("undo")):
228 if os.path.exists(self.join("undo")):
229 self.ui.status(_("rolling back last transaction\n"))
229 self.ui.status(_("rolling back last transaction\n"))
230 transaction.rollback(self.opener, self.join("undo"))
230 transaction.rollback(self.opener, self.join("undo"))
231 self.dirstate = None
231 self.dirstate = None
232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
234 else:
234 else:
235 self.ui.warn(_("no undo information available\n"))
235 self.ui.warn(_("no undo information available\n"))
236
236
237 def lock(self, wait=1):
237 def lock(self, wait=1):
238 try:
238 try:
239 return lock.lock(self.join("lock"), 0)
239 return lock.lock(self.join("lock"), 0)
240 except lock.LockHeld, inst:
240 except lock.LockHeld, inst:
241 if wait:
241 if wait:
242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
243 return lock.lock(self.join("lock"), wait)
243 return lock.lock(self.join("lock"), wait)
244 raise inst
244 raise inst
245
245
246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
247 orig_parent = self.dirstate.parents()[0] or nullid
247 orig_parent = self.dirstate.parents()[0] or nullid
248 p1 = p1 or self.dirstate.parents()[0] or nullid
248 p1 = p1 or self.dirstate.parents()[0] or nullid
249 p2 = p2 or self.dirstate.parents()[1] or nullid
249 p2 = p2 or self.dirstate.parents()[1] or nullid
250 c1 = self.changelog.read(p1)
250 c1 = self.changelog.read(p1)
251 c2 = self.changelog.read(p2)
251 c2 = self.changelog.read(p2)
252 m1 = self.manifest.read(c1[0])
252 m1 = self.manifest.read(c1[0])
253 mf1 = self.manifest.readflags(c1[0])
253 mf1 = self.manifest.readflags(c1[0])
254 m2 = self.manifest.read(c2[0])
254 m2 = self.manifest.read(c2[0])
255 changed = []
255 changed = []
256
256
257 if orig_parent == p1:
257 if orig_parent == p1:
258 update_dirstate = 1
258 update_dirstate = 1
259 else:
259 else:
260 update_dirstate = 0
260 update_dirstate = 0
261
261
262 tr = self.transaction()
262 tr = self.transaction()
263 mm = m1.copy()
263 mm = m1.copy()
264 mfm = mf1.copy()
264 mfm = mf1.copy()
265 linkrev = self.changelog.count()
265 linkrev = self.changelog.count()
266 for f in files:
266 for f in files:
267 try:
267 try:
268 t = self.wread(f)
268 t = self.wread(f)
269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
270 r = self.file(f)
270 r = self.file(f)
271 mfm[f] = tm
271 mfm[f] = tm
272
272
273 fp1 = m1.get(f, nullid)
273 fp1 = m1.get(f, nullid)
274 fp2 = m2.get(f, nullid)
274 fp2 = m2.get(f, nullid)
275
275
276 # is the same revision on two branches of a merge?
276 # is the same revision on two branches of a merge?
277 if fp2 == fp1:
277 if fp2 == fp1:
278 fp2 = nullid
278 fp2 = nullid
279
279
280 if fp2 != nullid:
280 if fp2 != nullid:
281 # is one parent an ancestor of the other?
281 # is one parent an ancestor of the other?
282 fpa = r.ancestor(fp1, fp2)
282 fpa = r.ancestor(fp1, fp2)
283 if fpa == fp1:
283 if fpa == fp1:
284 fp1, fp2 = fp2, nullid
284 fp1, fp2 = fp2, nullid
285 elif fpa == fp2:
285 elif fpa == fp2:
286 fp2 = nullid
286 fp2 = nullid
287
287
288 # is the file unmodified from the parent?
288 # is the file unmodified from the parent?
289 if t == r.read(fp1):
289 if t == r.read(fp1):
290 # record the proper existing parent in manifest
290 # record the proper existing parent in manifest
291 # no need to add a revision
291 # no need to add a revision
292 mm[f] = fp1
292 mm[f] = fp1
293 continue
293 continue
294
294
295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
296 changed.append(f)
296 changed.append(f)
297 if update_dirstate:
297 if update_dirstate:
298 self.dirstate.update([f], "n")
298 self.dirstate.update([f], "n")
299 except IOError:
299 except IOError:
300 try:
300 try:
301 del mm[f]
301 del mm[f]
302 del mfm[f]
302 del mfm[f]
303 if update_dirstate:
303 if update_dirstate:
304 self.dirstate.forget([f])
304 self.dirstate.forget([f])
305 except:
305 except:
306 # deleted from p2?
306 # deleted from p2?
307 pass
307 pass
308
308
309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
310 user = user or self.ui.username()
310 user = user or self.ui.username()
311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
312 tr.close()
312 tr.close()
313 if update_dirstate:
313 if update_dirstate:
314 self.dirstate.setparents(n, nullid)
314 self.dirstate.setparents(n, nullid)
315
315
316 def commit(self, files = None, text = "", user = None, date = None,
316 def commit(self, files = None, text = "", user = None, date = None,
317 match = util.always, force=False):
317 match = util.always, force=False):
318 commit = []
318 commit = []
319 remove = []
319 remove = []
320 changed = []
320 changed = []
321
321
322 if files:
322 if files:
323 for f in files:
323 for f in files:
324 s = self.dirstate.state(f)
324 s = self.dirstate.state(f)
325 if s in 'nmai':
325 if s in 'nmai':
326 commit.append(f)
326 commit.append(f)
327 elif s == 'r':
327 elif s == 'r':
328 remove.append(f)
328 remove.append(f)
329 else:
329 else:
330 self.ui.warn(_("%s not tracked!\n") % f)
330 self.ui.warn(_("%s not tracked!\n") % f)
331 else:
331 else:
332 (c, a, d, u) = self.changes(match=match)
332 (c, a, d, u) = self.changes(match=match)
333 commit = c + a
333 commit = c + a
334 remove = d
334 remove = d
335
335
336 p1, p2 = self.dirstate.parents()
336 p1, p2 = self.dirstate.parents()
337 c1 = self.changelog.read(p1)
337 c1 = self.changelog.read(p1)
338 c2 = self.changelog.read(p2)
338 c2 = self.changelog.read(p2)
339 m1 = self.manifest.read(c1[0])
339 m1 = self.manifest.read(c1[0])
340 mf1 = self.manifest.readflags(c1[0])
340 mf1 = self.manifest.readflags(c1[0])
341 m2 = self.manifest.read(c2[0])
341 m2 = self.manifest.read(c2[0])
342
342
343 if not commit and not remove and not force and p2 == nullid:
343 if not commit and not remove and not force and p2 == nullid:
344 self.ui.status(_("nothing changed\n"))
344 self.ui.status(_("nothing changed\n"))
345 return None
345 return None
346
346
347 if not self.hook("precommit"):
347 if not self.hook("precommit"):
348 return None
348 return None
349
349
350 lock = self.lock()
350 lock = self.lock()
351 tr = self.transaction()
351 tr = self.transaction()
352
352
353 # check in files
353 # check in files
354 new = {}
354 new = {}
355 linkrev = self.changelog.count()
355 linkrev = self.changelog.count()
356 commit.sort()
356 commit.sort()
357 for f in commit:
357 for f in commit:
358 self.ui.note(f + "\n")
358 self.ui.note(f + "\n")
359 try:
359 try:
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
361 t = self.wread(f)
361 t = self.wread(f)
362 except IOError:
362 except IOError:
363 self.ui.warn(_("trouble committing %s!\n") % f)
363 self.ui.warn(_("trouble committing %s!\n") % f)
364 raise
364 raise
365
365
366 r = self.file(f)
366 r = self.file(f)
367
367
368 meta = {}
368 meta = {}
369 cp = self.dirstate.copied(f)
369 cp = self.dirstate.copied(f)
370 if cp:
370 if cp:
371 meta["copy"] = cp
371 meta["copy"] = cp
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
374 fp1, fp2 = nullid, nullid
374 fp1, fp2 = nullid, nullid
375 else:
375 else:
376 fp1 = m1.get(f, nullid)
376 fp1 = m1.get(f, nullid)
377 fp2 = m2.get(f, nullid)
377 fp2 = m2.get(f, nullid)
378
378
379 # is the same revision on two branches of a merge?
379 # is the same revision on two branches of a merge?
380 if fp2 == fp1:
380 if fp2 == fp1:
381 fp2 = nullid
381 fp2 = nullid
382
382
383 if fp2 != nullid:
383 if fp2 != nullid:
384 # is one parent an ancestor of the other?
384 # is one parent an ancestor of the other?
385 fpa = r.ancestor(fp1, fp2)
385 fpa = r.ancestor(fp1, fp2)
386 if fpa == fp1:
386 if fpa == fp1:
387 fp1, fp2 = fp2, nullid
387 fp1, fp2 = fp2, nullid
388 elif fpa == fp2:
388 elif fpa == fp2:
389 fp2 = nullid
389 fp2 = nullid
390
390
391 # is the file unmodified from the parent?
391 # is the file unmodified from the parent?
392 if not meta and t == r.read(fp1):
392 if not meta and t == r.read(fp1):
393 # record the proper existing parent in manifest
393 # record the proper existing parent in manifest
394 # no need to add a revision
394 # no need to add a revision
395 new[f] = fp1
395 new[f] = fp1
396 continue
396 continue
397
397
398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
399 # remember what we've added so that we can later calculate
399 # remember what we've added so that we can later calculate
400 # the files to pull from a set of changesets
400 # the files to pull from a set of changesets
401 changed.append(f)
401 changed.append(f)
402
402
403 # update manifest
403 # update manifest
404 m1.update(new)
404 m1.update(new)
405 for f in remove:
405 for f in remove:
406 if f in m1:
406 if f in m1:
407 del m1[f]
407 del m1[f]
408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
409 (new, remove))
409 (new, remove))
410
410
411 # add changeset
411 # add changeset
412 new = new.keys()
412 new = new.keys()
413 new.sort()
413 new.sort()
414
414
415 if not text:
415 if not text:
416 edittext = ""
416 edittext = ""
417 if p2 != nullid:
417 if p2 != nullid:
418 edittext += "HG: branch merge\n"
418 edittext += "HG: branch merge\n"
419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
422 if not changed and not remove:
422 if not changed and not remove:
423 edittext += "HG: no files changed\n"
423 edittext += "HG: no files changed\n"
424 edittext = self.ui.edit(edittext)
424 edittext = self.ui.edit(edittext)
425 if not edittext.rstrip():
425 if not edittext.rstrip():
426 return None
426 return None
427 text = edittext
427 text = edittext
428
428
429 user = user or self.ui.username()
429 user = user or self.ui.username()
430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
431 tr.close()
431 tr.close()
432
432
433 self.dirstate.setparents(n)
433 self.dirstate.setparents(n)
434 self.dirstate.update(new, "n")
434 self.dirstate.update(new, "n")
435 self.dirstate.forget(remove)
435 self.dirstate.forget(remove)
436
436
437 if not self.hook("commit", node=hex(n)):
437 if not self.hook("commit", node=hex(n)):
438 return None
438 return None
439 return n
439 return n
440
440
441 def walk(self, node=None, files=[], match=util.always):
441 def walk(self, node=None, files=[], match=util.always):
442 if node:
442 if node:
443 for fn in self.manifest.read(self.changelog.read(node)[0]):
443 for fn in self.manifest.read(self.changelog.read(node)[0]):
444 if match(fn): yield 'm', fn
444 if match(fn): yield 'm', fn
445 else:
445 else:
446 for src, fn in self.dirstate.walk(files, match):
446 for src, fn in self.dirstate.walk(files, match):
447 yield src, fn
447 yield src, fn
448
448
449 def changes(self, node1 = None, node2 = None, files = [],
449 def changes(self, node1 = None, node2 = None, files = [],
450 match = util.always):
450 match = util.always):
451 mf2, u = None, []
451 mf2, u = None, []
452
452
453 def fcmp(fn, mf):
453 def fcmp(fn, mf):
454 t1 = self.wread(fn)
454 t1 = self.wread(fn)
455 t2 = self.file(fn).read(mf.get(fn, nullid))
455 t2 = self.file(fn).read(mf.get(fn, nullid))
456 return cmp(t1, t2)
456 return cmp(t1, t2)
457
457
458 def mfmatches(node):
458 def mfmatches(node):
459 mf = dict(self.manifest.read(node))
459 mf = dict(self.manifest.read(node))
460 for fn in mf.keys():
460 for fn in mf.keys():
461 if not match(fn):
461 if not match(fn):
462 del mf[fn]
462 del mf[fn]
463 return mf
463 return mf
464
464
465 # are we comparing the working directory?
465 # are we comparing the working directory?
466 if not node2:
466 if not node2:
467 l, c, a, d, u = self.dirstate.changes(files, match)
467 l, c, a, d, u = self.dirstate.changes(files, match)
468
468
469 # are we comparing working dir against its parent?
469 # are we comparing working dir against its parent?
470 if not node1:
470 if not node1:
471 if l:
471 if l:
472 # do a full compare of any files that might have changed
472 # do a full compare of any files that might have changed
473 change = self.changelog.read(self.dirstate.parents()[0])
473 change = self.changelog.read(self.dirstate.parents()[0])
474 mf2 = mfmatches(change[0])
474 mf2 = mfmatches(change[0])
475 for f in l:
475 for f in l:
476 if fcmp(f, mf2):
476 if fcmp(f, mf2):
477 c.append(f)
477 c.append(f)
478
478
479 for l in c, a, d, u:
479 for l in c, a, d, u:
480 l.sort()
480 l.sort()
481
481
482 return (c, a, d, u)
482 return (c, a, d, u)
483
483
484 # are we comparing working dir against non-tip?
484 # are we comparing working dir against non-tip?
485 # generate a pseudo-manifest for the working dir
485 # generate a pseudo-manifest for the working dir
486 if not node2:
486 if not node2:
487 if not mf2:
487 if not mf2:
488 change = self.changelog.read(self.dirstate.parents()[0])
488 change = self.changelog.read(self.dirstate.parents()[0])
489 mf2 = mfmatches(change[0])
489 mf2 = mfmatches(change[0])
490 for f in a + c + l:
490 for f in a + c + l:
491 mf2[f] = ""
491 mf2[f] = ""
492 for f in d:
492 for f in d:
493 if f in mf2: del mf2[f]
493 if f in mf2: del mf2[f]
494 else:
494 else:
495 change = self.changelog.read(node2)
495 change = self.changelog.read(node2)
496 mf2 = mfmatches(change[0])
496 mf2 = mfmatches(change[0])
497
497
498 # flush lists from dirstate before comparing manifests
498 # flush lists from dirstate before comparing manifests
499 c, a = [], []
499 c, a = [], []
500
500
501 change = self.changelog.read(node1)
501 change = self.changelog.read(node1)
502 mf1 = mfmatches(change[0])
502 mf1 = mfmatches(change[0])
503
503
504 for fn in mf2:
504 for fn in mf2:
505 if mf1.has_key(fn):
505 if mf1.has_key(fn):
506 if mf1[fn] != mf2[fn]:
506 if mf1[fn] != mf2[fn]:
507 if mf2[fn] != "" or fcmp(fn, mf1):
507 if mf2[fn] != "" or fcmp(fn, mf1):
508 c.append(fn)
508 c.append(fn)
509 del mf1[fn]
509 del mf1[fn]
510 else:
510 else:
511 a.append(fn)
511 a.append(fn)
512
512
513 d = mf1.keys()
513 d = mf1.keys()
514
514
515 for l in c, a, d, u:
515 for l in c, a, d, u:
516 l.sort()
516 l.sort()
517
517
518 return (c, a, d, u)
518 return (c, a, d, u)
519
519
520 def add(self, list):
520 def add(self, list):
521 for f in list:
521 for f in list:
522 p = self.wjoin(f)
522 p = self.wjoin(f)
523 if not os.path.exists(p):
523 if not os.path.exists(p):
524 self.ui.warn(_("%s does not exist!\n") % f)
524 self.ui.warn(_("%s does not exist!\n") % f)
525 elif not os.path.isfile(p):
525 elif not os.path.isfile(p):
526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
527 elif self.dirstate.state(f) in 'an':
527 elif self.dirstate.state(f) in 'an':
528 self.ui.warn(_("%s already tracked!\n") % f)
528 self.ui.warn(_("%s already tracked!\n") % f)
529 else:
529 else:
530 self.dirstate.update([f], "a")
530 self.dirstate.update([f], "a")
531
531
532 def forget(self, list):
532 def forget(self, list):
533 for f in list:
533 for f in list:
534 if self.dirstate.state(f) not in 'ai':
534 if self.dirstate.state(f) not in 'ai':
535 self.ui.warn(_("%s not added!\n") % f)
535 self.ui.warn(_("%s not added!\n") % f)
536 else:
536 else:
537 self.dirstate.forget([f])
537 self.dirstate.forget([f])
538
538
539 def remove(self, list, unlink=False):
539 def remove(self, list, unlink=False):
540 if unlink:
540 if unlink:
541 for f in list:
541 for f in list:
542 try:
542 try:
543 util.unlink(self.wjoin(f))
543 util.unlink(self.wjoin(f))
544 except OSError, inst:
544 except OSError, inst:
545 if inst.errno != errno.ENOENT: raise
545 if inst.errno != errno.ENOENT: raise
546 for f in list:
546 for f in list:
547 p = self.wjoin(f)
547 p = self.wjoin(f)
548 if os.path.exists(p):
548 if os.path.exists(p):
549 self.ui.warn(_("%s still exists!\n") % f)
549 self.ui.warn(_("%s still exists!\n") % f)
550 elif self.dirstate.state(f) == 'a':
550 elif self.dirstate.state(f) == 'a':
551 self.ui.warn(_("%s never committed!\n") % f)
551 self.ui.warn(_("%s never committed!\n") % f)
552 self.dirstate.forget([f])
552 self.dirstate.forget([f])
553 elif f not in self.dirstate:
553 elif f not in self.dirstate:
554 self.ui.warn(_("%s not tracked!\n") % f)
554 self.ui.warn(_("%s not tracked!\n") % f)
555 else:
555 else:
556 self.dirstate.update([f], "r")
556 self.dirstate.update([f], "r")
557
557
558 def undelete(self, list):
558 def undelete(self, list):
559 pl = self.dirstate.parents()
559 p = self.dirstate.parents()[0]
560 if pl[1] != nullid:
561 self.ui.warn("aborting: outstanding uncommitted merges\n")
562 return 1
563 p = pl[0]
564 mn = self.changelog.read(p)[0]
560 mn = self.changelog.read(p)[0]
565 mf = self.manifest.readflags(mn)
561 mf = self.manifest.readflags(mn)
566 m = self.manifest.read(mn)
562 m = self.manifest.read(mn)
567 for f in list:
563 for f in list:
568 if self.dirstate.state(f) not in "r":
564 if self.dirstate.state(f) not in "r":
569 self.ui.warn("%s not removed!\n" % f)
565 self.ui.warn("%s not removed!\n" % f)
570 else:
566 else:
571 t = self.file(f).read(m[f])
567 t = self.file(f).read(m[f])
572 try:
568 try:
573 self.wwrite(f, t)
569 self.wwrite(f, t)
574 except IOError, e:
570 except IOError, e:
575 if e.errno != errno.ENOENT:
571 if e.errno != errno.ENOENT:
576 raise
572 raise
577 os.makedirs(os.path.dirname(self.wjoin(f)))
573 os.makedirs(os.path.dirname(self.wjoin(f)))
578 self.wwrite(f, t)
574 self.wwrite(f, t)
579 util.set_exec(self.wjoin(f), mf[f])
575 util.set_exec(self.wjoin(f), mf[f])
580 self.dirstate.update([f], "n")
576 self.dirstate.update([f], "n")
581
577
582 def copy(self, source, dest):
578 def copy(self, source, dest):
583 p = self.wjoin(dest)
579 p = self.wjoin(dest)
584 if not os.path.exists(p):
580 if not os.path.exists(p):
585 self.ui.warn(_("%s does not exist!\n") % dest)
581 self.ui.warn(_("%s does not exist!\n") % dest)
586 elif not os.path.isfile(p):
582 elif not os.path.isfile(p):
587 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
583 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
588 else:
584 else:
589 if self.dirstate.state(dest) == '?':
585 if self.dirstate.state(dest) == '?':
590 self.dirstate.update([dest], "a")
586 self.dirstate.update([dest], "a")
591 self.dirstate.copy(source, dest)
587 self.dirstate.copy(source, dest)
592
588
593 def heads(self):
589 def heads(self):
594 return self.changelog.heads()
590 return self.changelog.heads()
595
591
596 # branchlookup returns a dict giving a list of branches for
592 # branchlookup returns a dict giving a list of branches for
597 # each head. A branch is defined as the tag of a node or
593 # each head. A branch is defined as the tag of a node or
598 # the branch of the node's parents. If a node has multiple
594 # the branch of the node's parents. If a node has multiple
599 # branch tags, tags are eliminated if they are visible from other
595 # branch tags, tags are eliminated if they are visible from other
600 # branch tags.
596 # branch tags.
601 #
597 #
602 # So, for this graph: a->b->c->d->e
598 # So, for this graph: a->b->c->d->e
603 # \ /
599 # \ /
604 # aa -----/
600 # aa -----/
605 # a has tag 2.6.12
601 # a has tag 2.6.12
606 # d has tag 2.6.13
602 # d has tag 2.6.13
607 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
603 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
608 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
604 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
609 # from the list.
605 # from the list.
610 #
606 #
611 # It is possible that more than one head will have the same branch tag.
607 # It is possible that more than one head will have the same branch tag.
612 # callers need to check the result for multiple heads under the same
608 # callers need to check the result for multiple heads under the same
613 # branch tag if that is a problem for them (ie checkout of a specific
609 # branch tag if that is a problem for them (ie checkout of a specific
614 # branch).
610 # branch).
615 #
611 #
616 # passing in a specific branch will limit the depth of the search
612 # passing in a specific branch will limit the depth of the search
617 # through the parents. It won't limit the branches returned in the
613 # through the parents. It won't limit the branches returned in the
618 # result though.
614 # result though.
619 def branchlookup(self, heads=None, branch=None):
615 def branchlookup(self, heads=None, branch=None):
620 if not heads:
616 if not heads:
621 heads = self.heads()
617 heads = self.heads()
622 headt = [ h for h in heads ]
618 headt = [ h for h in heads ]
623 chlog = self.changelog
619 chlog = self.changelog
624 branches = {}
620 branches = {}
625 merges = []
621 merges = []
626 seenmerge = {}
622 seenmerge = {}
627
623
628 # traverse the tree once for each head, recording in the branches
624 # traverse the tree once for each head, recording in the branches
629 # dict which tags are visible from this head. The branches
625 # dict which tags are visible from this head. The branches
630 # dict also records which tags are visible from each tag
626 # dict also records which tags are visible from each tag
631 # while we traverse.
627 # while we traverse.
632 while headt or merges:
628 while headt or merges:
633 if merges:
629 if merges:
634 n, found = merges.pop()
630 n, found = merges.pop()
635 visit = [n]
631 visit = [n]
636 else:
632 else:
637 h = headt.pop()
633 h = headt.pop()
638 visit = [h]
634 visit = [h]
639 found = [h]
635 found = [h]
640 seen = {}
636 seen = {}
641 while visit:
637 while visit:
642 n = visit.pop()
638 n = visit.pop()
643 if n in seen:
639 if n in seen:
644 continue
640 continue
645 pp = chlog.parents(n)
641 pp = chlog.parents(n)
646 tags = self.nodetags(n)
642 tags = self.nodetags(n)
647 if tags:
643 if tags:
648 for x in tags:
644 for x in tags:
649 if x == 'tip':
645 if x == 'tip':
650 continue
646 continue
651 for f in found:
647 for f in found:
652 branches.setdefault(f, {})[n] = 1
648 branches.setdefault(f, {})[n] = 1
653 branches.setdefault(n, {})[n] = 1
649 branches.setdefault(n, {})[n] = 1
654 break
650 break
655 if n not in found:
651 if n not in found:
656 found.append(n)
652 found.append(n)
657 if branch in tags:
653 if branch in tags:
658 continue
654 continue
659 seen[n] = 1
655 seen[n] = 1
660 if pp[1] != nullid and n not in seenmerge:
656 if pp[1] != nullid and n not in seenmerge:
661 merges.append((pp[1], [x for x in found]))
657 merges.append((pp[1], [x for x in found]))
662 seenmerge[n] = 1
658 seenmerge[n] = 1
663 if pp[0] != nullid:
659 if pp[0] != nullid:
664 visit.append(pp[0])
660 visit.append(pp[0])
665 # traverse the branches dict, eliminating branch tags from each
661 # traverse the branches dict, eliminating branch tags from each
666 # head that are visible from another branch tag for that head.
662 # head that are visible from another branch tag for that head.
667 out = {}
663 out = {}
668 viscache = {}
664 viscache = {}
669 for h in heads:
665 for h in heads:
670 def visible(node):
666 def visible(node):
671 if node in viscache:
667 if node in viscache:
672 return viscache[node]
668 return viscache[node]
673 ret = {}
669 ret = {}
674 visit = [node]
670 visit = [node]
675 while visit:
671 while visit:
676 x = visit.pop()
672 x = visit.pop()
677 if x in viscache:
673 if x in viscache:
678 ret.update(viscache[x])
674 ret.update(viscache[x])
679 elif x not in ret:
675 elif x not in ret:
680 ret[x] = 1
676 ret[x] = 1
681 if x in branches:
677 if x in branches:
682 visit[len(visit):] = branches[x].keys()
678 visit[len(visit):] = branches[x].keys()
683 viscache[node] = ret
679 viscache[node] = ret
684 return ret
680 return ret
685 if h not in branches:
681 if h not in branches:
686 continue
682 continue
687 # O(n^2), but somewhat limited. This only searches the
683 # O(n^2), but somewhat limited. This only searches the
688 # tags visible from a specific head, not all the tags in the
684 # tags visible from a specific head, not all the tags in the
689 # whole repo.
685 # whole repo.
690 for b in branches[h]:
686 for b in branches[h]:
691 vis = False
687 vis = False
692 for bb in branches[h].keys():
688 for bb in branches[h].keys():
693 if b != bb:
689 if b != bb:
694 if b in visible(bb):
690 if b in visible(bb):
695 vis = True
691 vis = True
696 break
692 break
697 if not vis:
693 if not vis:
698 l = out.setdefault(h, [])
694 l = out.setdefault(h, [])
699 l[len(l):] = self.nodetags(b)
695 l[len(l):] = self.nodetags(b)
700 return out
696 return out
701
697
702 def branches(self, nodes):
698 def branches(self, nodes):
703 if not nodes: nodes = [self.changelog.tip()]
699 if not nodes: nodes = [self.changelog.tip()]
704 b = []
700 b = []
705 for n in nodes:
701 for n in nodes:
706 t = n
702 t = n
707 while n:
703 while n:
708 p = self.changelog.parents(n)
704 p = self.changelog.parents(n)
709 if p[1] != nullid or p[0] == nullid:
705 if p[1] != nullid or p[0] == nullid:
710 b.append((t, n, p[0], p[1]))
706 b.append((t, n, p[0], p[1]))
711 break
707 break
712 n = p[0]
708 n = p[0]
713 return b
709 return b
714
710
715 def between(self, pairs):
711 def between(self, pairs):
716 r = []
712 r = []
717
713
718 for top, bottom in pairs:
714 for top, bottom in pairs:
719 n, l, i = top, [], 0
715 n, l, i = top, [], 0
720 f = 1
716 f = 1
721
717
722 while n != bottom:
718 while n != bottom:
723 p = self.changelog.parents(n)[0]
719 p = self.changelog.parents(n)[0]
724 if i == f:
720 if i == f:
725 l.append(n)
721 l.append(n)
726 f = f * 2
722 f = f * 2
727 n = p
723 n = p
728 i += 1
724 i += 1
729
725
730 r.append(l)
726 r.append(l)
731
727
732 return r
728 return r
733
729
734 def newer(self, nodes):
730 def newer(self, nodes):
735 m = {}
731 m = {}
736 nl = []
732 nl = []
737 pm = {}
733 pm = {}
738 cl = self.changelog
734 cl = self.changelog
739 t = l = cl.count()
735 t = l = cl.count()
740
736
741 # find the lowest numbered node
737 # find the lowest numbered node
742 for n in nodes:
738 for n in nodes:
743 l = min(l, cl.rev(n))
739 l = min(l, cl.rev(n))
744 m[n] = 1
740 m[n] = 1
745
741
746 for i in xrange(l, t):
742 for i in xrange(l, t):
747 n = cl.node(i)
743 n = cl.node(i)
748 if n in m: # explicitly listed
744 if n in m: # explicitly listed
749 pm[n] = 1
745 pm[n] = 1
750 nl.append(n)
746 nl.append(n)
751 continue
747 continue
752 for p in cl.parents(n):
748 for p in cl.parents(n):
753 if p in pm: # parent listed
749 if p in pm: # parent listed
754 pm[n] = 1
750 pm[n] = 1
755 nl.append(n)
751 nl.append(n)
756 break
752 break
757
753
758 return nl
754 return nl
759
755
760 def findincoming(self, remote, base=None, heads=None):
756 def findincoming(self, remote, base=None, heads=None):
761 m = self.changelog.nodemap
757 m = self.changelog.nodemap
762 search = []
758 search = []
763 fetch = {}
759 fetch = {}
764 seen = {}
760 seen = {}
765 seenbranch = {}
761 seenbranch = {}
766 if base == None:
762 if base == None:
767 base = {}
763 base = {}
768
764
769 # assume we're closer to the tip than the root
765 # assume we're closer to the tip than the root
770 # and start by examining the heads
766 # and start by examining the heads
771 self.ui.status(_("searching for changes\n"))
767 self.ui.status(_("searching for changes\n"))
772
768
773 if not heads:
769 if not heads:
774 heads = remote.heads()
770 heads = remote.heads()
775
771
776 unknown = []
772 unknown = []
777 for h in heads:
773 for h in heads:
778 if h not in m:
774 if h not in m:
779 unknown.append(h)
775 unknown.append(h)
780 else:
776 else:
781 base[h] = 1
777 base[h] = 1
782
778
783 if not unknown:
779 if not unknown:
784 return None
780 return None
785
781
786 rep = {}
782 rep = {}
787 reqcnt = 0
783 reqcnt = 0
788
784
789 # search through remote branches
785 # search through remote branches
790 # a 'branch' here is a linear segment of history, with four parts:
786 # a 'branch' here is a linear segment of history, with four parts:
791 # head, root, first parent, second parent
787 # head, root, first parent, second parent
792 # (a branch always has two parents (or none) by definition)
788 # (a branch always has two parents (or none) by definition)
793 unknown = remote.branches(unknown)
789 unknown = remote.branches(unknown)
794 while unknown:
790 while unknown:
795 r = []
791 r = []
796 while unknown:
792 while unknown:
797 n = unknown.pop(0)
793 n = unknown.pop(0)
798 if n[0] in seen:
794 if n[0] in seen:
799 continue
795 continue
800
796
801 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
797 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
802 if n[0] == nullid:
798 if n[0] == nullid:
803 break
799 break
804 if n in seenbranch:
800 if n in seenbranch:
805 self.ui.debug(_("branch already found\n"))
801 self.ui.debug(_("branch already found\n"))
806 continue
802 continue
807 if n[1] and n[1] in m: # do we know the base?
803 if n[1] and n[1] in m: # do we know the base?
808 self.ui.debug(_("found incomplete branch %s:%s\n")
804 self.ui.debug(_("found incomplete branch %s:%s\n")
809 % (short(n[0]), short(n[1])))
805 % (short(n[0]), short(n[1])))
810 search.append(n) # schedule branch range for scanning
806 search.append(n) # schedule branch range for scanning
811 seenbranch[n] = 1
807 seenbranch[n] = 1
812 else:
808 else:
813 if n[1] not in seen and n[1] not in fetch:
809 if n[1] not in seen and n[1] not in fetch:
814 if n[2] in m and n[3] in m:
810 if n[2] in m and n[3] in m:
815 self.ui.debug(_("found new changeset %s\n") %
811 self.ui.debug(_("found new changeset %s\n") %
816 short(n[1]))
812 short(n[1]))
817 fetch[n[1]] = 1 # earliest unknown
813 fetch[n[1]] = 1 # earliest unknown
818 base[n[2]] = 1 # latest known
814 base[n[2]] = 1 # latest known
819 continue
815 continue
820
816
821 for a in n[2:4]:
817 for a in n[2:4]:
822 if a not in rep:
818 if a not in rep:
823 r.append(a)
819 r.append(a)
824 rep[a] = 1
820 rep[a] = 1
825
821
826 seen[n[0]] = 1
822 seen[n[0]] = 1
827
823
828 if r:
824 if r:
829 reqcnt += 1
825 reqcnt += 1
830 self.ui.debug(_("request %d: %s\n") %
826 self.ui.debug(_("request %d: %s\n") %
831 (reqcnt, " ".join(map(short, r))))
827 (reqcnt, " ".join(map(short, r))))
832 for p in range(0, len(r), 10):
828 for p in range(0, len(r), 10):
833 for b in remote.branches(r[p:p+10]):
829 for b in remote.branches(r[p:p+10]):
834 self.ui.debug(_("received %s:%s\n") %
830 self.ui.debug(_("received %s:%s\n") %
835 (short(b[0]), short(b[1])))
831 (short(b[0]), short(b[1])))
836 if b[0] in m:
832 if b[0] in m:
837 self.ui.debug(_("found base node %s\n") % short(b[0]))
833 self.ui.debug(_("found base node %s\n") % short(b[0]))
838 base[b[0]] = 1
834 base[b[0]] = 1
839 elif b[0] not in seen:
835 elif b[0] not in seen:
840 unknown.append(b)
836 unknown.append(b)
841
837
842 # do binary search on the branches we found
838 # do binary search on the branches we found
843 while search:
839 while search:
844 n = search.pop(0)
840 n = search.pop(0)
845 reqcnt += 1
841 reqcnt += 1
846 l = remote.between([(n[0], n[1])])[0]
842 l = remote.between([(n[0], n[1])])[0]
847 l.append(n[1])
843 l.append(n[1])
848 p = n[0]
844 p = n[0]
849 f = 1
845 f = 1
850 for i in l:
846 for i in l:
851 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
847 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
852 if i in m:
848 if i in m:
853 if f <= 2:
849 if f <= 2:
854 self.ui.debug(_("found new branch changeset %s\n") %
850 self.ui.debug(_("found new branch changeset %s\n") %
855 short(p))
851 short(p))
856 fetch[p] = 1
852 fetch[p] = 1
857 base[i] = 1
853 base[i] = 1
858 else:
854 else:
859 self.ui.debug(_("narrowed branch search to %s:%s\n")
855 self.ui.debug(_("narrowed branch search to %s:%s\n")
860 % (short(p), short(i)))
856 % (short(p), short(i)))
861 search.append((p, i))
857 search.append((p, i))
862 break
858 break
863 p, f = i, f * 2
859 p, f = i, f * 2
864
860
865 # sanity check our fetch list
861 # sanity check our fetch list
866 for f in fetch.keys():
862 for f in fetch.keys():
867 if f in m:
863 if f in m:
868 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
864 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
869
865
870 if base.keys() == [nullid]:
866 if base.keys() == [nullid]:
871 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
867 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
872
868
873 self.ui.note(_("found new changesets starting at ") +
869 self.ui.note(_("found new changesets starting at ") +
874 " ".join([short(f) for f in fetch]) + "\n")
870 " ".join([short(f) for f in fetch]) + "\n")
875
871
876 self.ui.debug(_("%d total queries\n") % reqcnt)
872 self.ui.debug(_("%d total queries\n") % reqcnt)
877
873
878 return fetch.keys()
874 return fetch.keys()
879
875
880 def findoutgoing(self, remote, base=None, heads=None):
876 def findoutgoing(self, remote, base=None, heads=None):
881 if base == None:
877 if base == None:
882 base = {}
878 base = {}
883 self.findincoming(remote, base, heads)
879 self.findincoming(remote, base, heads)
884
880
885 self.ui.debug(_("common changesets up to ")
881 self.ui.debug(_("common changesets up to ")
886 + " ".join(map(short, base.keys())) + "\n")
882 + " ".join(map(short, base.keys())) + "\n")
887
883
888 remain = dict.fromkeys(self.changelog.nodemap)
884 remain = dict.fromkeys(self.changelog.nodemap)
889
885
890 # prune everything remote has from the tree
886 # prune everything remote has from the tree
891 del remain[nullid]
887 del remain[nullid]
892 remove = base.keys()
888 remove = base.keys()
893 while remove:
889 while remove:
894 n = remove.pop(0)
890 n = remove.pop(0)
895 if n in remain:
891 if n in remain:
896 del remain[n]
892 del remain[n]
897 for p in self.changelog.parents(n):
893 for p in self.changelog.parents(n):
898 remove.append(p)
894 remove.append(p)
899
895
900 # find every node whose parents have been pruned
896 # find every node whose parents have been pruned
901 subset = []
897 subset = []
902 for n in remain:
898 for n in remain:
903 p1, p2 = self.changelog.parents(n)
899 p1, p2 = self.changelog.parents(n)
904 if p1 not in remain and p2 not in remain:
900 if p1 not in remain and p2 not in remain:
905 subset.append(n)
901 subset.append(n)
906
902
907 # this is the set of all roots we have to push
903 # this is the set of all roots we have to push
908 return subset
904 return subset
909
905
910 def pull(self, remote):
906 def pull(self, remote):
911 lock = self.lock()
907 lock = self.lock()
912
908
913 # if we have an empty repo, fetch everything
909 # if we have an empty repo, fetch everything
914 if self.changelog.tip() == nullid:
910 if self.changelog.tip() == nullid:
915 self.ui.status(_("requesting all changes\n"))
911 self.ui.status(_("requesting all changes\n"))
916 fetch = [nullid]
912 fetch = [nullid]
917 else:
913 else:
918 fetch = self.findincoming(remote)
914 fetch = self.findincoming(remote)
919
915
920 if not fetch:
916 if not fetch:
921 self.ui.status(_("no changes found\n"))
917 self.ui.status(_("no changes found\n"))
922 return 1
918 return 1
923
919
924 cg = remote.changegroup(fetch)
920 cg = remote.changegroup(fetch)
925 return self.addchangegroup(cg)
921 return self.addchangegroup(cg)
926
922
927 def push(self, remote, force=False):
923 def push(self, remote, force=False):
928 lock = remote.lock()
924 lock = remote.lock()
929
925
930 base = {}
926 base = {}
931 heads = remote.heads()
927 heads = remote.heads()
932 inc = self.findincoming(remote, base, heads)
928 inc = self.findincoming(remote, base, heads)
933 if not force and inc:
929 if not force and inc:
934 self.ui.warn(_("abort: unsynced remote changes!\n"))
930 self.ui.warn(_("abort: unsynced remote changes!\n"))
935 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
931 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
936 return 1
932 return 1
937
933
938 update = self.findoutgoing(remote, base)
934 update = self.findoutgoing(remote, base)
939 if not update:
935 if not update:
940 self.ui.status(_("no changes found\n"))
936 self.ui.status(_("no changes found\n"))
941 return 1
937 return 1
942 elif not force:
938 elif not force:
943 if len(heads) < len(self.changelog.heads()):
939 if len(heads) < len(self.changelog.heads()):
944 self.ui.warn(_("abort: push creates new remote branches!\n"))
940 self.ui.warn(_("abort: push creates new remote branches!\n"))
945 self.ui.status(_("(did you forget to merge?"
941 self.ui.status(_("(did you forget to merge?"
946 " use push -f to force)\n"))
942 " use push -f to force)\n"))
947 return 1
943 return 1
948
944
949 cg = self.changegroup(update)
945 cg = self.changegroup(update)
950 return remote.addchangegroup(cg)
946 return remote.addchangegroup(cg)
951
947
952 def changegroup(self, basenodes):
948 def changegroup(self, basenodes):
953 genread = util.chunkbuffer
949 genread = util.chunkbuffer
954
950
955 def gengroup():
951 def gengroup():
956 nodes = self.newer(basenodes)
952 nodes = self.newer(basenodes)
957
953
958 # construct the link map
954 # construct the link map
959 linkmap = {}
955 linkmap = {}
960 for n in nodes:
956 for n in nodes:
961 linkmap[self.changelog.rev(n)] = n
957 linkmap[self.changelog.rev(n)] = n
962
958
963 # construct a list of all changed files
959 # construct a list of all changed files
964 changed = {}
960 changed = {}
965 for n in nodes:
961 for n in nodes:
966 c = self.changelog.read(n)
962 c = self.changelog.read(n)
967 for f in c[3]:
963 for f in c[3]:
968 changed[f] = 1
964 changed[f] = 1
969 changed = changed.keys()
965 changed = changed.keys()
970 changed.sort()
966 changed.sort()
971
967
972 # the changegroup is changesets + manifests + all file revs
968 # the changegroup is changesets + manifests + all file revs
973 revs = [ self.changelog.rev(n) for n in nodes ]
969 revs = [ self.changelog.rev(n) for n in nodes ]
974
970
975 for y in self.changelog.group(linkmap): yield y
971 for y in self.changelog.group(linkmap): yield y
976 for y in self.manifest.group(linkmap): yield y
972 for y in self.manifest.group(linkmap): yield y
977 for f in changed:
973 for f in changed:
978 yield struct.pack(">l", len(f) + 4) + f
974 yield struct.pack(">l", len(f) + 4) + f
979 g = self.file(f).group(linkmap)
975 g = self.file(f).group(linkmap)
980 for y in g:
976 for y in g:
981 yield y
977 yield y
982
978
983 yield struct.pack(">l", 0)
979 yield struct.pack(">l", 0)
984
980
985 return genread(gengroup())
981 return genread(gengroup())
986
982
987 def addchangegroup(self, source):
983 def addchangegroup(self, source):
988
984
989 def getchunk():
985 def getchunk():
990 d = source.read(4)
986 d = source.read(4)
991 if not d: return ""
987 if not d: return ""
992 l = struct.unpack(">l", d)[0]
988 l = struct.unpack(">l", d)[0]
993 if l <= 4: return ""
989 if l <= 4: return ""
994 d = source.read(l - 4)
990 d = source.read(l - 4)
995 if len(d) < l - 4:
991 if len(d) < l - 4:
996 raise repo.RepoError(_("premature EOF reading chunk"
992 raise repo.RepoError(_("premature EOF reading chunk"
997 " (got %d bytes, expected %d)")
993 " (got %d bytes, expected %d)")
998 % (len(d), l - 4))
994 % (len(d), l - 4))
999 return d
995 return d
1000
996
1001 def getgroup():
997 def getgroup():
1002 while 1:
998 while 1:
1003 c = getchunk()
999 c = getchunk()
1004 if not c: break
1000 if not c: break
1005 yield c
1001 yield c
1006
1002
1007 def csmap(x):
1003 def csmap(x):
1008 self.ui.debug(_("add changeset %s\n") % short(x))
1004 self.ui.debug(_("add changeset %s\n") % short(x))
1009 return self.changelog.count()
1005 return self.changelog.count()
1010
1006
1011 def revmap(x):
1007 def revmap(x):
1012 return self.changelog.rev(x)
1008 return self.changelog.rev(x)
1013
1009
1014 if not source: return
1010 if not source: return
1015 changesets = files = revisions = 0
1011 changesets = files = revisions = 0
1016
1012
1017 tr = self.transaction()
1013 tr = self.transaction()
1018
1014
1019 oldheads = len(self.changelog.heads())
1015 oldheads = len(self.changelog.heads())
1020
1016
1021 # pull off the changeset group
1017 # pull off the changeset group
1022 self.ui.status(_("adding changesets\n"))
1018 self.ui.status(_("adding changesets\n"))
1023 co = self.changelog.tip()
1019 co = self.changelog.tip()
1024 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1020 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1025 cnr, cor = map(self.changelog.rev, (cn, co))
1021 cnr, cor = map(self.changelog.rev, (cn, co))
1026 if cn == nullid:
1022 if cn == nullid:
1027 cnr = cor
1023 cnr = cor
1028 changesets = cnr - cor
1024 changesets = cnr - cor
1029
1025
1030 # pull off the manifest group
1026 # pull off the manifest group
1031 self.ui.status(_("adding manifests\n"))
1027 self.ui.status(_("adding manifests\n"))
1032 mm = self.manifest.tip()
1028 mm = self.manifest.tip()
1033 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1029 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1034
1030
1035 # process the files
1031 # process the files
1036 self.ui.status(_("adding file changes\n"))
1032 self.ui.status(_("adding file changes\n"))
1037 while 1:
1033 while 1:
1038 f = getchunk()
1034 f = getchunk()
1039 if not f: break
1035 if not f: break
1040 self.ui.debug(_("adding %s revisions\n") % f)
1036 self.ui.debug(_("adding %s revisions\n") % f)
1041 fl = self.file(f)
1037 fl = self.file(f)
1042 o = fl.count()
1038 o = fl.count()
1043 n = fl.addgroup(getgroup(), revmap, tr)
1039 n = fl.addgroup(getgroup(), revmap, tr)
1044 revisions += fl.count() - o
1040 revisions += fl.count() - o
1045 files += 1
1041 files += 1
1046
1042
1047 newheads = len(self.changelog.heads())
1043 newheads = len(self.changelog.heads())
1048 heads = ""
1044 heads = ""
1049 if oldheads and newheads > oldheads:
1045 if oldheads and newheads > oldheads:
1050 heads = _(" (+%d heads)") % (newheads - oldheads)
1046 heads = _(" (+%d heads)") % (newheads - oldheads)
1051
1047
1052 self.ui.status(_("added %d changesets"
1048 self.ui.status(_("added %d changesets"
1053 " with %d changes to %d files%s\n")
1049 " with %d changes to %d files%s\n")
1054 % (changesets, revisions, files, heads))
1050 % (changesets, revisions, files, heads))
1055
1051
1056 tr.close()
1052 tr.close()
1057
1053
        if changesets > 0:
            if not self.hook("changegroup",
                             node=hex(self.changelog.node(cor+1))):
                self.ui.warn(_("abort: changegroup hook returned failure!\n"))
                return 1

            for i in range(cor + 1, cnr + 1):
                self.hook("commit", node=hex(self.changelog.node(i)))

        return

    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
            return 1

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes()

        # is this a jump, or a merge?  i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)
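        # e.g. pa == p1 means p2 is a descendant of the working dir's
        # parent (a plain forward jump), pa == p2 means we are stepping
        # backwards along the same history; only when neither holds do the
        # two revisions sit on diverging branches and need a real merge.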

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note(_("resolving manifests\n"))
        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
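                        # bit-wise: keep the ancestor's bit unless we or the
                        # remote changed it, in which case take the changed
                        # value.  e.g. a=0 (not executable in the ancestor),
                        # b=1 (we set +x), c=0 (remote untouched):
                        # ((0^1) | (0^0)) ^ 0 = 1, so the merge stays +x.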
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(_(" remote %s is newer, get\n") % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(_(" updating permissions for %s\n") % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(_(" updating permissions for %s\n") % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = _("d")
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (_(" local changed %s which remote deleted\n") % f) +
                            _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                    if r == _("d"):
                        remove.append(f)
                else:
                    self.ui.debug(_("other deleted %s\n") % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug(_("remote deleted %s\n") % f)
                        remove.append(f)
                    else:
                        self.ui.debug(_("local modified %s, keeping\n") % f)
                else:
                    self.ui.debug(_("working dir created %s, keeping\n") % f)

        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = _("k")
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        (_("remote changed %s which local deleted\n") % f) +
                        _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                if r == _("k"): get[f] = n
            elif f not in ma:
                self.ui.debug(_("remote created %s\n") % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug(_("local deleted %s, recreating\n") % f)
                    get[f] = n
                else:
                    self.ui.debug(_("local deleted %s\n") % f)

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

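        # a linear or forced update just moves the working directory to the
        # new revision and keeps a single parent; anything else is a branch
        # merge, which keeps both parents and is only allowed with -m
        # (allow) -- otherwise we list the affected files and bail out.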
        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status(_("this update spans a branch"
                                 " affecting the following files:\n"))
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = _(" (resolve)")
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn(_("aborting update spanning branches!\n"))
                self.ui.status(_("(use update -m to merge across branches"
                                 " or -C to lose changes)\n"))
                return 1
            branch_merge = True

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note(_("getting %s\n") % f)
            t = self.file(f).read(get[f])
            try:
                self.wwrite(f, t)
            except IOError, e:
                if e.errno != errno.ENOENT:
                    raise
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status(_("merging %s\n") % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
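                    # st_mtime=-1 can never match the file's real timestamp,
                    # so a later status will not trust the cached state and
                    # will re-check the file, letting the merge show up as a
                    # plain local modification.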

        remove.sort()
        for f in remove:
            self.ui.note(_("removing %s\n") % f)
            try:
                util.unlink(self.wjoin(f))
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    self.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
        if moddirstate:
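            # a branch merge must remember the removals ('r') so the next
            # commit records them; a plain jump simply stops tracking the
            # removed files.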
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            self.wwrite(fn, fl.read(node), f)
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note(_("resolving %s\n") % fn)
        self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
                      (fn, short(my), short(other), short(base)))

        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
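        # the merge program is invoked as:  cmd <local> <base> <other>
        # (working copy, common ancestor, other version); it is expected to
        # leave the merged result in the local file and exit 0 on success,
        # non-zero if conflicts remain.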
        r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
        if r:
            self.ui.warn(_("merging %s failed!\n") % fn)

        os.unlink(b)
        os.unlink(c)

    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = [0]
        neededmanifests = {}
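        # walk the changelog, manifests and filelogs in turn, collecting
        # what each layer references, then cross-check: every changeset's
        # manifest must exist, every file node named in a manifest must
        # exist in its filelog, and every filelog revision must link back
        # to a changeset that touches that file.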

        def err(msg):
            self.ui.warn(msg + "\n")
            errors[0] += 1

        seen = {}
        self.ui.status(_("checking changesets\n"))
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            l = self.changelog.linkrev(n)
            if l != i:
                err(_("incorrect link (%d) for changeset revision %d") % (l, i))
            if n in seen:
                err(_("duplicate changeset at revision %d") % i)
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    err(_("changeset %s has unknown parent %s") %
                        (short(n), short(p)))
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                err(_("unpacking changeset %s: %s") % (short(n), inst))

            neededmanifests[changes[0]] = n

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status(_("checking manifests\n"))
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            l = self.manifest.linkrev(n)

            if l < 0 or l >= self.changelog.count():
                err(_("bad manifest link (%d) at revision %d") % (l, i))

            if n in neededmanifests:
                del neededmanifests[n]

            if n in seen:
                err(_("duplicate manifest at revision %d") % i)

            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    err(_("manifest %s has unknown parent %s") %
                        (short(n), short(p)))

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                self.ui.warn(_("interrupted"))
                raise
            except Exception, inst:
                err(_("unpacking manifest %s: %s") % (short(n), inst))

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1
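                # each manifest line is "<path>\0<40 hex chars of filenode>",
                # possibly followed by a flag character, hence fn[:40]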

        self.ui.status(_("crosschecking files in changesets and manifests\n"))

        for m, c in neededmanifests.items():
            err(_("Changeset %s refers to unknown manifest %s") %
                (short(m), short(c)))
        del neededmanifests

        for f in filenodes:
            if f not in filelinkrevs:
                err(_("file %s in manifest but not in changesets") % f)

        for f in filelinkrevs:
            if f not in filenodes:
                err(_("file %s in changeset but not in manifest") % f)

        self.ui.status(_("checking files\n"))
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    err(_("%s: duplicate revision %d") % (f, i))
                if n not in filenodes[f]:
                    err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    err(_("%s:%s points to unexpected changeset %d")
                        % (f, short(n), flr))
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    err(_("unpacking file %s %s: %s") % (f, short(n), inst))

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    err(_("file %s:%s unknown parent 1 %s") %
                        (f, short(n), short(p1)))
                if p2 not in nodes:
                    err(_("file %s:%s unknown parent 2 %s") %
                        (f, short(n), short(p2)))
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                err(_("node %s in manifests not in %s") % (hex(node), f))

        self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
                       (files, changesets, revisions))

        if errors[0]:
            self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
            return 1