Execute hooks in the repository root
mpm@selenic.com
r1346:88a9c75d default
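This change makes a configured hook command execute with the repository root as its working directory, instead of whatever directory hg happened to be invoked from; hook keyword arguments are still exported to the command as uppercase environment variables (the commit hook's node argument, for instance, appears as $NODE). A minimal sketch of an hgrc entry that relies on the new behaviour (the echo command and the log file name are illustrative, not part of this change):

    [hooks]
    # runs from the repository root, so the relative path below resolves against it
    commit = echo "committed $NODE from $(pwd)" >> commit-hooks.log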
@@ -1,1431 +1,1435 @@
 # localrepo.py - read/write repository class for mercurial
 #
 # Copyright 2005 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
 import struct, os, util
 import filelog, manifest, changelog, dirstate, repo
 from node import *
 from demandload import *
 demandload(globals(), "re lock transaction tempfile stat mdiff")
 
 class localrepository:
     def __init__(self, ui, path=None, create=0):
         if not path:
             p = os.getcwd()
             while not os.path.isdir(os.path.join(p, ".hg")):
                 oldp = p
                 p = os.path.dirname(p)
                 if p == oldp: raise repo.RepoError("no repo found")
             path = p
         self.path = os.path.join(path, ".hg")
 
         if not create and not os.path.isdir(self.path):
             raise repo.RepoError("repository %s not found" % self.path)
 
         self.root = os.path.abspath(path)
         self.ui = ui
         self.opener = util.opener(self.path)
         self.wopener = util.opener(self.root)
         self.manifest = manifest.manifest(self.opener)
         self.changelog = changelog.changelog(self.opener)
         self.tagscache = None
         self.nodetagscache = None
         self.encodepats = None
         self.decodepats = None
 
         if create:
             os.mkdir(self.path)
             os.mkdir(self.join("data"))
 
         self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
         try:
             self.ui.readconfig(self.opener("hgrc"))
         except IOError: pass
 
     def hook(self, name, **args):
         s = self.ui.config("hooks", name)
         if s:
             self.ui.note("running hook %s: %s\n" % (name, s))
             old = {}
             for k, v in args.items():
                 k = k.upper()
                 old[k] = os.environ.get(k, None)
                 os.environ[k] = v
 
+            # Hooks run in the repository root
+            olddir = os.getcwd()
+            os.chdir(self.root)
             r = os.system(s)
+            os.chdir(olddir)
 
             for k, v in old.items():
                 if v != None:
                     os.environ[k] = v
                 else:
                     del os.environ[k]
 
             if r:
                 self.ui.warn("abort: %s hook failed with status %d!\n" %
                              (name, r))
                 return False
         return True
71
75
72 def tags(self):
76 def tags(self):
73 '''return a mapping of tag to node'''
77 '''return a mapping of tag to node'''
74 if not self.tagscache:
78 if not self.tagscache:
75 self.tagscache = {}
79 self.tagscache = {}
76 def addtag(self, k, n):
80 def addtag(self, k, n):
77 try:
81 try:
78 bin_n = bin(n)
82 bin_n = bin(n)
79 except TypeError:
83 except TypeError:
80 bin_n = ''
84 bin_n = ''
81 self.tagscache[k.strip()] = bin_n
85 self.tagscache[k.strip()] = bin_n
82
86
83 try:
87 try:
84 # read each head of the tags file, ending with the tip
88 # read each head of the tags file, ending with the tip
85 # and add each tag found to the map, with "newer" ones
89 # and add each tag found to the map, with "newer" ones
86 # taking precedence
90 # taking precedence
87 fl = self.file(".hgtags")
91 fl = self.file(".hgtags")
88 h = fl.heads()
92 h = fl.heads()
89 h.reverse()
93 h.reverse()
90 for r in h:
94 for r in h:
91 for l in fl.read(r).splitlines():
95 for l in fl.read(r).splitlines():
92 if l:
96 if l:
93 n, k = l.split(" ", 1)
97 n, k = l.split(" ", 1)
94 addtag(self, k, n)
98 addtag(self, k, n)
95 except KeyError:
99 except KeyError:
96 pass
100 pass
97
101
98 try:
102 try:
99 f = self.opener("localtags")
103 f = self.opener("localtags")
100 for l in f:
104 for l in f:
101 n, k = l.split(" ", 1)
105 n, k = l.split(" ", 1)
102 addtag(self, k, n)
106 addtag(self, k, n)
103 except IOError:
107 except IOError:
104 pass
108 pass
105
109
106 self.tagscache['tip'] = self.changelog.tip()
110 self.tagscache['tip'] = self.changelog.tip()
107
111
108 return self.tagscache
112 return self.tagscache
109
113
110 def tagslist(self):
114 def tagslist(self):
111 '''return a list of tags ordered by revision'''
115 '''return a list of tags ordered by revision'''
112 l = []
116 l = []
113 for t, n in self.tags().items():
117 for t, n in self.tags().items():
114 try:
118 try:
115 r = self.changelog.rev(n)
119 r = self.changelog.rev(n)
116 except:
120 except:
117 r = -2 # sort to the beginning of the list if unknown
121 r = -2 # sort to the beginning of the list if unknown
118 l.append((r,t,n))
122 l.append((r,t,n))
119 l.sort()
123 l.sort()
120 return [(t,n) for r,t,n in l]
124 return [(t,n) for r,t,n in l]
121
125
122 def nodetags(self, node):
126 def nodetags(self, node):
123 '''return the tags associated with a node'''
127 '''return the tags associated with a node'''
124 if not self.nodetagscache:
128 if not self.nodetagscache:
125 self.nodetagscache = {}
129 self.nodetagscache = {}
126 for t,n in self.tags().items():
130 for t,n in self.tags().items():
127 self.nodetagscache.setdefault(n,[]).append(t)
131 self.nodetagscache.setdefault(n,[]).append(t)
128 return self.nodetagscache.get(node, [])
132 return self.nodetagscache.get(node, [])
129
133
130 def lookup(self, key):
134 def lookup(self, key):
131 try:
135 try:
132 return self.tags()[key]
136 return self.tags()[key]
133 except KeyError:
137 except KeyError:
134 try:
138 try:
135 return self.changelog.lookup(key)
139 return self.changelog.lookup(key)
136 except:
140 except:
137 raise repo.RepoError("unknown revision '%s'" % key)
141 raise repo.RepoError("unknown revision '%s'" % key)
138
142
139 def dev(self):
143 def dev(self):
140 return os.stat(self.path).st_dev
144 return os.stat(self.path).st_dev
141
145
142 def local(self):
146 def local(self):
143 return True
147 return True
144
148
145 def join(self, f):
149 def join(self, f):
146 return os.path.join(self.path, f)
150 return os.path.join(self.path, f)
147
151
148 def wjoin(self, f):
152 def wjoin(self, f):
149 return os.path.join(self.root, f)
153 return os.path.join(self.root, f)
150
154
151 def file(self, f):
155 def file(self, f):
152 if f[0] == '/': f = f[1:]
156 if f[0] == '/': f = f[1:]
153 return filelog.filelog(self.opener, f)
157 return filelog.filelog(self.opener, f)
154
158
155 def getcwd(self):
159 def getcwd(self):
156 return self.dirstate.getcwd()
160 return self.dirstate.getcwd()
157
161
158 def wfile(self, f, mode='r'):
162 def wfile(self, f, mode='r'):
159 return self.wopener(f, mode)
163 return self.wopener(f, mode)
160
164
161 def wread(self, filename):
165 def wread(self, filename):
162 if self.encodepats == None:
166 if self.encodepats == None:
163 l = []
167 l = []
164 for pat, cmd in self.ui.configitems("encode"):
168 for pat, cmd in self.ui.configitems("encode"):
165 mf = util.matcher("", "/", [pat], [], [])[1]
169 mf = util.matcher("", "/", [pat], [], [])[1]
166 l.append((mf, cmd))
170 l.append((mf, cmd))
167 self.encodepats = l
171 self.encodepats = l
168
172
169 data = self.wopener(filename, 'r').read()
173 data = self.wopener(filename, 'r').read()
170
174
171 for mf, cmd in self.encodepats:
175 for mf, cmd in self.encodepats:
172 if mf(filename):
176 if mf(filename):
173 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
177 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
174 data = util.filter(data, cmd)
178 data = util.filter(data, cmd)
175 break
179 break
176
180
177 return data
181 return data
178
182
179 def wwrite(self, filename, data, fd=None):
183 def wwrite(self, filename, data, fd=None):
180 if self.decodepats == None:
184 if self.decodepats == None:
181 l = []
185 l = []
182 for pat, cmd in self.ui.configitems("decode"):
186 for pat, cmd in self.ui.configitems("decode"):
183 mf = util.matcher("", "/", [pat], [], [])[1]
187 mf = util.matcher("", "/", [pat], [], [])[1]
184 l.append((mf, cmd))
188 l.append((mf, cmd))
185 self.decodepats = l
189 self.decodepats = l
186
190
187 for mf, cmd in self.decodepats:
191 for mf, cmd in self.decodepats:
188 if mf(filename):
192 if mf(filename):
189 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
193 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
190 data = util.filter(data, cmd)
194 data = util.filter(data, cmd)
191 break
195 break
192
196
193 if fd:
197 if fd:
194 return fd.write(data)
198 return fd.write(data)
195 return self.wopener(filename, 'w').write(data)
199 return self.wopener(filename, 'w').write(data)
196
200
197 def transaction(self):
201 def transaction(self):
198 # save dirstate for undo
202 # save dirstate for undo
199 try:
203 try:
200 ds = self.opener("dirstate").read()
204 ds = self.opener("dirstate").read()
201 except IOError:
205 except IOError:
202 ds = ""
206 ds = ""
203 self.opener("journal.dirstate", "w").write(ds)
207 self.opener("journal.dirstate", "w").write(ds)
204
208
205 def after():
209 def after():
206 util.rename(self.join("journal"), self.join("undo"))
210 util.rename(self.join("journal"), self.join("undo"))
207 util.rename(self.join("journal.dirstate"),
211 util.rename(self.join("journal.dirstate"),
208 self.join("undo.dirstate"))
212 self.join("undo.dirstate"))
209
213
210 return transaction.transaction(self.ui.warn, self.opener,
214 return transaction.transaction(self.ui.warn, self.opener,
211 self.join("journal"), after)
215 self.join("journal"), after)
212
216
213 def recover(self):
217 def recover(self):
214 lock = self.lock()
218 lock = self.lock()
215 if os.path.exists(self.join("journal")):
219 if os.path.exists(self.join("journal")):
216 self.ui.status("rolling back interrupted transaction\n")
220 self.ui.status("rolling back interrupted transaction\n")
217 return transaction.rollback(self.opener, self.join("journal"))
221 return transaction.rollback(self.opener, self.join("journal"))
218 else:
222 else:
219 self.ui.warn("no interrupted transaction available\n")
223 self.ui.warn("no interrupted transaction available\n")
220
224
221 def undo(self):
225 def undo(self):
222 lock = self.lock()
226 lock = self.lock()
223 if os.path.exists(self.join("undo")):
227 if os.path.exists(self.join("undo")):
224 self.ui.status("rolling back last transaction\n")
228 self.ui.status("rolling back last transaction\n")
225 transaction.rollback(self.opener, self.join("undo"))
229 transaction.rollback(self.opener, self.join("undo"))
226 self.dirstate = None
230 self.dirstate = None
227 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
231 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
228 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
232 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
229 else:
233 else:
230 self.ui.warn("no undo information available\n")
234 self.ui.warn("no undo information available\n")
231
235
232 def lock(self, wait=1):
236 def lock(self, wait=1):
233 try:
237 try:
234 return lock.lock(self.join("lock"), 0)
238 return lock.lock(self.join("lock"), 0)
235 except lock.LockHeld, inst:
239 except lock.LockHeld, inst:
236 if wait:
240 if wait:
237 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
241 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
238 return lock.lock(self.join("lock"), wait)
242 return lock.lock(self.join("lock"), wait)
239 raise inst
243 raise inst
240
244
241 def rawcommit(self, files, text, user, date, p1=None, p2=None):
245 def rawcommit(self, files, text, user, date, p1=None, p2=None):
242 orig_parent = self.dirstate.parents()[0] or nullid
246 orig_parent = self.dirstate.parents()[0] or nullid
243 p1 = p1 or self.dirstate.parents()[0] or nullid
247 p1 = p1 or self.dirstate.parents()[0] or nullid
244 p2 = p2 or self.dirstate.parents()[1] or nullid
248 p2 = p2 or self.dirstate.parents()[1] or nullid
245 c1 = self.changelog.read(p1)
249 c1 = self.changelog.read(p1)
246 c2 = self.changelog.read(p2)
250 c2 = self.changelog.read(p2)
247 m1 = self.manifest.read(c1[0])
251 m1 = self.manifest.read(c1[0])
248 mf1 = self.manifest.readflags(c1[0])
252 mf1 = self.manifest.readflags(c1[0])
249 m2 = self.manifest.read(c2[0])
253 m2 = self.manifest.read(c2[0])
250 changed = []
254 changed = []
251
255
252 if orig_parent == p1:
256 if orig_parent == p1:
253 update_dirstate = 1
257 update_dirstate = 1
254 else:
258 else:
255 update_dirstate = 0
259 update_dirstate = 0
256
260
257 tr = self.transaction()
261 tr = self.transaction()
258 mm = m1.copy()
262 mm = m1.copy()
259 mfm = mf1.copy()
263 mfm = mf1.copy()
260 linkrev = self.changelog.count()
264 linkrev = self.changelog.count()
261 for f in files:
265 for f in files:
262 try:
266 try:
263 t = self.wread(f)
267 t = self.wread(f)
264 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
268 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
265 r = self.file(f)
269 r = self.file(f)
266 mfm[f] = tm
270 mfm[f] = tm
267
271
268 fp1 = m1.get(f, nullid)
272 fp1 = m1.get(f, nullid)
269 fp2 = m2.get(f, nullid)
273 fp2 = m2.get(f, nullid)
270
274
271 # is the same revision on two branches of a merge?
275 # is the same revision on two branches of a merge?
272 if fp2 == fp1:
276 if fp2 == fp1:
273 fp2 = nullid
277 fp2 = nullid
274
278
275 if fp2 != nullid:
279 if fp2 != nullid:
276 # is one parent an ancestor of the other?
280 # is one parent an ancestor of the other?
277 fpa = r.ancestor(fp1, fp2)
281 fpa = r.ancestor(fp1, fp2)
278 if fpa == fp1:
282 if fpa == fp1:
279 fp1, fp2 = fp2, nullid
283 fp1, fp2 = fp2, nullid
280 elif fpa == fp2:
284 elif fpa == fp2:
281 fp2 = nullid
285 fp2 = nullid
282
286
283 # is the file unmodified from the parent?
287 # is the file unmodified from the parent?
284 if t == r.read(fp1):
288 if t == r.read(fp1):
285 # record the proper existing parent in manifest
289 # record the proper existing parent in manifest
286 # no need to add a revision
290 # no need to add a revision
287 mm[f] = fp1
291 mm[f] = fp1
288 continue
292 continue
289
293
290 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
294 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
291 changed.append(f)
295 changed.append(f)
292 if update_dirstate:
296 if update_dirstate:
293 self.dirstate.update([f], "n")
297 self.dirstate.update([f], "n")
294 except IOError:
298 except IOError:
295 try:
299 try:
296 del mm[f]
300 del mm[f]
297 del mfm[f]
301 del mfm[f]
298 if update_dirstate:
302 if update_dirstate:
299 self.dirstate.forget([f])
303 self.dirstate.forget([f])
300 except:
304 except:
301 # deleted from p2?
305 # deleted from p2?
302 pass
306 pass
303
307
304 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
308 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
305 user = user or self.ui.username()
309 user = user or self.ui.username()
306 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
310 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
307 tr.close()
311 tr.close()
308 if update_dirstate:
312 if update_dirstate:
309 self.dirstate.setparents(n, nullid)
313 self.dirstate.setparents(n, nullid)
310
314
311 def commit(self, files = None, text = "", user = None, date = None,
315 def commit(self, files = None, text = "", user = None, date = None,
312 match = util.always, force=False):
316 match = util.always, force=False):
313 commit = []
317 commit = []
314 remove = []
318 remove = []
315 changed = []
319 changed = []
316
320
317 if files:
321 if files:
318 for f in files:
322 for f in files:
319 s = self.dirstate.state(f)
323 s = self.dirstate.state(f)
320 if s in 'nmai':
324 if s in 'nmai':
321 commit.append(f)
325 commit.append(f)
322 elif s == 'r':
326 elif s == 'r':
323 remove.append(f)
327 remove.append(f)
324 else:
328 else:
325 self.ui.warn("%s not tracked!\n" % f)
329 self.ui.warn("%s not tracked!\n" % f)
326 else:
330 else:
327 (c, a, d, u) = self.changes(match=match)
331 (c, a, d, u) = self.changes(match=match)
328 commit = c + a
332 commit = c + a
329 remove = d
333 remove = d
330
334
331 p1, p2 = self.dirstate.parents()
335 p1, p2 = self.dirstate.parents()
332 c1 = self.changelog.read(p1)
336 c1 = self.changelog.read(p1)
333 c2 = self.changelog.read(p2)
337 c2 = self.changelog.read(p2)
334 m1 = self.manifest.read(c1[0])
338 m1 = self.manifest.read(c1[0])
335 mf1 = self.manifest.readflags(c1[0])
339 mf1 = self.manifest.readflags(c1[0])
336 m2 = self.manifest.read(c2[0])
340 m2 = self.manifest.read(c2[0])
337
341
338 if not commit and not remove and not force and p2 == nullid:
342 if not commit and not remove and not force and p2 == nullid:
339 self.ui.status("nothing changed\n")
343 self.ui.status("nothing changed\n")
340 return None
344 return None
341
345
342 if not self.hook("precommit"):
346 if not self.hook("precommit"):
343 return None
347 return None
344
348
345 lock = self.lock()
349 lock = self.lock()
346 tr = self.transaction()
350 tr = self.transaction()
347
351
348 # check in files
352 # check in files
349 new = {}
353 new = {}
350 linkrev = self.changelog.count()
354 linkrev = self.changelog.count()
351 commit.sort()
355 commit.sort()
352 for f in commit:
356 for f in commit:
353 self.ui.note(f + "\n")
357 self.ui.note(f + "\n")
354 try:
358 try:
355 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
359 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
356 t = self.wread(f)
360 t = self.wread(f)
357 except IOError:
361 except IOError:
358 self.ui.warn("trouble committing %s!\n" % f)
362 self.ui.warn("trouble committing %s!\n" % f)
359 raise
363 raise
360
364
361 r = self.file(f)
365 r = self.file(f)
362
366
363 meta = {}
367 meta = {}
364 cp = self.dirstate.copied(f)
368 cp = self.dirstate.copied(f)
365 if cp:
369 if cp:
366 meta["copy"] = cp
370 meta["copy"] = cp
367 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
371 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
368 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
372 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
369 fp1, fp2 = nullid, nullid
373 fp1, fp2 = nullid, nullid
370 else:
374 else:
371 fp1 = m1.get(f, nullid)
375 fp1 = m1.get(f, nullid)
372 fp2 = m2.get(f, nullid)
376 fp2 = m2.get(f, nullid)
373
377
374 # is the same revision on two branches of a merge?
378 # is the same revision on two branches of a merge?
375 if fp2 == fp1:
379 if fp2 == fp1:
376 fp2 = nullid
380 fp2 = nullid
377
381
378 if fp2 != nullid:
382 if fp2 != nullid:
379 # is one parent an ancestor of the other?
383 # is one parent an ancestor of the other?
380 fpa = r.ancestor(fp1, fp2)
384 fpa = r.ancestor(fp1, fp2)
381 if fpa == fp1:
385 if fpa == fp1:
382 fp1, fp2 = fp2, nullid
386 fp1, fp2 = fp2, nullid
383 elif fpa == fp2:
387 elif fpa == fp2:
384 fp2 = nullid
388 fp2 = nullid
385
389
386 # is the file unmodified from the parent?
390 # is the file unmodified from the parent?
387 if not meta and t == r.read(fp1):
391 if not meta and t == r.read(fp1):
388 # record the proper existing parent in manifest
392 # record the proper existing parent in manifest
389 # no need to add a revision
393 # no need to add a revision
390 new[f] = fp1
394 new[f] = fp1
391 continue
395 continue
392
396
393 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
397 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
394 # remember what we've added so that we can later calculate
398 # remember what we've added so that we can later calculate
395 # the files to pull from a set of changesets
399 # the files to pull from a set of changesets
396 changed.append(f)
400 changed.append(f)
397
401
398 # update manifest
402 # update manifest
399 m1.update(new)
403 m1.update(new)
400 for f in remove:
404 for f in remove:
401 if f in m1:
405 if f in m1:
402 del m1[f]
406 del m1[f]
403 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
407 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
404 (new, remove))
408 (new, remove))
405
409
406 # add changeset
410 # add changeset
407 new = new.keys()
411 new = new.keys()
408 new.sort()
412 new.sort()
409
413
410 if not text:
414 if not text:
411 edittext = ""
415 edittext = ""
412 if p2 != nullid:
416 if p2 != nullid:
413 edittext += "HG: branch merge\n"
417 edittext += "HG: branch merge\n"
414 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
418 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
415 edittext += "".join(["HG: changed %s\n" % f for f in changed])
419 edittext += "".join(["HG: changed %s\n" % f for f in changed])
416 edittext += "".join(["HG: removed %s\n" % f for f in remove])
420 edittext += "".join(["HG: removed %s\n" % f for f in remove])
417 if not changed and not remove:
421 if not changed and not remove:
418 edittext += "HG: no files changed\n"
422 edittext += "HG: no files changed\n"
419 edittext = self.ui.edit(edittext)
423 edittext = self.ui.edit(edittext)
420 if not edittext.rstrip():
424 if not edittext.rstrip():
421 return None
425 return None
422 text = edittext
426 text = edittext
423
427
424 user = user or self.ui.username()
428 user = user or self.ui.username()
425 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
429 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
426 tr.close()
430 tr.close()
427
431
428 self.dirstate.setparents(n)
432 self.dirstate.setparents(n)
429 self.dirstate.update(new, "n")
433 self.dirstate.update(new, "n")
430 self.dirstate.forget(remove)
434 self.dirstate.forget(remove)
431
435
432 if not self.hook("commit", node=hex(n)):
436 if not self.hook("commit", node=hex(n)):
433 return None
437 return None
434 return n
438 return n
435
439
436 def walk(self, node=None, files=[], match=util.always):
440 def walk(self, node=None, files=[], match=util.always):
437 if node:
441 if node:
438 for fn in self.manifest.read(self.changelog.read(node)[0]):
442 for fn in self.manifest.read(self.changelog.read(node)[0]):
439 if match(fn): yield 'm', fn
443 if match(fn): yield 'm', fn
440 else:
444 else:
441 for src, fn in self.dirstate.walk(files, match):
445 for src, fn in self.dirstate.walk(files, match):
442 yield src, fn
446 yield src, fn
443
447
444 def changes(self, node1 = None, node2 = None, files = [],
448 def changes(self, node1 = None, node2 = None, files = [],
445 match = util.always):
449 match = util.always):
446 mf2, u = None, []
450 mf2, u = None, []
447
451
448 def fcmp(fn, mf):
452 def fcmp(fn, mf):
449 t1 = self.wread(fn)
453 t1 = self.wread(fn)
450 t2 = self.file(fn).read(mf.get(fn, nullid))
454 t2 = self.file(fn).read(mf.get(fn, nullid))
451 return cmp(t1, t2)
455 return cmp(t1, t2)
452
456
453 def mfmatches(node):
457 def mfmatches(node):
454 mf = dict(self.manifest.read(node))
458 mf = dict(self.manifest.read(node))
455 for fn in mf.keys():
459 for fn in mf.keys():
456 if not match(fn):
460 if not match(fn):
457 del mf[fn]
461 del mf[fn]
458 return mf
462 return mf
459
463
460 # are we comparing the working directory?
464 # are we comparing the working directory?
461 if not node2:
465 if not node2:
462 l, c, a, d, u = self.dirstate.changes(files, match)
466 l, c, a, d, u = self.dirstate.changes(files, match)
463
467
464 # are we comparing working dir against its parent?
468 # are we comparing working dir against its parent?
465 if not node1:
469 if not node1:
466 if l:
470 if l:
467 # do a full compare of any files that might have changed
471 # do a full compare of any files that might have changed
468 change = self.changelog.read(self.dirstate.parents()[0])
472 change = self.changelog.read(self.dirstate.parents()[0])
469 mf2 = mfmatches(change[0])
473 mf2 = mfmatches(change[0])
470 for f in l:
474 for f in l:
471 if fcmp(f, mf2):
475 if fcmp(f, mf2):
472 c.append(f)
476 c.append(f)
473
477
474 for l in c, a, d, u:
478 for l in c, a, d, u:
475 l.sort()
479 l.sort()
476
480
477 return (c, a, d, u)
481 return (c, a, d, u)
478
482
479 # are we comparing working dir against non-tip?
483 # are we comparing working dir against non-tip?
480 # generate a pseudo-manifest for the working dir
484 # generate a pseudo-manifest for the working dir
481 if not node2:
485 if not node2:
482 if not mf2:
486 if not mf2:
483 change = self.changelog.read(self.dirstate.parents()[0])
487 change = self.changelog.read(self.dirstate.parents()[0])
484 mf2 = mfmatches(change[0])
488 mf2 = mfmatches(change[0])
485 for f in a + c + l:
489 for f in a + c + l:
486 mf2[f] = ""
490 mf2[f] = ""
487 for f in d:
491 for f in d:
488 if f in mf2: del mf2[f]
492 if f in mf2: del mf2[f]
489 else:
493 else:
490 change = self.changelog.read(node2)
494 change = self.changelog.read(node2)
491 mf2 = mfmatches(change[0])
495 mf2 = mfmatches(change[0])
492
496
493 # flush lists from dirstate before comparing manifests
497 # flush lists from dirstate before comparing manifests
494 c, a = [], []
498 c, a = [], []
495
499
496 change = self.changelog.read(node1)
500 change = self.changelog.read(node1)
497 mf1 = mfmatches(change[0])
501 mf1 = mfmatches(change[0])
498
502
499 for fn in mf2:
503 for fn in mf2:
500 if mf1.has_key(fn):
504 if mf1.has_key(fn):
501 if mf1[fn] != mf2[fn]:
505 if mf1[fn] != mf2[fn]:
502 if mf2[fn] != "" or fcmp(fn, mf1):
506 if mf2[fn] != "" or fcmp(fn, mf1):
503 c.append(fn)
507 c.append(fn)
504 del mf1[fn]
508 del mf1[fn]
505 else:
509 else:
506 a.append(fn)
510 a.append(fn)
507
511
508 d = mf1.keys()
512 d = mf1.keys()
509
513
510 for l in c, a, d, u:
514 for l in c, a, d, u:
511 l.sort()
515 l.sort()
512
516
513 return (c, a, d, u)
517 return (c, a, d, u)
514
518
515 def add(self, list):
519 def add(self, list):
516 for f in list:
520 for f in list:
517 p = self.wjoin(f)
521 p = self.wjoin(f)
518 if not os.path.exists(p):
522 if not os.path.exists(p):
519 self.ui.warn("%s does not exist!\n" % f)
523 self.ui.warn("%s does not exist!\n" % f)
520 elif not os.path.isfile(p):
524 elif not os.path.isfile(p):
521 self.ui.warn("%s not added: only files supported currently\n" % f)
525 self.ui.warn("%s not added: only files supported currently\n" % f)
522 elif self.dirstate.state(f) in 'an':
526 elif self.dirstate.state(f) in 'an':
523 self.ui.warn("%s already tracked!\n" % f)
527 self.ui.warn("%s already tracked!\n" % f)
524 else:
528 else:
525 self.dirstate.update([f], "a")
529 self.dirstate.update([f], "a")
526
530
527 def forget(self, list):
531 def forget(self, list):
528 for f in list:
532 for f in list:
529 if self.dirstate.state(f) not in 'ai':
533 if self.dirstate.state(f) not in 'ai':
530 self.ui.warn("%s not added!\n" % f)
534 self.ui.warn("%s not added!\n" % f)
531 else:
535 else:
532 self.dirstate.forget([f])
536 self.dirstate.forget([f])
533
537
534 def remove(self, list):
538 def remove(self, list):
535 for f in list:
539 for f in list:
536 p = self.wjoin(f)
540 p = self.wjoin(f)
537 if os.path.exists(p):
541 if os.path.exists(p):
538 self.ui.warn("%s still exists!\n" % f)
542 self.ui.warn("%s still exists!\n" % f)
539 elif self.dirstate.state(f) == 'a':
543 elif self.dirstate.state(f) == 'a':
540 self.ui.warn("%s never committed!\n" % f)
544 self.ui.warn("%s never committed!\n" % f)
541 self.dirstate.forget([f])
545 self.dirstate.forget([f])
542 elif f not in self.dirstate:
546 elif f not in self.dirstate:
543 self.ui.warn("%s not tracked!\n" % f)
547 self.ui.warn("%s not tracked!\n" % f)
544 else:
548 else:
545 self.dirstate.update([f], "r")
549 self.dirstate.update([f], "r")
546
550
547 def copy(self, source, dest):
551 def copy(self, source, dest):
548 p = self.wjoin(dest)
552 p = self.wjoin(dest)
549 if not os.path.exists(p):
553 if not os.path.exists(p):
550 self.ui.warn("%s does not exist!\n" % dest)
554 self.ui.warn("%s does not exist!\n" % dest)
551 elif not os.path.isfile(p):
555 elif not os.path.isfile(p):
552 self.ui.warn("copy failed: %s is not a file\n" % dest)
556 self.ui.warn("copy failed: %s is not a file\n" % dest)
553 else:
557 else:
554 if self.dirstate.state(dest) == '?':
558 if self.dirstate.state(dest) == '?':
555 self.dirstate.update([dest], "a")
559 self.dirstate.update([dest], "a")
556 self.dirstate.copy(source, dest)
560 self.dirstate.copy(source, dest)
557
561
558 def heads(self):
562 def heads(self):
559 return self.changelog.heads()
563 return self.changelog.heads()
560
564
561 # branchlookup returns a dict giving a list of branches for
565 # branchlookup returns a dict giving a list of branches for
562 # each head. A branch is defined as the tag of a node or
566 # each head. A branch is defined as the tag of a node or
563 # the branch of the node's parents. If a node has multiple
567 # the branch of the node's parents. If a node has multiple
564 # branch tags, tags are eliminated if they are visible from other
568 # branch tags, tags are eliminated if they are visible from other
565 # branch tags.
569 # branch tags.
566 #
570 #
567 # So, for this graph: a->b->c->d->e
571 # So, for this graph: a->b->c->d->e
568 # \ /
572 # \ /
569 # aa -----/
573 # aa -----/
570 # a has tag 2.6.12
574 # a has tag 2.6.12
571 # d has tag 2.6.13
575 # d has tag 2.6.13
572 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
576 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
573 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
577 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
574 # from the list.
578 # from the list.
575 #
579 #
576 # It is possible that more than one head will have the same branch tag.
580 # It is possible that more than one head will have the same branch tag.
577 # callers need to check the result for multiple heads under the same
581 # callers need to check the result for multiple heads under the same
578 # branch tag if that is a problem for them (ie checkout of a specific
582 # branch tag if that is a problem for them (ie checkout of a specific
579 # branch).
583 # branch).
580 #
584 #
581 # passing in a specific branch will limit the depth of the search
585 # passing in a specific branch will limit the depth of the search
582 # through the parents. It won't limit the branches returned in the
586 # through the parents. It won't limit the branches returned in the
583 # result though.
587 # result though.
584 def branchlookup(self, heads=None, branch=None):
588 def branchlookup(self, heads=None, branch=None):
585 if not heads:
589 if not heads:
586 heads = self.heads()
590 heads = self.heads()
587 headt = [ h for h in heads ]
591 headt = [ h for h in heads ]
588 chlog = self.changelog
592 chlog = self.changelog
589 branches = {}
593 branches = {}
590 merges = []
594 merges = []
591 seenmerge = {}
595 seenmerge = {}
592
596
593 # traverse the tree once for each head, recording in the branches
597 # traverse the tree once for each head, recording in the branches
594 # dict which tags are visible from this head. The branches
598 # dict which tags are visible from this head. The branches
595 # dict also records which tags are visible from each tag
599 # dict also records which tags are visible from each tag
596 # while we traverse.
600 # while we traverse.
597 while headt or merges:
601 while headt or merges:
598 if merges:
602 if merges:
599 n, found = merges.pop()
603 n, found = merges.pop()
600 visit = [n]
604 visit = [n]
601 else:
605 else:
602 h = headt.pop()
606 h = headt.pop()
603 visit = [h]
607 visit = [h]
604 found = [h]
608 found = [h]
605 seen = {}
609 seen = {}
606 while visit:
610 while visit:
607 n = visit.pop()
611 n = visit.pop()
608 if n in seen:
612 if n in seen:
609 continue
613 continue
610 pp = chlog.parents(n)
614 pp = chlog.parents(n)
611 tags = self.nodetags(n)
615 tags = self.nodetags(n)
612 if tags:
616 if tags:
613 for x in tags:
617 for x in tags:
614 if x == 'tip':
618 if x == 'tip':
615 continue
619 continue
616 for f in found:
620 for f in found:
617 branches.setdefault(f, {})[n] = 1
621 branches.setdefault(f, {})[n] = 1
618 branches.setdefault(n, {})[n] = 1
622 branches.setdefault(n, {})[n] = 1
619 break
623 break
620 if n not in found:
624 if n not in found:
621 found.append(n)
625 found.append(n)
622 if branch in tags:
626 if branch in tags:
623 continue
627 continue
624 seen[n] = 1
628 seen[n] = 1
625 if pp[1] != nullid and n not in seenmerge:
629 if pp[1] != nullid and n not in seenmerge:
626 merges.append((pp[1], [x for x in found]))
630 merges.append((pp[1], [x for x in found]))
627 seenmerge[n] = 1
631 seenmerge[n] = 1
628 if pp[0] != nullid:
632 if pp[0] != nullid:
629 visit.append(pp[0])
633 visit.append(pp[0])
630 # traverse the branches dict, eliminating branch tags from each
634 # traverse the branches dict, eliminating branch tags from each
631 # head that are visible from another branch tag for that head.
635 # head that are visible from another branch tag for that head.
632 out = {}
636 out = {}
633 viscache = {}
637 viscache = {}
634 for h in heads:
638 for h in heads:
635 def visible(node):
639 def visible(node):
636 if node in viscache:
640 if node in viscache:
637 return viscache[node]
641 return viscache[node]
638 ret = {}
642 ret = {}
639 visit = [node]
643 visit = [node]
640 while visit:
644 while visit:
641 x = visit.pop()
645 x = visit.pop()
642 if x in viscache:
646 if x in viscache:
643 ret.update(viscache[x])
647 ret.update(viscache[x])
644 elif x not in ret:
648 elif x not in ret:
645 ret[x] = 1
649 ret[x] = 1
646 if x in branches:
650 if x in branches:
647 visit[len(visit):] = branches[x].keys()
651 visit[len(visit):] = branches[x].keys()
648 viscache[node] = ret
652 viscache[node] = ret
649 return ret
653 return ret
650 if h not in branches:
654 if h not in branches:
651 continue
655 continue
652 # O(n^2), but somewhat limited. This only searches the
656 # O(n^2), but somewhat limited. This only searches the
653 # tags visible from a specific head, not all the tags in the
657 # tags visible from a specific head, not all the tags in the
654 # whole repo.
658 # whole repo.
655 for b in branches[h]:
659 for b in branches[h]:
656 vis = False
660 vis = False
657 for bb in branches[h].keys():
661 for bb in branches[h].keys():
658 if b != bb:
662 if b != bb:
659 if b in visible(bb):
663 if b in visible(bb):
660 vis = True
664 vis = True
661 break
665 break
662 if not vis:
666 if not vis:
663 l = out.setdefault(h, [])
667 l = out.setdefault(h, [])
664 l[len(l):] = self.nodetags(b)
668 l[len(l):] = self.nodetags(b)
665 return out
669 return out
666
670
667 def branches(self, nodes):
671 def branches(self, nodes):
668 if not nodes: nodes = [self.changelog.tip()]
672 if not nodes: nodes = [self.changelog.tip()]
669 b = []
673 b = []
670 for n in nodes:
674 for n in nodes:
671 t = n
675 t = n
672 while n:
676 while n:
673 p = self.changelog.parents(n)
677 p = self.changelog.parents(n)
674 if p[1] != nullid or p[0] == nullid:
678 if p[1] != nullid or p[0] == nullid:
675 b.append((t, n, p[0], p[1]))
679 b.append((t, n, p[0], p[1]))
676 break
680 break
677 n = p[0]
681 n = p[0]
678 return b
682 return b
679
683
680 def between(self, pairs):
684 def between(self, pairs):
681 r = []
685 r = []
682
686
683 for top, bottom in pairs:
687 for top, bottom in pairs:
684 n, l, i = top, [], 0
688 n, l, i = top, [], 0
685 f = 1
689 f = 1
686
690
687 while n != bottom:
691 while n != bottom:
688 p = self.changelog.parents(n)[0]
692 p = self.changelog.parents(n)[0]
689 if i == f:
693 if i == f:
690 l.append(n)
694 l.append(n)
691 f = f * 2
695 f = f * 2
692 n = p
696 n = p
693 i += 1
697 i += 1
694
698
695 r.append(l)
699 r.append(l)
696
700
697 return r
701 return r
698
702
699 def newer(self, nodes):
703 def newer(self, nodes):
700 m = {}
704 m = {}
701 nl = []
705 nl = []
702 pm = {}
706 pm = {}
703 cl = self.changelog
707 cl = self.changelog
704 t = l = cl.count()
708 t = l = cl.count()
705
709
706 # find the lowest numbered node
710 # find the lowest numbered node
707 for n in nodes:
711 for n in nodes:
708 l = min(l, cl.rev(n))
712 l = min(l, cl.rev(n))
709 m[n] = 1
713 m[n] = 1
710
714
711 for i in xrange(l, t):
715 for i in xrange(l, t):
712 n = cl.node(i)
716 n = cl.node(i)
713 if n in m: # explicitly listed
717 if n in m: # explicitly listed
714 pm[n] = 1
718 pm[n] = 1
715 nl.append(n)
719 nl.append(n)
716 continue
720 continue
717 for p in cl.parents(n):
721 for p in cl.parents(n):
718 if p in pm: # parent listed
722 if p in pm: # parent listed
719 pm[n] = 1
723 pm[n] = 1
720 nl.append(n)
724 nl.append(n)
721 break
725 break
722
726
723 return nl
727 return nl
724
728
725 def findincoming(self, remote, base=None, heads=None):
729 def findincoming(self, remote, base=None, heads=None):
726 m = self.changelog.nodemap
730 m = self.changelog.nodemap
727 search = []
731 search = []
728 fetch = {}
732 fetch = {}
729 seen = {}
733 seen = {}
730 seenbranch = {}
734 seenbranch = {}
731 if base == None:
735 if base == None:
732 base = {}
736 base = {}
733
737
734 # assume we're closer to the tip than the root
738 # assume we're closer to the tip than the root
735 # and start by examining the heads
739 # and start by examining the heads
736 self.ui.status("searching for changes\n")
740 self.ui.status("searching for changes\n")
737
741
738 if not heads:
742 if not heads:
739 heads = remote.heads()
743 heads = remote.heads()
740
744
741 unknown = []
745 unknown = []
742 for h in heads:
746 for h in heads:
743 if h not in m:
747 if h not in m:
744 unknown.append(h)
748 unknown.append(h)
745 else:
749 else:
746 base[h] = 1
750 base[h] = 1
747
751
748 if not unknown:
752 if not unknown:
749 return None
753 return None
750
754
751 rep = {}
755 rep = {}
752 reqcnt = 0
756 reqcnt = 0
753
757
754 # search through remote branches
758 # search through remote branches
755 # a 'branch' here is a linear segment of history, with four parts:
759 # a 'branch' here is a linear segment of history, with four parts:
756 # head, root, first parent, second parent
760 # head, root, first parent, second parent
757 # (a branch always has two parents (or none) by definition)
761 # (a branch always has two parents (or none) by definition)
758 unknown = remote.branches(unknown)
762 unknown = remote.branches(unknown)
759 while unknown:
763 while unknown:
760 r = []
764 r = []
761 while unknown:
765 while unknown:
762 n = unknown.pop(0)
766 n = unknown.pop(0)
763 if n[0] in seen:
767 if n[0] in seen:
764 continue
768 continue
765
769
766 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
770 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
767 if n[0] == nullid:
771 if n[0] == nullid:
768 break
772 break
769 if n in seenbranch:
773 if n in seenbranch:
770 self.ui.debug("branch already found\n")
774 self.ui.debug("branch already found\n")
771 continue
775 continue
772 if n[1] and n[1] in m: # do we know the base?
776 if n[1] and n[1] in m: # do we know the base?
773 self.ui.debug("found incomplete branch %s:%s\n"
777 self.ui.debug("found incomplete branch %s:%s\n"
774 % (short(n[0]), short(n[1])))
778 % (short(n[0]), short(n[1])))
775 search.append(n) # schedule branch range for scanning
779 search.append(n) # schedule branch range for scanning
776 seenbranch[n] = 1
780 seenbranch[n] = 1
777 else:
781 else:
778 if n[1] not in seen and n[1] not in fetch:
782 if n[1] not in seen and n[1] not in fetch:
779 if n[2] in m and n[3] in m:
783 if n[2] in m and n[3] in m:
780 self.ui.debug("found new changeset %s\n" %
784 self.ui.debug("found new changeset %s\n" %
781 short(n[1]))
785 short(n[1]))
782 fetch[n[1]] = 1 # earliest unknown
786 fetch[n[1]] = 1 # earliest unknown
783 base[n[2]] = 1 # latest known
787 base[n[2]] = 1 # latest known
784 continue
788 continue
785
789
786 for a in n[2:4]:
790 for a in n[2:4]:
787 if a not in rep:
791 if a not in rep:
788 r.append(a)
792 r.append(a)
789 rep[a] = 1
793 rep[a] = 1
790
794
791 seen[n[0]] = 1
795 seen[n[0]] = 1
792
796
793 if r:
797 if r:
794 reqcnt += 1
798 reqcnt += 1
795 self.ui.debug("request %d: %s\n" %
799 self.ui.debug("request %d: %s\n" %
796 (reqcnt, " ".join(map(short, r))))
800 (reqcnt, " ".join(map(short, r))))
797 for p in range(0, len(r), 10):
801 for p in range(0, len(r), 10):
798 for b in remote.branches(r[p:p+10]):
802 for b in remote.branches(r[p:p+10]):
799 self.ui.debug("received %s:%s\n" %
803 self.ui.debug("received %s:%s\n" %
800 (short(b[0]), short(b[1])))
804 (short(b[0]), short(b[1])))
801 if b[0] in m:
805 if b[0] in m:
802 self.ui.debug("found base node %s\n" % short(b[0]))
806 self.ui.debug("found base node %s\n" % short(b[0]))
803 base[b[0]] = 1
807 base[b[0]] = 1
804 elif b[0] not in seen:
808 elif b[0] not in seen:
805 unknown.append(b)
809 unknown.append(b)
806
810
807 # do binary search on the branches we found
811 # do binary search on the branches we found
808 while search:
812 while search:
809 n = search.pop(0)
813 n = search.pop(0)
810 reqcnt += 1
814 reqcnt += 1
811 l = remote.between([(n[0], n[1])])[0]
815 l = remote.between([(n[0], n[1])])[0]
812 l.append(n[1])
816 l.append(n[1])
813 p = n[0]
817 p = n[0]
814 f = 1
818 f = 1
815 for i in l:
819 for i in l:
816 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
820 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
817 if i in m:
821 if i in m:
818 if f <= 2:
822 if f <= 2:
819 self.ui.debug("found new branch changeset %s\n" %
823 self.ui.debug("found new branch changeset %s\n" %
820 short(p))
824 short(p))
821 fetch[p] = 1
825 fetch[p] = 1
822 base[i] = 1
826 base[i] = 1
823 else:
827 else:
824 self.ui.debug("narrowed branch search to %s:%s\n"
828 self.ui.debug("narrowed branch search to %s:%s\n"
825 % (short(p), short(i)))
829 % (short(p), short(i)))
826 search.append((p, i))
830 search.append((p, i))
827 break
831 break
828 p, f = i, f * 2
832 p, f = i, f * 2
829
833
830 # sanity check our fetch list
834 # sanity check our fetch list
831 for f in fetch.keys():
835 for f in fetch.keys():
832 if f in m:
836 if f in m:
833 raise repo.RepoError("already have changeset " + short(f[:4]))
837 raise repo.RepoError("already have changeset " + short(f[:4]))
834
838
835 if base.keys() == [nullid]:
839 if base.keys() == [nullid]:
836 self.ui.warn("warning: pulling from an unrelated repository!\n")
840 self.ui.warn("warning: pulling from an unrelated repository!\n")
837
841
838 self.ui.note("found new changesets starting at " +
842 self.ui.note("found new changesets starting at " +
839 " ".join([short(f) for f in fetch]) + "\n")
843 " ".join([short(f) for f in fetch]) + "\n")
840
844
841 self.ui.debug("%d total queries\n" % reqcnt)
845 self.ui.debug("%d total queries\n" % reqcnt)
842
846
843 return fetch.keys()
847 return fetch.keys()
844
848
845 def findoutgoing(self, remote, base=None, heads=None):
849 def findoutgoing(self, remote, base=None, heads=None):
846 if base == None:
850 if base == None:
847 base = {}
851 base = {}
848 self.findincoming(remote, base, heads)
852 self.findincoming(remote, base, heads)
849
853
850 self.ui.debug("common changesets up to "
854 self.ui.debug("common changesets up to "
851 + " ".join(map(short, base.keys())) + "\n")
855 + " ".join(map(short, base.keys())) + "\n")
852
856
853 remain = dict.fromkeys(self.changelog.nodemap)
857 remain = dict.fromkeys(self.changelog.nodemap)
854
858
855 # prune everything remote has from the tree
859 # prune everything remote has from the tree
856 del remain[nullid]
860 del remain[nullid]
857 remove = base.keys()
861 remove = base.keys()
858 while remove:
862 while remove:
859 n = remove.pop(0)
863 n = remove.pop(0)
860 if n in remain:
864 if n in remain:
861 del remain[n]
865 del remain[n]
862 for p in self.changelog.parents(n):
866 for p in self.changelog.parents(n):
863 remove.append(p)
867 remove.append(p)
864
868
865 # find every node whose parents have been pruned
869 # find every node whose parents have been pruned
866 subset = []
870 subset = []
867 for n in remain:
871 for n in remain:
868 p1, p2 = self.changelog.parents(n)
872 p1, p2 = self.changelog.parents(n)
869 if p1 not in remain and p2 not in remain:
873 if p1 not in remain and p2 not in remain:
870 subset.append(n)
874 subset.append(n)
871
875
872 # this is the set of all roots we have to push
876 # this is the set of all roots we have to push
873 return subset
877 return subset
874
878
875 def pull(self, remote):
879 def pull(self, remote):
876 lock = self.lock()
880 lock = self.lock()
877
881
878 # if we have an empty repo, fetch everything
882 # if we have an empty repo, fetch everything
879 if self.changelog.tip() == nullid:
883 if self.changelog.tip() == nullid:
880 self.ui.status("requesting all changes\n")
884 self.ui.status("requesting all changes\n")
881 fetch = [nullid]
885 fetch = [nullid]
882 else:
886 else:
883 fetch = self.findincoming(remote)
887 fetch = self.findincoming(remote)
884
888
885 if not fetch:
889 if not fetch:
886 self.ui.status("no changes found\n")
890 self.ui.status("no changes found\n")
887 return 1
891 return 1
888
892
889 cg = remote.changegroup(fetch)
893 cg = remote.changegroup(fetch)
890 return self.addchangegroup(cg)
894 return self.addchangegroup(cg)
891
895
892 def push(self, remote, force=False):
896 def push(self, remote, force=False):
893 lock = remote.lock()
897 lock = remote.lock()
894
898
895 base = {}
899 base = {}
896 heads = remote.heads()
900 heads = remote.heads()
897 inc = self.findincoming(remote, base, heads)
901 inc = self.findincoming(remote, base, heads)
898 if not force and inc:
902 if not force and inc:
899 self.ui.warn("abort: unsynced remote changes!\n")
903 self.ui.warn("abort: unsynced remote changes!\n")
900 self.ui.status("(did you forget to sync? use push -f to force)\n")
904 self.ui.status("(did you forget to sync? use push -f to force)\n")
901 return 1
905 return 1
902
906
903 update = self.findoutgoing(remote, base)
907 update = self.findoutgoing(remote, base)
904 if not update:
908 if not update:
905 self.ui.status("no changes found\n")
909 self.ui.status("no changes found\n")
906 return 1
910 return 1
907 elif not force:
911 elif not force:
908 if len(heads) < len(self.changelog.heads()):
912 if len(heads) < len(self.changelog.heads()):
909 self.ui.warn("abort: push creates new remote branches!\n")
913 self.ui.warn("abort: push creates new remote branches!\n")
910 self.ui.status("(did you forget to merge?" +
914 self.ui.status("(did you forget to merge?" +
911 " use push -f to force)\n")
915 " use push -f to force)\n")
912 return 1
916 return 1
913
917
914 cg = self.changegroup(update)
918 cg = self.changegroup(update)
915 return remote.addchangegroup(cg)
919 return remote.addchangegroup(cg)
916
920
917 def changegroup(self, basenodes):
921 def changegroup(self, basenodes):
918 genread = util.chunkbuffer
922 genread = util.chunkbuffer
919
923
920 def gengroup():
924 def gengroup():
921 nodes = self.newer(basenodes)
925 nodes = self.newer(basenodes)
922
926
923 # construct the link map
927 # construct the link map
924 linkmap = {}
928 linkmap = {}
925 for n in nodes:
929 for n in nodes:
926 linkmap[self.changelog.rev(n)] = n
930 linkmap[self.changelog.rev(n)] = n
927
931
928 # construct a list of all changed files
932 # construct a list of all changed files
929 changed = {}
933 changed = {}
930 for n in nodes:
934 for n in nodes:
931 c = self.changelog.read(n)
935 c = self.changelog.read(n)
932 for f in c[3]:
936 for f in c[3]:
933 changed[f] = 1
937 changed[f] = 1
934 changed = changed.keys()
938 changed = changed.keys()
935 changed.sort()
939 changed.sort()
936
940
937 # the changegroup is changesets + manifests + all file revs
941 # the changegroup is changesets + manifests + all file revs
938 revs = [ self.changelog.rev(n) for n in nodes ]
942 revs = [ self.changelog.rev(n) for n in nodes ]
939
943
940 for y in self.changelog.group(linkmap): yield y
944 for y in self.changelog.group(linkmap): yield y
941 for y in self.manifest.group(linkmap): yield y
945 for y in self.manifest.group(linkmap): yield y
942 for f in changed:
946 for f in changed:
943 yield struct.pack(">l", len(f) + 4) + f
947 yield struct.pack(">l", len(f) + 4) + f
944 g = self.file(f).group(linkmap)
948 g = self.file(f).group(linkmap)
945 for y in g:
949 for y in g:
946 yield y
950 yield y
947
951
948 yield struct.pack(">l", 0)
952 yield struct.pack(">l", 0)
949
953
950 return genread(gengroup())
954 return genread(gengroup())
951
955
952 def addchangegroup(self, source):
956 def addchangegroup(self, source):
953
957
954 def getchunk():
958 def getchunk():
955 d = source.read(4)
959 d = source.read(4)
956 if not d: return ""
960 if not d: return ""
957 l = struct.unpack(">l", d)[0]
961 l = struct.unpack(">l", d)[0]
958 if l <= 4: return ""
962 if l <= 4: return ""
959 d = source.read(l - 4)
963 d = source.read(l - 4)
960 if len(d) < l - 4:
964 if len(d) < l - 4:
961 raise repo.RepoError("premature EOF reading chunk" +
965 raise repo.RepoError("premature EOF reading chunk" +
962 " (got %d bytes, expected %d)"
966 " (got %d bytes, expected %d)"
963 % (len(d), l - 4))
967 % (len(d), l - 4))
964 return d
968 return d
965
969
966 def getgroup():
970 def getgroup():
967 while 1:
971 while 1:
968 c = getchunk()
972 c = getchunk()
969 if not c: break
973 if not c: break
970 yield c
974 yield c
971
975
972 def csmap(x):
976 def csmap(x):
973 self.ui.debug("add changeset %s\n" % short(x))
977 self.ui.debug("add changeset %s\n" % short(x))
974 return self.changelog.count()
978 return self.changelog.count()
975
979
976 def revmap(x):
980 def revmap(x):
977 return self.changelog.rev(x)
981 return self.changelog.rev(x)
978
982
979 if not source: return
983 if not source: return
980 changesets = files = revisions = 0
984 changesets = files = revisions = 0
981
985
982 tr = self.transaction()
986 tr = self.transaction()
983
987
984 oldheads = len(self.changelog.heads())
988 oldheads = len(self.changelog.heads())
985
989
986 # pull off the changeset group
990 # pull off the changeset group
987 self.ui.status("adding changesets\n")
991 self.ui.status("adding changesets\n")
988 co = self.changelog.tip()
992 co = self.changelog.tip()
989 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
993 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
990 cnr, cor = map(self.changelog.rev, (cn, co))
994 cnr, cor = map(self.changelog.rev, (cn, co))
991 changesets = cnr - cor
995 changesets = cnr - cor
992
996
993 # pull off the manifest group
997 # pull off the manifest group
994 self.ui.status("adding manifests\n")
998 self.ui.status("adding manifests\n")
995 mm = self.manifest.tip()
999 mm = self.manifest.tip()
996 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1000 mo = self.manifest.addgroup(getgroup(), revmap, tr)
997
1001
998 # process the files
1002 # process the files
999 self.ui.status("adding file changes\n")
1003 self.ui.status("adding file changes\n")
1000 while 1:
1004 while 1:
1001 f = getchunk()
1005 f = getchunk()
1002 if not f: break
1006 if not f: break
1003 self.ui.debug("adding %s revisions\n" % f)
1007 self.ui.debug("adding %s revisions\n" % f)
1004 fl = self.file(f)
1008 fl = self.file(f)
1005 o = fl.count()
1009 o = fl.count()
1006 n = fl.addgroup(getgroup(), revmap, tr)
1010 n = fl.addgroup(getgroup(), revmap, tr)
1007 revisions += fl.count() - o
1011 revisions += fl.count() - o
1008 files += 1
1012 files += 1
1009
1013
1010 newheads = len(self.changelog.heads())
1014 newheads = len(self.changelog.heads())
1011 heads = ""
1015 heads = ""
1012 if oldheads and newheads > oldheads:
1016 if oldheads and newheads > oldheads:
1013 heads = " (+%d heads)" % (newheads - oldheads)
1017 heads = " (+%d heads)" % (newheads - oldheads)
1014
1018
1015 self.ui.status(("added %d changesets" +
1019 self.ui.status(("added %d changesets" +
1016 " with %d changes to %d files%s\n")
1020 " with %d changes to %d files%s\n")
1017 % (changesets, revisions, files, heads))
1021 % (changesets, revisions, files, heads))
1018
1022
1019 tr.close()
1023 tr.close()
1020
1024
1021 if not self.hook("changegroup", node=hex(self.changelog.node(cor+1))):
1025 if not self.hook("changegroup", node=hex(self.changelog.node(cor+1))):
1022 self.ui.warn("abort: changegroup hook returned failure!\n")
1026 self.ui.warn("abort: changegroup hook returned failure!\n")
1023 return 1
1027 return 1
1024
1028
1025 for i in range(cor + 1, cnr + 1):
1029 for i in range(cor + 1, cnr + 1):
1026 self.hook("commit", node=hex(self.changelog.node(i)))
1030 self.hook("commit", node=hex(self.changelog.node(i)))
1027
1031
1028 return
1032 return
1029
1033
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes()

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(u)

        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f): continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
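                        # (added note) (a^b)|(a^c) flags the exec bit as
                        # changed by either side; XOR-ing that with the
                        # ancestor bit a keeps a when neither side changed it
                        # and takes the changed value otherwise, i.e. "if we
                        # changed or they changed, change in merge".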
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = "d"
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug("remote deleted %s, clobbering\n" % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local modified %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if choose and not choose(f): continue
            if f[0] == "/": continue
            if f in ma and n != ma[f]:
                r = "k"
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "k": get[f] = n
            elif f not in ma:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug("local deleted %s, recreating\n" % f)
                    get[f] = n
                else:
                    self.ui.debug("local deleted %s\n" % f)

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to merge across branches" +
                               " or -C to lose changes)\n")
                return 1
            branch_merge = True

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wwrite(f, t)
            except IOError:
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            my, other, flag = merge[f]
            self.merge3(f, my, other)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note("removing %s\n" % f)
            try:
                os.unlink(self.wjoin(f))
            except OSError, inst:
                self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
            # try removing directories that might now be empty
            try: os.removedirs(os.path.dirname(self.wjoin(f)))
            except: pass
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

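    # --- added summary (inferred from the code above; not in the original) --
    # What update() leaves behind in the dirstate, roughly:
    #   'n'                    file copied straight from the target revision
    #   'n' with st_mtime=-1   entry whose content must be re-checked by status
    #   'm'                    merged across branches, recorded at next commit
    #   'r' / forgotten        removed during a branch merge, or simply dropped
    # -------------------------------------------------------------------------
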
    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            self.wwrite(fn, fl.read(node), f)
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: other %s ancestor %s\n" %
                      (fn, short(other), short(base)))

        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
               or "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)

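    # --- illustrative sketch (added; not part of the original file) ---------
    # merge3() runs the merge program as:  <cmd> <local> <base-copy> <other-copy>
    # where <local> is the working-copy file that must end up holding the
    # result.  A minimal, hypothetical $HGMERGE wrapper around RCS merge(1)
    # could therefore look like:
    #   #!/bin/sh
    #   merge "$1" "$2" "$3"
    # The stock "hgmerge" script that ships with Mercurial plays the same role
    # but tries several merge tools in turn.
    # -------------------------------------------------------------------------
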
    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = 0

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            if n in seen:
                self.ui.warn("duplicate changeset at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    self.ui.warn("changeset %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
                errors += 1

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            if n in seen:
                self.ui.warn("duplicate manifest at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    self.ui.warn("manifest %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                self.ui.warn("interrupted")
                raise
            except Exception, inst:
                self.ui.warn("unpacking manifest %s: %s\n"
                             % (short(n), inst))
                errors += 1

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1
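                # (added note) each manifest line is "<path>\0<40 hex chars>"
                # optionally followed by flag characters, so fn[:40] keeps
                # just the hex filenode before it is converted with bin().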

        self.ui.status("crosschecking files in changesets and manifests\n")
        for f in filenodes:
            if f not in filelinkrevs:
                self.ui.warn("file %s in manifest but not in changesets\n" % f)
                errors += 1

        for f in filelinkrevs:
            if f not in filenodes:
                self.ui.warn("file %s in changeset but not in manifest\n" % f)
                errors += 1

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    self.ui.warn("%s: duplicate revision %d\n" % (f, i))
                    errors += 1

                if n not in filenodes[f]:
                    self.ui.warn("%s: %d:%s not in manifests\n"
                                 % (f, i, short(n)))
                    errors += 1
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    self.ui.warn("%s:%s points to unexpected changeset %d\n"
                                 % (f, short(n), fl.linkrev(n)))
                    errors += 1
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    self.ui.warn("unpacking file %s %s: %s\n"
                                 % (f, short(n), inst))
                    errors += 1

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 1 %s\n" %
                                 (f, short(n), short(p1)))
                    errors += 1
                if p2 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 2 %s\n" %
                                 (f, short(n), short(p2)))
                    errors += 1
                nodes[n] = 1
1419
1423
1420 # cross-check
1424 # cross-check
1421 for node in filenodes[f]:
1425 for node in filenodes[f]:
1422 self.ui.warn("node %s in manifests not in %s\n"
1426 self.ui.warn("node %s in manifests not in %s\n"
1423 % (hex(node), f))
1427 % (hex(node), f))
1424 errors += 1
1428 errors += 1
1425
1429
1426 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1430 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1427 (files, changesets, revisions))
1431 (files, changesets, revisions))
1428
1432
1429 if errors:
1433 if errors:
1430 self.ui.warn("%d integrity errors encountered!\n" % errors)
1434 self.ui.warn("%d integrity errors encountered!\n" % errors)
1431 return 1
1435 return 1
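
    # --- usage note (added; not part of the original file) ------------------
    # This method backs "hg verify": it walks the changelog, the manifest log
    # and every filelog, cross-checks them against each other, prints the
    # summary line above, and returns 1 only if integrity errors were found.
    # -------------------------------------------------------------------------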