##// END OF EJS Templates
Pass the correct username as $HGUSER to hgeditor when "commit -u" is used.
Thomas Arendsen Hein -
r1983:ae12a815 default
parent child Browse files
Show More
@@ -1,1894 +1,1894 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "changegroup")
14 demandload(globals(), "changegroup")
15
15
class localrepository(object):
    """Read/write access to a local on-disk Mercurial repository.

    Wraps the changelog, manifest, filelogs and dirstate stored under
    the repository's .hg directory.  (Class continues past this view.)
    """
    def __del__(self):
        # Drop the reference to any open transaction handle on teardown.
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create=1, create) the repository at *path*.

        parentui - ui object the repository ui is derived from
        path     - repository root; when None, search upward from the
                   current directory for a ".hg" directory
        create   - when true, make the .hg and .hg/data directories

        Raises repo.RepoError when no repository can be found/opened.
        """
        if not path:
            # Walk up from cwd until a directory containing ".hg" is found.
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    # dirname() reached the filesystem root without a hit
                    raise repo.RepoError(_("no repo found"))
            path = p
        self.path = os.path.join(path, ".hg")

        if not create and not os.path.isdir(self.path):
            raise repo.RepoError(_("repository %s not found") % path)

        self.root = os.path.abspath(path)
        self.ui = ui.ui(parentui=parentui)
        # opener reads/writes under .hg, wopener under the working dir
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)
        self.manifest = manifest.manifest(self.opener)
        self.changelog = changelog.changelog(self.opener)
        # lazily-filled caches; None means "not computed yet"
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
        try:
            # per-repository configuration overrides
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            # no .hg/hgrc is perfectly fine
            pass
54
54
    def hook(self, name, throw=False, **args):
        """Run all configured hooks whose name starts with *name*.

        Each keyword argument is exported to the hook's environment
        both as-is (upper-cased) and with an HG_ prefix.  When *throw*
        is true a failing hook raises util.Abort; otherwise failures
        are only reported.  Returns True when every hook succeeded.
        """
        def runhook(name, cmd):
            # Execute one shell hook with the repo root as cwd.
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
                       [(k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                # non-zero exit status: explain and fail
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('error: %s hook %s\n') % (name, desc))
                return False
            return True

        r = True
        # "name" and "name.suffix" entries from [hooks] both match
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            # run every hook; remember if any of them failed
            r = runhook(hname, cmd) and r
        return r
76
76
    def tags(self):
        '''return a mapping of tag to node'''
        # Cached after the first call; invalidated by reload().
        if not self.tagscache:
            self.tagscache = {}
            def addtag(self, k, n):
                # Record tag k -> binary node n; an unparsable node
                # hash maps to '' rather than raising.
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k.strip()] = bin_n

            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                h.reverse()
                for r in h:
                    for l in fl.read(r).splitlines():
                        if l:
                            # each line is "<hex node> <tag name>"
                            n, k = l.split(" ", 1)
                            addtag(self, k, n)
            except KeyError:
                # no .hgtags filelog exists yet
                pass

            try:
                # purely local (uncommitted) tags, same line format
                f = self.opener("localtags")
                for l in f:
                    n, k = l.split(" ", 1)
                    addtag(self, k, n)
            except IOError:
                pass

            # the implicit "tip" tag always wins
            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
114
114
115 def tagslist(self):
115 def tagslist(self):
116 '''return a list of tags ordered by revision'''
116 '''return a list of tags ordered by revision'''
117 l = []
117 l = []
118 for t, n in self.tags().items():
118 for t, n in self.tags().items():
119 try:
119 try:
120 r = self.changelog.rev(n)
120 r = self.changelog.rev(n)
121 except:
121 except:
122 r = -2 # sort to the beginning of the list if unknown
122 r = -2 # sort to the beginning of the list if unknown
123 l.append((r, t, n))
123 l.append((r, t, n))
124 l.sort()
124 l.sort()
125 return [(t, n) for r, t, n in l]
125 return [(t, n) for r, t, n in l]
126
126
127 def nodetags(self, node):
127 def nodetags(self, node):
128 '''return the tags associated with a node'''
128 '''return the tags associated with a node'''
129 if not self.nodetagscache:
129 if not self.nodetagscache:
130 self.nodetagscache = {}
130 self.nodetagscache = {}
131 for t, n in self.tags().items():
131 for t, n in self.tags().items():
132 self.nodetagscache.setdefault(n, []).append(t)
132 self.nodetagscache.setdefault(n, []).append(t)
133 return self.nodetagscache.get(node, [])
133 return self.nodetagscache.get(node, [])
134
134
135 def lookup(self, key):
135 def lookup(self, key):
136 try:
136 try:
137 return self.tags()[key]
137 return self.tags()[key]
138 except KeyError:
138 except KeyError:
139 try:
139 try:
140 return self.changelog.lookup(key)
140 return self.changelog.lookup(key)
141 except:
141 except:
142 raise repo.RepoError(_("unknown revision '%s'") % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
143
143
    def dev(self):
        # Device number of the filesystem holding .hg (st_dev).
        return os.stat(self.path).st_dev
146
146
    def local(self):
        # This repository class is always local (vs. remote repo types).
        return True
149
149
    def join(self, f):
        """Return the path of *f* inside the .hg directory."""
        return os.path.join(self.path, f)
152
152
    def wjoin(self, f):
        """Return the path of *f* inside the working directory."""
        return os.path.join(self.root, f)
155
155
    def file(self, f):
        """Return the filelog for tracked file *f*.

        A single leading '/' is stripped so absolute-style names map to
        repository-relative ones.
        """
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.opener, f)
160
160
    def getcwd(self):
        # Current working directory relative to the repository root,
        # as tracked by the dirstate.
        return self.dirstate.getcwd()
163
163
    def wfile(self, f, mode='r'):
        """Open working-directory file *f* with the given mode."""
        return self.wopener(f, mode)
166
166
167 def wread(self, filename):
167 def wread(self, filename):
168 if self.encodepats == None:
168 if self.encodepats == None:
169 l = []
169 l = []
170 for pat, cmd in self.ui.configitems("encode"):
170 for pat, cmd in self.ui.configitems("encode"):
171 mf = util.matcher(self.root, "", [pat], [], [])[1]
171 mf = util.matcher(self.root, "", [pat], [], [])[1]
172 l.append((mf, cmd))
172 l.append((mf, cmd))
173 self.encodepats = l
173 self.encodepats = l
174
174
175 data = self.wopener(filename, 'r').read()
175 data = self.wopener(filename, 'r').read()
176
176
177 for mf, cmd in self.encodepats:
177 for mf, cmd in self.encodepats:
178 if mf(filename):
178 if mf(filename):
179 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
180 data = util.filter(data, cmd)
180 data = util.filter(data, cmd)
181 break
181 break
182
182
183 return data
183 return data
184
184
185 def wwrite(self, filename, data, fd=None):
185 def wwrite(self, filename, data, fd=None):
186 if self.decodepats == None:
186 if self.decodepats == None:
187 l = []
187 l = []
188 for pat, cmd in self.ui.configitems("decode"):
188 for pat, cmd in self.ui.configitems("decode"):
189 mf = util.matcher(self.root, "", [pat], [], [])[1]
189 mf = util.matcher(self.root, "", [pat], [], [])[1]
190 l.append((mf, cmd))
190 l.append((mf, cmd))
191 self.decodepats = l
191 self.decodepats = l
192
192
193 for mf, cmd in self.decodepats:
193 for mf, cmd in self.decodepats:
194 if mf(filename):
194 if mf(filename):
195 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
196 data = util.filter(data, cmd)
196 data = util.filter(data, cmd)
197 break
197 break
198
198
199 if fd:
199 if fd:
200 return fd.write(data)
200 return fd.write(data)
201 return self.wopener(filename, 'w').write(data)
201 return self.wopener(filename, 'w').write(data)
202
202
203 def transaction(self):
203 def transaction(self):
204 tr = self.transhandle
204 tr = self.transhandle
205 if tr != None and tr.running():
205 if tr != None and tr.running():
206 return tr.nest()
206 return tr.nest()
207
207
208 # save dirstate for undo
208 # save dirstate for undo
209 try:
209 try:
210 ds = self.opener("dirstate").read()
210 ds = self.opener("dirstate").read()
211 except IOError:
211 except IOError:
212 ds = ""
212 ds = ""
213 self.opener("journal.dirstate", "w").write(ds)
213 self.opener("journal.dirstate", "w").write(ds)
214
214
215 tr = transaction.transaction(self.ui.warn, self.opener,
215 tr = transaction.transaction(self.ui.warn, self.opener,
216 self.join("journal"),
216 self.join("journal"),
217 aftertrans(self.path))
217 aftertrans(self.path))
218 self.transhandle = tr
218 self.transhandle = tr
219 return tr
219 return tr
220
220
    def recover(self):
        """Roll back an interrupted transaction, if any.

        Returns True when a journal was found and rolled back,
        False otherwise.
        """
        l = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.opener, self.join("journal"))
            # caches and revlogs are stale after rollback
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
231
231
    def undo(self, wlock=None):
        """Roll back the last committed transaction (the "undo" journal).

        wlock - optional pre-acquired working-dir lock; taken here when
        not supplied.
        """
        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.opener, self.join("undo"))
            # restore the dirstate snapshot taken by transaction()
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no undo information available\n"))
244
244
    def wreload(self):
        # Re-read the dirstate from disk (working-dir state changed).
        self.dirstate.read()
247
247
    def reload(self):
        """Re-read store data and drop caches derived from it."""
        self.changelog.load()
        self.manifest.load()
        # tag caches are derived from the changelog; force a rebuild
        self.tagscache = None
        self.nodetagscache = None
253
253
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
        """Acquire the named repository lock.

        lockname  - lock file name relative to .hg
        wait      - when true, retry with a timeout instead of failing
                    immediately if the lock is held
        releasefn - callback passed to lock.lock, run on release
        acquirefn - callback run after the lock is acquired

        Returns the lock object; raises lock.LockHeld (wait=0) or
        util.Abort (timeout).
        """
        try:
            # first try: non-blocking (timeout 0)
            l = lock.lock(self.join(lockname), 0, releasefn)
        except lock.LockHeld, inst:
            if not wait:
                raise inst
            self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
            try:
                # default to 600 seconds timeout
                l = lock.lock(self.join(lockname),
                              int(self.ui.config("ui", "timeout") or 600),
                              releasefn)
            except lock.LockHeld, inst:
                raise util.Abort(_("timeout while waiting for "
                                   "lock held by %s") % inst.args[0])
        if acquirefn:
            acquirefn()
        return l
272
272
    def lock(self, wait=1):
        """Acquire the store lock; reload store data once acquired."""
        return self.do_lock("lock", wait, acquirefn=self.reload)
275
275
    def wlock(self, wait=1):
        """Acquire the working-dir lock.

        Writes the dirstate on release and re-reads it on acquisition.
        """
        return self.do_lock("wlock", wait,
                            self.dirstate.write,
                            self.wreload)
280
280
    def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
        "determine whether a new filenode is needed"
        # Returns (existing_node, None, None) when *text* is already
        # stored, or (None, fp1, fp2) — the parents a new node needs.
        fp1 = manifest1.get(filename, nullid)
        fp2 = manifest2.get(filename, nullid)

        if fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = filelog.ancestor(fp1, fp2)
            if fpa == fp1:
                # fp1 is an ancestor of fp2: keep only fp2 as parent
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and text == filelog.read(fp1):
            return (fp1, None, None)

        return (None, fp1, fp2)
299
299
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
        """Commit *files* with explicit parents, bypassing most checks.

        Used for importing changesets: parents p1/p2 default to the
        dirstate parents, and the dirstate is only updated when p1 is
        the current working-dir parent.
        """
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # only touch the dirstate when committing on top of its parent
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    # content already stored; reuse the existing filenode
                    mm[f] = entry
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # file missing from the working dir: treat as removed
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
355
355
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None):
        """Commit changes in the working directory; return the new node.

        files - explicit list of files to commit; when empty/None,
                commit everything reported changed by self.changes()
        text  - commit message; when empty, an editor is launched
        user  - committer name; defaults to ui.username()
        force - allow an otherwise-empty commit
        lock/wlock - optional pre-acquired locks

        Returns None when there is nothing to commit or the edited
        message is empty.
        """
        commit = []
        remove = []
        changed = []

        if files:
            # classify the explicitly requested files by dirstate state
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            modified, added, removed, deleted, unknown = self.changes(match=match)
            commit = modified + added
            remove = removed

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 set) may legitimately change no files
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                # record copy source and its revision in file metadata
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    # unchanged content: reuse the existing filenode
                    new[f] = entry
                    continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1 = m1.copy()
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        # resolve the username before launching the editor so the hook
        # environment ($HGUSER) sees the -u override, not the default
        user = user or self.ui.username()
        if not text:
            edittext = [""]
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            edittext = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)
            # an empty message aborts the commit
            if not edittext.rstrip():
                return None
            text = edittext

        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
475
475
    def walk(self, node=None, files=[], match=util.always):
        # NOTE(review): mutable default for *files* — safe only because
        # it is never mutated here; confirm before changing.
        """Yield (source, filename) pairs for matched files.

        With *node*, walk that revision's manifest (source 'm') and
        warn about requested files missing from it; otherwise delegate
        to the dirstate walk.
        """
        if node:
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                # tick off requested files that actually exist in the rev
                fdict.pop(fn, None)
                if match(fn):
                    yield 'm', fn
            for fn in fdict:
                # anything left was requested but absent from the manifest
                self.ui.warn(_('%s: No such file in rev %s\n') % (
                    util.pathto(self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match):
                yield src, fn
489
489
490 def changes(self, node1=None, node2=None, files=[], match=util.always,
490 def changes(self, node1=None, node2=None, files=[], match=util.always,
491 wlock=None):
491 wlock=None):
492 """return changes between two nodes or node and working directory
492 """return changes between two nodes or node and working directory
493
493
494 If node1 is None, use the first dirstate parent instead.
494 If node1 is None, use the first dirstate parent instead.
495 If node2 is None, compare node1 with working directory.
495 If node2 is None, compare node1 with working directory.
496 """
496 """
497
497
498 def fcmp(fn, mf):
498 def fcmp(fn, mf):
499 t1 = self.wread(fn)
499 t1 = self.wread(fn)
500 t2 = self.file(fn).read(mf.get(fn, nullid))
500 t2 = self.file(fn).read(mf.get(fn, nullid))
501 return cmp(t1, t2)
501 return cmp(t1, t2)
502
502
503 def mfmatches(node):
503 def mfmatches(node):
504 change = self.changelog.read(node)
504 change = self.changelog.read(node)
505 mf = dict(self.manifest.read(change[0]))
505 mf = dict(self.manifest.read(change[0]))
506 for fn in mf.keys():
506 for fn in mf.keys():
507 if not match(fn):
507 if not match(fn):
508 del mf[fn]
508 del mf[fn]
509 return mf
509 return mf
510
510
511 if node1:
511 if node1:
512 # read the manifest from node1 before the manifest from node2,
512 # read the manifest from node1 before the manifest from node2,
513 # so that we'll hit the manifest cache if we're going through
513 # so that we'll hit the manifest cache if we're going through
514 # all the revisions in parent->child order.
514 # all the revisions in parent->child order.
515 mf1 = mfmatches(node1)
515 mf1 = mfmatches(node1)
516
516
517 # are we comparing the working directory?
517 # are we comparing the working directory?
518 if not node2:
518 if not node2:
519 if not wlock:
519 if not wlock:
520 try:
520 try:
521 wlock = self.wlock(wait=0)
521 wlock = self.wlock(wait=0)
522 except lock.LockException:
522 except lock.LockException:
523 wlock = None
523 wlock = None
524 lookup, modified, added, removed, deleted, unknown = (
524 lookup, modified, added, removed, deleted, unknown = (
525 self.dirstate.changes(files, match))
525 self.dirstate.changes(files, match))
526
526
527 # are we comparing working dir against its parent?
527 # are we comparing working dir against its parent?
528 if not node1:
528 if not node1:
529 if lookup:
529 if lookup:
530 # do a full compare of any files that might have changed
530 # do a full compare of any files that might have changed
531 mf2 = mfmatches(self.dirstate.parents()[0])
531 mf2 = mfmatches(self.dirstate.parents()[0])
532 for f in lookup:
532 for f in lookup:
533 if fcmp(f, mf2):
533 if fcmp(f, mf2):
534 modified.append(f)
534 modified.append(f)
535 elif wlock is not None:
535 elif wlock is not None:
536 self.dirstate.update([f], "n")
536 self.dirstate.update([f], "n")
537 else:
537 else:
538 # we are comparing working dir against non-parent
538 # we are comparing working dir against non-parent
539 # generate a pseudo-manifest for the working dir
539 # generate a pseudo-manifest for the working dir
540 mf2 = mfmatches(self.dirstate.parents()[0])
540 mf2 = mfmatches(self.dirstate.parents()[0])
541 for f in lookup + modified + added:
541 for f in lookup + modified + added:
542 mf2[f] = ""
542 mf2[f] = ""
543 for f in removed:
543 for f in removed:
544 if f in mf2:
544 if f in mf2:
545 del mf2[f]
545 del mf2[f]
546 else:
546 else:
547 # we are comparing two revisions
547 # we are comparing two revisions
548 deleted, unknown = [], []
548 deleted, unknown = [], []
549 mf2 = mfmatches(node2)
549 mf2 = mfmatches(node2)
550
550
551 if node1:
551 if node1:
552 # flush lists from dirstate before comparing manifests
552 # flush lists from dirstate before comparing manifests
553 modified, added = [], []
553 modified, added = [], []
554
554
555 for fn in mf2:
555 for fn in mf2:
556 if mf1.has_key(fn):
556 if mf1.has_key(fn):
557 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
557 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
558 modified.append(fn)
558 modified.append(fn)
559 del mf1[fn]
559 del mf1[fn]
560 else:
560 else:
561 added.append(fn)
561 added.append(fn)
562
562
563 removed = mf1.keys()
563 removed = mf1.keys()
564
564
565 # sort and return results:
565 # sort and return results:
566 for l in modified, added, removed, deleted, unknown:
566 for l in modified, added, removed, deleted, unknown:
567 l.sort()
567 l.sort()
568 return (modified, added, removed, deleted, unknown)
568 return (modified, added, removed, deleted, unknown)
569
569
    def add(self, list, wlock=None):
        """Schedule the files in *list* for addition at the next commit.

        Warns (without raising) about missing files, non-regular files
        and files already tracked.
        """
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn(_("%s does not exist!\n") % f)
            elif not os.path.isfile(p):
                self.ui.warn(_("%s not added: only files supported currently\n")
                             % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn(_("%s already tracked!\n") % f)
            else:
                # mark as added
                self.dirstate.update([f], "a")
584
584
585 def forget(self, list, wlock=None):
585 def forget(self, list, wlock=None):
586 if not wlock:
586 if not wlock:
587 wlock = self.wlock()
587 wlock = self.wlock()
588 for f in list:
588 for f in list:
589 if self.dirstate.state(f) not in 'ai':
589 if self.dirstate.state(f) not in 'ai':
590 self.ui.warn(_("%s not added!\n") % f)
590 self.ui.warn(_("%s not added!\n") % f)
591 else:
591 else:
592 self.dirstate.forget([f])
592 self.dirstate.forget([f])
593
593
594 def remove(self, list, unlink=False, wlock=None):
594 def remove(self, list, unlink=False, wlock=None):
595 if unlink:
595 if unlink:
596 for f in list:
596 for f in list:
597 try:
597 try:
598 util.unlink(self.wjoin(f))
598 util.unlink(self.wjoin(f))
599 except OSError, inst:
599 except OSError, inst:
600 if inst.errno != errno.ENOENT:
600 if inst.errno != errno.ENOENT:
601 raise
601 raise
602 if not wlock:
602 if not wlock:
603 wlock = self.wlock()
603 wlock = self.wlock()
604 for f in list:
604 for f in list:
605 p = self.wjoin(f)
605 p = self.wjoin(f)
606 if os.path.exists(p):
606 if os.path.exists(p):
607 self.ui.warn(_("%s still exists!\n") % f)
607 self.ui.warn(_("%s still exists!\n") % f)
608 elif self.dirstate.state(f) == 'a':
608 elif self.dirstate.state(f) == 'a':
609 self.dirstate.forget([f])
609 self.dirstate.forget([f])
610 elif f not in self.dirstate:
610 elif f not in self.dirstate:
611 self.ui.warn(_("%s not tracked!\n") % f)
611 self.ui.warn(_("%s not tracked!\n") % f)
612 else:
612 else:
613 self.dirstate.update([f], "r")
613 self.dirstate.update([f], "r")
614
614
615 def undelete(self, list, wlock=None):
615 def undelete(self, list, wlock=None):
616 p = self.dirstate.parents()[0]
616 p = self.dirstate.parents()[0]
617 mn = self.changelog.read(p)[0]
617 mn = self.changelog.read(p)[0]
618 mf = self.manifest.readflags(mn)
618 mf = self.manifest.readflags(mn)
619 m = self.manifest.read(mn)
619 m = self.manifest.read(mn)
620 if not wlock:
620 if not wlock:
621 wlock = self.wlock()
621 wlock = self.wlock()
622 for f in list:
622 for f in list:
623 if self.dirstate.state(f) not in "r":
623 if self.dirstate.state(f) not in "r":
624 self.ui.warn("%s not removed!\n" % f)
624 self.ui.warn("%s not removed!\n" % f)
625 else:
625 else:
626 t = self.file(f).read(m[f])
626 t = self.file(f).read(m[f])
627 self.wwrite(f, t)
627 self.wwrite(f, t)
628 util.set_exec(self.wjoin(f), mf[f])
628 util.set_exec(self.wjoin(f), mf[f])
629 self.dirstate.update([f], "n")
629 self.dirstate.update([f], "n")
630
630
631 def copy(self, source, dest, wlock=None):
631 def copy(self, source, dest, wlock=None):
632 p = self.wjoin(dest)
632 p = self.wjoin(dest)
633 if not os.path.exists(p):
633 if not os.path.exists(p):
634 self.ui.warn(_("%s does not exist!\n") % dest)
634 self.ui.warn(_("%s does not exist!\n") % dest)
635 elif not os.path.isfile(p):
635 elif not os.path.isfile(p):
636 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
636 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
637 else:
637 else:
638 if not wlock:
638 if not wlock:
639 wlock = self.wlock()
639 wlock = self.wlock()
640 if self.dirstate.state(dest) == '?':
640 if self.dirstate.state(dest) == '?':
641 self.dirstate.update([dest], "a")
641 self.dirstate.update([dest], "a")
642 self.dirstate.copy(source, dest)
642 self.dirstate.copy(source, dest)
643
643
644 def heads(self, start=None):
644 def heads(self, start=None):
645 heads = self.changelog.heads(start)
645 heads = self.changelog.heads(start)
646 # sort the output in rev descending order
646 # sort the output in rev descending order
647 heads = [(-self.changelog.rev(h), h) for h in heads]
647 heads = [(-self.changelog.rev(h), h) for h in heads]
648 heads.sort()
648 heads.sort()
649 return [n for (r, n) in heads]
649 return [n for (r, n) in heads]
650
650
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph: a->b->c->d->e
    #                      \         /
    #                       aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Map each head to the list of branch tags visible from it.

        See the block comment above for the definition of a 'branch'
        and the elimination rules applied to the result.
        """
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}   # node -> dict of tagged nodes visible from it
        merges = []     # (second parent, found-so-far) pairs to revisit
        seenmerge = {}
 
        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        # record this tagged node as visible from every
                        # node collected so far (and from itself)
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch tag: stop
                        # descending through its parents
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # queue the merge's second parent for a later pass
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}   # memoizes each node's transitive visibility set
        for h in heads:
            def visible(node):
                # set of tagged nodes reachable from node, cached
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
756
756
757 def branches(self, nodes):
757 def branches(self, nodes):
758 if not nodes:
758 if not nodes:
759 nodes = [self.changelog.tip()]
759 nodes = [self.changelog.tip()]
760 b = []
760 b = []
761 for n in nodes:
761 for n in nodes:
762 t = n
762 t = n
763 while n:
763 while n:
764 p = self.changelog.parents(n)
764 p = self.changelog.parents(n)
765 if p[1] != nullid or p[0] == nullid:
765 if p[1] != nullid or p[0] == nullid:
766 b.append((t, n, p[0], p[1]))
766 b.append((t, n, p[0], p[1]))
767 break
767 break
768 n = p[0]
768 n = p[0]
769 return b
769 return b
770
770
771 def between(self, pairs):
771 def between(self, pairs):
772 r = []
772 r = []
773
773
774 for top, bottom in pairs:
774 for top, bottom in pairs:
775 n, l, i = top, [], 0
775 n, l, i = top, [], 0
776 f = 1
776 f = 1
777
777
778 while n != bottom:
778 while n != bottom:
779 p = self.changelog.parents(n)[0]
779 p = self.changelog.parents(n)[0]
780 if i == f:
780 if i == f:
781 l.append(n)
781 l.append(n)
782 f = f * 2
782 f = f * 2
783 n = p
783 n = p
784 i += 1
784 i += 1
785
785
786 r.append(l)
786 r.append(l)
787
787
788 return r
788 return r
789
789
790 def findincoming(self, remote, base=None, heads=None, force=False):
790 def findincoming(self, remote, base=None, heads=None, force=False):
791 m = self.changelog.nodemap
791 m = self.changelog.nodemap
792 search = []
792 search = []
793 fetch = {}
793 fetch = {}
794 seen = {}
794 seen = {}
795 seenbranch = {}
795 seenbranch = {}
796 if base == None:
796 if base == None:
797 base = {}
797 base = {}
798
798
799 # assume we're closer to the tip than the root
799 # assume we're closer to the tip than the root
800 # and start by examining the heads
800 # and start by examining the heads
801 self.ui.status(_("searching for changes\n"))
801 self.ui.status(_("searching for changes\n"))
802
802
803 if not heads:
803 if not heads:
804 heads = remote.heads()
804 heads = remote.heads()
805
805
806 unknown = []
806 unknown = []
807 for h in heads:
807 for h in heads:
808 if h not in m:
808 if h not in m:
809 unknown.append(h)
809 unknown.append(h)
810 else:
810 else:
811 base[h] = 1
811 base[h] = 1
812
812
813 if not unknown:
813 if not unknown:
814 return []
814 return []
815
815
816 rep = {}
816 rep = {}
817 reqcnt = 0
817 reqcnt = 0
818
818
819 # search through remote branches
819 # search through remote branches
820 # a 'branch' here is a linear segment of history, with four parts:
820 # a 'branch' here is a linear segment of history, with four parts:
821 # head, root, first parent, second parent
821 # head, root, first parent, second parent
822 # (a branch always has two parents (or none) by definition)
822 # (a branch always has two parents (or none) by definition)
823 unknown = remote.branches(unknown)
823 unknown = remote.branches(unknown)
824 while unknown:
824 while unknown:
825 r = []
825 r = []
826 while unknown:
826 while unknown:
827 n = unknown.pop(0)
827 n = unknown.pop(0)
828 if n[0] in seen:
828 if n[0] in seen:
829 continue
829 continue
830
830
831 self.ui.debug(_("examining %s:%s\n")
831 self.ui.debug(_("examining %s:%s\n")
832 % (short(n[0]), short(n[1])))
832 % (short(n[0]), short(n[1])))
833 if n[0] == nullid:
833 if n[0] == nullid:
834 break
834 break
835 if n in seenbranch:
835 if n in seenbranch:
836 self.ui.debug(_("branch already found\n"))
836 self.ui.debug(_("branch already found\n"))
837 continue
837 continue
838 if n[1] and n[1] in m: # do we know the base?
838 if n[1] and n[1] in m: # do we know the base?
839 self.ui.debug(_("found incomplete branch %s:%s\n")
839 self.ui.debug(_("found incomplete branch %s:%s\n")
840 % (short(n[0]), short(n[1])))
840 % (short(n[0]), short(n[1])))
841 search.append(n) # schedule branch range for scanning
841 search.append(n) # schedule branch range for scanning
842 seenbranch[n] = 1
842 seenbranch[n] = 1
843 else:
843 else:
844 if n[1] not in seen and n[1] not in fetch:
844 if n[1] not in seen and n[1] not in fetch:
845 if n[2] in m and n[3] in m:
845 if n[2] in m and n[3] in m:
846 self.ui.debug(_("found new changeset %s\n") %
846 self.ui.debug(_("found new changeset %s\n") %
847 short(n[1]))
847 short(n[1]))
848 fetch[n[1]] = 1 # earliest unknown
848 fetch[n[1]] = 1 # earliest unknown
849 base[n[2]] = 1 # latest known
849 base[n[2]] = 1 # latest known
850 continue
850 continue
851
851
852 for a in n[2:4]:
852 for a in n[2:4]:
853 if a not in rep:
853 if a not in rep:
854 r.append(a)
854 r.append(a)
855 rep[a] = 1
855 rep[a] = 1
856
856
857 seen[n[0]] = 1
857 seen[n[0]] = 1
858
858
859 if r:
859 if r:
860 reqcnt += 1
860 reqcnt += 1
861 self.ui.debug(_("request %d: %s\n") %
861 self.ui.debug(_("request %d: %s\n") %
862 (reqcnt, " ".join(map(short, r))))
862 (reqcnt, " ".join(map(short, r))))
863 for p in range(0, len(r), 10):
863 for p in range(0, len(r), 10):
864 for b in remote.branches(r[p:p+10]):
864 for b in remote.branches(r[p:p+10]):
865 self.ui.debug(_("received %s:%s\n") %
865 self.ui.debug(_("received %s:%s\n") %
866 (short(b[0]), short(b[1])))
866 (short(b[0]), short(b[1])))
867 if b[0] in m:
867 if b[0] in m:
868 self.ui.debug(_("found base node %s\n")
868 self.ui.debug(_("found base node %s\n")
869 % short(b[0]))
869 % short(b[0]))
870 base[b[0]] = 1
870 base[b[0]] = 1
871 elif b[0] not in seen:
871 elif b[0] not in seen:
872 unknown.append(b)
872 unknown.append(b)
873
873
874 # do binary search on the branches we found
874 # do binary search on the branches we found
875 while search:
875 while search:
876 n = search.pop(0)
876 n = search.pop(0)
877 reqcnt += 1
877 reqcnt += 1
878 l = remote.between([(n[0], n[1])])[0]
878 l = remote.between([(n[0], n[1])])[0]
879 l.append(n[1])
879 l.append(n[1])
880 p = n[0]
880 p = n[0]
881 f = 1
881 f = 1
882 for i in l:
882 for i in l:
883 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
883 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
884 if i in m:
884 if i in m:
885 if f <= 2:
885 if f <= 2:
886 self.ui.debug(_("found new branch changeset %s\n") %
886 self.ui.debug(_("found new branch changeset %s\n") %
887 short(p))
887 short(p))
888 fetch[p] = 1
888 fetch[p] = 1
889 base[i] = 1
889 base[i] = 1
890 else:
890 else:
891 self.ui.debug(_("narrowed branch search to %s:%s\n")
891 self.ui.debug(_("narrowed branch search to %s:%s\n")
892 % (short(p), short(i)))
892 % (short(p), short(i)))
893 search.append((p, i))
893 search.append((p, i))
894 break
894 break
895 p, f = i, f * 2
895 p, f = i, f * 2
896
896
897 # sanity check our fetch list
897 # sanity check our fetch list
898 for f in fetch.keys():
898 for f in fetch.keys():
899 if f in m:
899 if f in m:
900 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
900 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
901
901
902 if base.keys() == [nullid]:
902 if base.keys() == [nullid]:
903 if force:
903 if force:
904 self.ui.warn(_("warning: repository is unrelated\n"))
904 self.ui.warn(_("warning: repository is unrelated\n"))
905 else:
905 else:
906 raise util.Abort(_("repository is unrelated"))
906 raise util.Abort(_("repository is unrelated"))
907
907
908 self.ui.note(_("found new changesets starting at ") +
908 self.ui.note(_("found new changesets starting at ") +
909 " ".join([short(f) for f in fetch]) + "\n")
909 " ".join([short(f) for f in fetch]) + "\n")
910
910
911 self.ui.debug(_("%d total queries\n") % reqcnt)
911 self.ui.debug(_("%d total queries\n") % reqcnt)
912
912
913 return fetch.keys()
913 return fetch.keys()
914
914
915 def findoutgoing(self, remote, base=None, heads=None, force=False):
915 def findoutgoing(self, remote, base=None, heads=None, force=False):
916 if base == None:
916 if base == None:
917 base = {}
917 base = {}
918 self.findincoming(remote, base, heads, force=force)
918 self.findincoming(remote, base, heads, force=force)
919
919
920 self.ui.debug(_("common changesets up to ")
920 self.ui.debug(_("common changesets up to ")
921 + " ".join(map(short, base.keys())) + "\n")
921 + " ".join(map(short, base.keys())) + "\n")
922
922
923 remain = dict.fromkeys(self.changelog.nodemap)
923 remain = dict.fromkeys(self.changelog.nodemap)
924
924
925 # prune everything remote has from the tree
925 # prune everything remote has from the tree
926 del remain[nullid]
926 del remain[nullid]
927 remove = base.keys()
927 remove = base.keys()
928 while remove:
928 while remove:
929 n = remove.pop(0)
929 n = remove.pop(0)
930 if n in remain:
930 if n in remain:
931 del remain[n]
931 del remain[n]
932 for p in self.changelog.parents(n):
932 for p in self.changelog.parents(n):
933 remove.append(p)
933 remove.append(p)
934
934
935 # find every node whose parents have been pruned
935 # find every node whose parents have been pruned
936 subset = []
936 subset = []
937 for n in remain:
937 for n in remain:
938 p1, p2 = self.changelog.parents(n)
938 p1, p2 = self.changelog.parents(n)
939 if p1 not in remain and p2 not in remain:
939 if p1 not in remain and p2 not in remain:
940 subset.append(n)
940 subset.append(n)
941
941
942 # this is the set of all roots we have to push
942 # this is the set of all roots we have to push
943 return subset
943 return subset
944
944
945 def pull(self, remote, heads=None, force=False):
945 def pull(self, remote, heads=None, force=False):
946 l = self.lock()
946 l = self.lock()
947
947
948 # if we have an empty repo, fetch everything
948 # if we have an empty repo, fetch everything
949 if self.changelog.tip() == nullid:
949 if self.changelog.tip() == nullid:
950 self.ui.status(_("requesting all changes\n"))
950 self.ui.status(_("requesting all changes\n"))
951 fetch = [nullid]
951 fetch = [nullid]
952 else:
952 else:
953 fetch = self.findincoming(remote, force=force)
953 fetch = self.findincoming(remote, force=force)
954
954
955 if not fetch:
955 if not fetch:
956 self.ui.status(_("no changes found\n"))
956 self.ui.status(_("no changes found\n"))
957 return 1
957 return 1
958
958
959 if heads is None:
959 if heads is None:
960 cg = remote.changegroup(fetch, 'pull')
960 cg = remote.changegroup(fetch, 'pull')
961 else:
961 else:
962 cg = remote.changegroupsubset(fetch, heads, 'pull')
962 cg = remote.changegroupsubset(fetch, heads, 'pull')
963 return self.addchangegroup(cg)
963 return self.addchangegroup(cg)
964
964
965 def push(self, remote, force=False, revs=None):
965 def push(self, remote, force=False, revs=None):
966 lock = remote.lock()
966 lock = remote.lock()
967
967
968 base = {}
968 base = {}
969 heads = remote.heads()
969 heads = remote.heads()
970 inc = self.findincoming(remote, base, heads, force=force)
970 inc = self.findincoming(remote, base, heads, force=force)
971 if not force and inc:
971 if not force and inc:
972 self.ui.warn(_("abort: unsynced remote changes!\n"))
972 self.ui.warn(_("abort: unsynced remote changes!\n"))
973 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
973 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
974 return 1
974 return 1
975
975
976 update = self.findoutgoing(remote, base)
976 update = self.findoutgoing(remote, base)
977 if revs is not None:
977 if revs is not None:
978 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
978 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
979 else:
979 else:
980 bases, heads = update, self.changelog.heads()
980 bases, heads = update, self.changelog.heads()
981
981
982 if not bases:
982 if not bases:
983 self.ui.status(_("no changes found\n"))
983 self.ui.status(_("no changes found\n"))
984 return 1
984 return 1
985 elif not force:
985 elif not force:
986 if len(bases) < len(heads):
986 if len(bases) < len(heads):
987 self.ui.warn(_("abort: push creates new remote branches!\n"))
987 self.ui.warn(_("abort: push creates new remote branches!\n"))
988 self.ui.status(_("(did you forget to merge?"
988 self.ui.status(_("(did you forget to merge?"
989 " use push -f to force)\n"))
989 " use push -f to force)\n"))
990 return 1
990 return 1
991
991
992 if revs is None:
992 if revs is None:
993 cg = self.changegroup(update, 'push')
993 cg = self.changegroup(update, 'push')
994 else:
994 else:
995 cg = self.changegroupsubset(update, revs, 'push')
995 cg = self.changegroupsubset(update, revs, 'push')
996 return remote.addchangegroup(cg)
996 return remote.addchangegroup(cg)
997
997
998 def changegroupsubset(self, bases, heads, source):
998 def changegroupsubset(self, bases, heads, source):
999 """This function generates a changegroup consisting of all the nodes
999 """This function generates a changegroup consisting of all the nodes
1000 that are descendents of any of the bases, and ancestors of any of
1000 that are descendents of any of the bases, and ancestors of any of
1001 the heads.
1001 the heads.
1002
1002
1003 It is fairly complex as determining which filenodes and which
1003 It is fairly complex as determining which filenodes and which
1004 manifest nodes need to be included for the changeset to be complete
1004 manifest nodes need to be included for the changeset to be complete
1005 is non-trivial.
1005 is non-trivial.
1006
1006
1007 Another wrinkle is doing the reverse, figuring out which changeset in
1007 Another wrinkle is doing the reverse, figuring out which changeset in
1008 the changegroup a particular filenode or manifestnode belongs to."""
1008 the changegroup a particular filenode or manifestnode belongs to."""
1009
1009
1010 self.hook('preoutgoing', throw=True, source=source)
1010 self.hook('preoutgoing', throw=True, source=source)
1011
1011
1012 # Set up some initial variables
1012 # Set up some initial variables
1013 # Make it easy to refer to self.changelog
1013 # Make it easy to refer to self.changelog
1014 cl = self.changelog
1014 cl = self.changelog
1015 # msng is short for missing - compute the list of changesets in this
1015 # msng is short for missing - compute the list of changesets in this
1016 # changegroup.
1016 # changegroup.
1017 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1017 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1018 # Some bases may turn out to be superfluous, and some heads may be
1018 # Some bases may turn out to be superfluous, and some heads may be
1019 # too. nodesbetween will return the minimal set of bases and heads
1019 # too. nodesbetween will return the minimal set of bases and heads
1020 # necessary to re-create the changegroup.
1020 # necessary to re-create the changegroup.
1021
1021
1022 # Known heads are the list of heads that it is assumed the recipient
1022 # Known heads are the list of heads that it is assumed the recipient
1023 # of this changegroup will know about.
1023 # of this changegroup will know about.
1024 knownheads = {}
1024 knownheads = {}
1025 # We assume that all parents of bases are known heads.
1025 # We assume that all parents of bases are known heads.
1026 for n in bases:
1026 for n in bases:
1027 for p in cl.parents(n):
1027 for p in cl.parents(n):
1028 if p != nullid:
1028 if p != nullid:
1029 knownheads[p] = 1
1029 knownheads[p] = 1
1030 knownheads = knownheads.keys()
1030 knownheads = knownheads.keys()
1031 if knownheads:
1031 if knownheads:
1032 # Now that we know what heads are known, we can compute which
1032 # Now that we know what heads are known, we can compute which
1033 # changesets are known. The recipient must know about all
1033 # changesets are known. The recipient must know about all
1034 # changesets required to reach the known heads from the null
1034 # changesets required to reach the known heads from the null
1035 # changeset.
1035 # changeset.
1036 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1036 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1037 junk = None
1037 junk = None
1038 # Transform the list into an ersatz set.
1038 # Transform the list into an ersatz set.
1039 has_cl_set = dict.fromkeys(has_cl_set)
1039 has_cl_set = dict.fromkeys(has_cl_set)
1040 else:
1040 else:
1041 # If there were no known heads, the recipient cannot be assumed to
1041 # If there were no known heads, the recipient cannot be assumed to
1042 # know about any changesets.
1042 # know about any changesets.
1043 has_cl_set = {}
1043 has_cl_set = {}
1044
1044
1045 # Make it easy to refer to self.manifest
1045 # Make it easy to refer to self.manifest
1046 mnfst = self.manifest
1046 mnfst = self.manifest
1047 # We don't know which manifests are missing yet
1047 # We don't know which manifests are missing yet
1048 msng_mnfst_set = {}
1048 msng_mnfst_set = {}
1049 # Nor do we know which filenodes are missing.
1049 # Nor do we know which filenodes are missing.
1050 msng_filenode_set = {}
1050 msng_filenode_set = {}
1051
1051
1052 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1052 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1053 junk = None
1053 junk = None
1054
1054
1055 # A changeset always belongs to itself, so the changenode lookup
1055 # A changeset always belongs to itself, so the changenode lookup
1056 # function for a changenode is identity.
1056 # function for a changenode is identity.
1057 def identity(x):
1057 def identity(x):
1058 return x
1058 return x
1059
1059
1060 # A function generating function. Sets up an environment for the
1060 # A function generating function. Sets up an environment for the
1061 # inner function.
1061 # inner function.
1062 def cmp_by_rev_func(revlog):
1062 def cmp_by_rev_func(revlog):
1063 # Compare two nodes by their revision number in the environment's
1063 # Compare two nodes by their revision number in the environment's
1064 # revision history. Since the revision number both represents the
1064 # revision history. Since the revision number both represents the
1065 # most efficient order to read the nodes in, and represents a
1065 # most efficient order to read the nodes in, and represents a
1066 # topological sorting of the nodes, this function is often useful.
1066 # topological sorting of the nodes, this function is often useful.
1067 def cmp_by_rev(a, b):
1067 def cmp_by_rev(a, b):
1068 return cmp(revlog.rev(a), revlog.rev(b))
1068 return cmp(revlog.rev(a), revlog.rev(b))
1069 return cmp_by_rev
1069 return cmp_by_rev
1070
1070
1071 # If we determine that a particular file or manifest node must be a
1071 # If we determine that a particular file or manifest node must be a
1072 # node that the recipient of the changegroup will already have, we can
1072 # node that the recipient of the changegroup will already have, we can
1073 # also assume the recipient will have all the parents. This function
1073 # also assume the recipient will have all the parents. This function
1074 # prunes them from the set of missing nodes.
1074 # prunes them from the set of missing nodes.
1075 def prune_parents(revlog, hasset, msngset):
1075 def prune_parents(revlog, hasset, msngset):
1076 haslst = hasset.keys()
1076 haslst = hasset.keys()
1077 haslst.sort(cmp_by_rev_func(revlog))
1077 haslst.sort(cmp_by_rev_func(revlog))
1078 for node in haslst:
1078 for node in haslst:
1079 parentlst = [p for p in revlog.parents(node) if p != nullid]
1079 parentlst = [p for p in revlog.parents(node) if p != nullid]
1080 while parentlst:
1080 while parentlst:
1081 n = parentlst.pop()
1081 n = parentlst.pop()
1082 if n not in hasset:
1082 if n not in hasset:
1083 hasset[n] = 1
1083 hasset[n] = 1
1084 p = [p for p in revlog.parents(n) if p != nullid]
1084 p = [p for p in revlog.parents(n) if p != nullid]
1085 parentlst.extend(p)
1085 parentlst.extend(p)
1086 for n in hasset:
1086 for n in hasset:
1087 msngset.pop(n, None)
1087 msngset.pop(n, None)
1088
1088
1089 # This is a function generating function used to set up an environment
1089 # This is a function generating function used to set up an environment
1090 # for the inner function to execute in.
1090 # for the inner function to execute in.
1091 def manifest_and_file_collector(changedfileset):
1091 def manifest_and_file_collector(changedfileset):
1092 # This is an information gathering function that gathers
1092 # This is an information gathering function that gathers
1093 # information from each changeset node that goes out as part of
1093 # information from each changeset node that goes out as part of
1094 # the changegroup. The information gathered is a list of which
1094 # the changegroup. The information gathered is a list of which
1095 # manifest nodes are potentially required (the recipient may
1095 # manifest nodes are potentially required (the recipient may
1096 # already have them) and total list of all files which were
1096 # already have them) and total list of all files which were
1097 # changed in any changeset in the changegroup.
1097 # changed in any changeset in the changegroup.
1098 #
1098 #
1099 # We also remember the first changenode we saw any manifest
1099 # We also remember the first changenode we saw any manifest
1100 # referenced by so we can later determine which changenode 'owns'
1100 # referenced by so we can later determine which changenode 'owns'
1101 # the manifest.
1101 # the manifest.
1102 def collect_manifests_and_files(clnode):
1102 def collect_manifests_and_files(clnode):
1103 c = cl.read(clnode)
1103 c = cl.read(clnode)
1104 for f in c[3]:
1104 for f in c[3]:
1105 # This is to make sure we only have one instance of each
1105 # This is to make sure we only have one instance of each
1106 # filename string for each filename.
1106 # filename string for each filename.
1107 changedfileset.setdefault(f, f)
1107 changedfileset.setdefault(f, f)
1108 msng_mnfst_set.setdefault(c[0], clnode)
1108 msng_mnfst_set.setdefault(c[0], clnode)
1109 return collect_manifests_and_files
1109 return collect_manifests_and_files
1110
1110
1111 # Figure out which manifest nodes (of the ones we think might be part
1111 # Figure out which manifest nodes (of the ones we think might be part
1112 # of the changegroup) the recipient must know about and remove them
1112 # of the changegroup) the recipient must know about and remove them
1113 # from the changegroup.
1113 # from the changegroup.
1114 def prune_manifests():
1114 def prune_manifests():
1115 has_mnfst_set = {}
1115 has_mnfst_set = {}
1116 for n in msng_mnfst_set:
1116 for n in msng_mnfst_set:
1117 # If a 'missing' manifest thinks it belongs to a changenode
1117 # If a 'missing' manifest thinks it belongs to a changenode
1118 # the recipient is assumed to have, obviously the recipient
1118 # the recipient is assumed to have, obviously the recipient
1119 # must have that manifest.
1119 # must have that manifest.
1120 linknode = cl.node(mnfst.linkrev(n))
1120 linknode = cl.node(mnfst.linkrev(n))
1121 if linknode in has_cl_set:
1121 if linknode in has_cl_set:
1122 has_mnfst_set[n] = 1
1122 has_mnfst_set[n] = 1
1123 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1123 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1124
1124
1125 # Use the information collected in collect_manifests_and_files to say
1125 # Use the information collected in collect_manifests_and_files to say
1126 # which changenode any manifestnode belongs to.
1126 # which changenode any manifestnode belongs to.
1127 def lookup_manifest_link(mnfstnode):
1127 def lookup_manifest_link(mnfstnode):
1128 return msng_mnfst_set[mnfstnode]
1128 return msng_mnfst_set[mnfstnode]
1129
1129
1130 # A function generating function that sets up the initial environment
1130 # A function generating function that sets up the initial environment
1131 # the inner function.
1131 # the inner function.
1132 def filenode_collector(changedfiles):
1132 def filenode_collector(changedfiles):
1133 next_rev = [0]
1133 next_rev = [0]
1134 # This gathers information from each manifestnode included in the
1134 # This gathers information from each manifestnode included in the
1135 # changegroup about which filenodes the manifest node references
1135 # changegroup about which filenodes the manifest node references
1136 # so we can include those in the changegroup too.
1136 # so we can include those in the changegroup too.
1137 #
1137 #
1138 # It also remembers which changenode each filenode belongs to. It
1138 # It also remembers which changenode each filenode belongs to. It
1139 # does this by assuming the a filenode belongs to the changenode
1139 # does this by assuming the a filenode belongs to the changenode
1140 # the first manifest that references it belongs to.
1140 # the first manifest that references it belongs to.
1141 def collect_msng_filenodes(mnfstnode):
1141 def collect_msng_filenodes(mnfstnode):
1142 r = mnfst.rev(mnfstnode)
1142 r = mnfst.rev(mnfstnode)
1143 if r == next_rev[0]:
1143 if r == next_rev[0]:
1144 # If the last rev we looked at was the one just previous,
1144 # If the last rev we looked at was the one just previous,
1145 # we only need to see a diff.
1145 # we only need to see a diff.
1146 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1146 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1147 # For each line in the delta
1147 # For each line in the delta
1148 for dline in delta.splitlines():
1148 for dline in delta.splitlines():
1149 # get the filename and filenode for that line
1149 # get the filename and filenode for that line
1150 f, fnode = dline.split('\0')
1150 f, fnode = dline.split('\0')
1151 fnode = bin(fnode[:40])
1151 fnode = bin(fnode[:40])
1152 f = changedfiles.get(f, None)
1152 f = changedfiles.get(f, None)
1153 # And if the file is in the list of files we care
1153 # And if the file is in the list of files we care
1154 # about.
1154 # about.
1155 if f is not None:
1155 if f is not None:
1156 # Get the changenode this manifest belongs to
1156 # Get the changenode this manifest belongs to
1157 clnode = msng_mnfst_set[mnfstnode]
1157 clnode = msng_mnfst_set[mnfstnode]
1158 # Create the set of filenodes for the file if
1158 # Create the set of filenodes for the file if
1159 # there isn't one already.
1159 # there isn't one already.
1160 ndset = msng_filenode_set.setdefault(f, {})
1160 ndset = msng_filenode_set.setdefault(f, {})
1161 # And set the filenode's changelog node to the
1161 # And set the filenode's changelog node to the
1162 # manifest's if it hasn't been set already.
1162 # manifest's if it hasn't been set already.
1163 ndset.setdefault(fnode, clnode)
1163 ndset.setdefault(fnode, clnode)
1164 else:
1164 else:
1165 # Otherwise we need a full manifest.
1165 # Otherwise we need a full manifest.
1166 m = mnfst.read(mnfstnode)
1166 m = mnfst.read(mnfstnode)
1167 # For every file in we care about.
1167 # For every file in we care about.
1168 for f in changedfiles:
1168 for f in changedfiles:
1169 fnode = m.get(f, None)
1169 fnode = m.get(f, None)
1170 # If it's in the manifest
1170 # If it's in the manifest
1171 if fnode is not None:
1171 if fnode is not None:
1172 # See comments above.
1172 # See comments above.
1173 clnode = msng_mnfst_set[mnfstnode]
1173 clnode = msng_mnfst_set[mnfstnode]
1174 ndset = msng_filenode_set.setdefault(f, {})
1174 ndset = msng_filenode_set.setdefault(f, {})
1175 ndset.setdefault(fnode, clnode)
1175 ndset.setdefault(fnode, clnode)
1176 # Remember the revision we hope to see next.
1176 # Remember the revision we hope to see next.
1177 next_rev[0] = r + 1
1177 next_rev[0] = r + 1
1178 return collect_msng_filenodes
1178 return collect_msng_filenodes
1179
1179
1180 # We have a list of filenodes we think we need for a file, lets remove
1180 # We have a list of filenodes we think we need for a file, lets remove
1181 # all those we now the recipient must have.
1181 # all those we now the recipient must have.
1182 def prune_filenodes(f, filerevlog):
1182 def prune_filenodes(f, filerevlog):
1183 msngset = msng_filenode_set[f]
1183 msngset = msng_filenode_set[f]
1184 hasset = {}
1184 hasset = {}
1185 # If a 'missing' filenode thinks it belongs to a changenode we
1185 # If a 'missing' filenode thinks it belongs to a changenode we
1186 # assume the recipient must have, then the recipient must have
1186 # assume the recipient must have, then the recipient must have
1187 # that filenode.
1187 # that filenode.
1188 for n in msngset:
1188 for n in msngset:
1189 clnode = cl.node(filerevlog.linkrev(n))
1189 clnode = cl.node(filerevlog.linkrev(n))
1190 if clnode in has_cl_set:
1190 if clnode in has_cl_set:
1191 hasset[n] = 1
1191 hasset[n] = 1
1192 prune_parents(filerevlog, hasset, msngset)
1192 prune_parents(filerevlog, hasset, msngset)
1193
1193
1194 # A function generator function that sets up the a context for the
1194 # A function generator function that sets up the a context for the
1195 # inner function.
1195 # inner function.
1196 def lookup_filenode_link_func(fname):
1196 def lookup_filenode_link_func(fname):
1197 msngset = msng_filenode_set[fname]
1197 msngset = msng_filenode_set[fname]
1198 # Lookup the changenode the filenode belongs to.
1198 # Lookup the changenode the filenode belongs to.
1199 def lookup_filenode_link(fnode):
1199 def lookup_filenode_link(fnode):
1200 return msngset[fnode]
1200 return msngset[fnode]
1201 return lookup_filenode_link
1201 return lookup_filenode_link
1202
1202
1203 # Now that we have all theses utility functions to help out and
1203 # Now that we have all theses utility functions to help out and
1204 # logically divide up the task, generate the group.
1204 # logically divide up the task, generate the group.
1205 def gengroup():
1205 def gengroup():
1206 # The set of changed files starts empty.
1206 # The set of changed files starts empty.
1207 changedfiles = {}
1207 changedfiles = {}
1208 # Create a changenode group generator that will call our functions
1208 # Create a changenode group generator that will call our functions
1209 # back to lookup the owning changenode and collect information.
1209 # back to lookup the owning changenode and collect information.
1210 group = cl.group(msng_cl_lst, identity,
1210 group = cl.group(msng_cl_lst, identity,
1211 manifest_and_file_collector(changedfiles))
1211 manifest_and_file_collector(changedfiles))
1212 for chnk in group:
1212 for chnk in group:
1213 yield chnk
1213 yield chnk
1214
1214
1215 # The list of manifests has been collected by the generator
1215 # The list of manifests has been collected by the generator
1216 # calling our functions back.
1216 # calling our functions back.
1217 prune_manifests()
1217 prune_manifests()
1218 msng_mnfst_lst = msng_mnfst_set.keys()
1218 msng_mnfst_lst = msng_mnfst_set.keys()
1219 # Sort the manifestnodes by revision number.
1219 # Sort the manifestnodes by revision number.
1220 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1220 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1221 # Create a generator for the manifestnodes that calls our lookup
1221 # Create a generator for the manifestnodes that calls our lookup
1222 # and data collection functions back.
1222 # and data collection functions back.
1223 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1223 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1224 filenode_collector(changedfiles))
1224 filenode_collector(changedfiles))
1225 for chnk in group:
1225 for chnk in group:
1226 yield chnk
1226 yield chnk
1227
1227
1228 # These are no longer needed, dereference and toss the memory for
1228 # These are no longer needed, dereference and toss the memory for
1229 # them.
1229 # them.
1230 msng_mnfst_lst = None
1230 msng_mnfst_lst = None
1231 msng_mnfst_set.clear()
1231 msng_mnfst_set.clear()
1232
1232
1233 changedfiles = changedfiles.keys()
1233 changedfiles = changedfiles.keys()
1234 changedfiles.sort()
1234 changedfiles.sort()
1235 # Go through all our files in order sorted by name.
1235 # Go through all our files in order sorted by name.
1236 for fname in changedfiles:
1236 for fname in changedfiles:
1237 filerevlog = self.file(fname)
1237 filerevlog = self.file(fname)
1238 # Toss out the filenodes that the recipient isn't really
1238 # Toss out the filenodes that the recipient isn't really
1239 # missing.
1239 # missing.
1240 if msng_filenode_set.has_key(fname):
1240 if msng_filenode_set.has_key(fname):
1241 prune_filenodes(fname, filerevlog)
1241 prune_filenodes(fname, filerevlog)
1242 msng_filenode_lst = msng_filenode_set[fname].keys()
1242 msng_filenode_lst = msng_filenode_set[fname].keys()
1243 else:
1243 else:
1244 msng_filenode_lst = []
1244 msng_filenode_lst = []
1245 # If any filenodes are left, generate the group for them,
1245 # If any filenodes are left, generate the group for them,
1246 # otherwise don't bother.
1246 # otherwise don't bother.
1247 if len(msng_filenode_lst) > 0:
1247 if len(msng_filenode_lst) > 0:
1248 yield changegroup.genchunk(fname)
1248 yield changegroup.genchunk(fname)
1249 # Sort the filenodes by their revision #
1249 # Sort the filenodes by their revision #
1250 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1250 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1251 # Create a group generator and only pass in a changenode
1251 # Create a group generator and only pass in a changenode
1252 # lookup function as we need to collect no information
1252 # lookup function as we need to collect no information
1253 # from filenodes.
1253 # from filenodes.
1254 group = filerevlog.group(msng_filenode_lst,
1254 group = filerevlog.group(msng_filenode_lst,
1255 lookup_filenode_link_func(fname))
1255 lookup_filenode_link_func(fname))
1256 for chnk in group:
1256 for chnk in group:
1257 yield chnk
1257 yield chnk
1258 if msng_filenode_set.has_key(fname):
1258 if msng_filenode_set.has_key(fname):
1259 # Don't need this anymore, toss it to free memory.
1259 # Don't need this anymore, toss it to free memory.
1260 del msng_filenode_set[fname]
1260 del msng_filenode_set[fname]
1261 # Signal that no more groups are left.
1261 # Signal that no more groups are left.
1262 yield changegroup.closechunk()
1262 yield changegroup.closechunk()
1263
1263
1264 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1264 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1265
1265
1266 return util.chunkbuffer(gengroup())
1266 return util.chunkbuffer(gengroup())
1267
1267
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        # Give hooks a chance to veto the outgoing transfer; throw=True
        # aborts on hook failure.
        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # All changesets that are descendants of basenodes (exclusive) up to
        # our heads: these are the nodes the recipient is missing.
        nodes = cl.nodesbetween(basenodes, None)[0]
        # Set of outgoing changelog revision numbers, used to decide which
        # manifest/file revisions belong to the outgoing changesets (a dict
        # is used as a set; this predates the set type).
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        # Changelog nodes are their own lookup key, so the "lookup" callback
        # for cl.group() is the identity function.
        def identity(x):
            return x

        # Yield, in revision order, the nodes of `revlog` whose linkrev
        # points at one of the outgoing changesets.
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # Returns a per-changeset callback that records every file touched
        # by the changeset into `changedfileset` (dict used as a set).
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                # c[3] is the list of files changed by this changeset.
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # Returns a lookup callback mapping a node of `revlog` to the
        # changelog node that introduced it (via its linkrev).
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        # The actual stream: changelog group, then manifest group, then one
        # named group per changed file, then a closing chunk.  The chunk
        # order here *is* the changegroup wire format; do not reorder.
        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            # `changedfiles` is fully populated only after the changelog
            # group above has been completely consumed.
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                # Materialize so we can test emptiness before emitting the
                # filename chunk.
                nodeiter = list(nodeiter)
                if nodeiter:
                    # A file group is introduced by a chunk carrying its name.
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            # Zero-length chunk terminates the stream.
            yield changegroup.closechunk()
            # NOTE: fires only once the consumer has drained the generator;
            # nodes[0] is the first (oldest) outgoing changeset.
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1331
1331
    def addchangegroup(self, source):
        """Add a changegroup read from `source` (a chunked stream as
        produced by changegroup()/changegroupsubset()) to this repo.

        The stream is consumed strictly in order: changelog group, manifest
        group, then one named group per changed file.  Everything is added
        inside a single transaction."""

        # Transaction-link callback for changelog chunks: the linkrev of an
        # incoming changeset is its own (future) revision number.  Also logs
        # each changeset as a debugging aid.
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return self.changelog.count()

        # Link callback for manifest/file chunks: map the changelog node
        # recorded in the chunk to its local revision number.
        def revmap(x):
            return self.changelog.rev(x)

        if not source:
            return

        self.hook('prechangegroup', throw=True)

        changesets = files = revisions = 0

        tr = self.transaction()

        # Remember the head count so we can report "(+N heads)" afterwards.
        oldheads = len(self.changelog.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        co = self.changelog.tip()           # old tip, before the pull
        chunkiter = changegroup.chunkiter(source)
        cn = self.changelog.addgroup(chunkiter, csmap, tr, 1) # unique
        cnr, cor = map(self.changelog.rev, (cn, co))
        if cn == nullid:
            # addgroup added nothing; treat new rev == old rev so the
            # changeset count below comes out as zero.
            cnr = cor
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        mm = self.manifest.tip()
        chunkiter = changegroup.chunkiter(source)
        mo = self.manifest.addgroup(chunkiter, revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            # Each file group is preceded by a chunk holding the filename;
            # an empty chunk marks the end of the stream.
            f = changegroup.getchunk(source)
            if not f:
                break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            chunkiter = changegroup.chunkiter(source)
            n = fl.addgroup(chunkiter, revmap, tr)
            revisions += fl.count() - o
            files += 1

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads > oldheads:
            heads = _(" (+%d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        # Pre-commit hook: may still abort (throw=True) and roll the
        # transaction back.  cor+1 is the first newly-added revision.
        # NOTE(review): if the group added no changesets, cor+1 points past
        # the old tip — looks like this assumes a non-empty group; confirm.
        self.hook('pretxnchangegroup', throw=True,
                  node=hex(self.changelog.node(cor+1)))

        tr.close()

        if changesets > 0:
            # Post-commit hooks: one 'changegroup' for the batch (keyed on
            # the first new changeset), then one 'incoming' per changeset.
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)))

            for i in range(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)))
1401
1401
1402 def update(self, node, allow=False, force=False, choose=None,
1402 def update(self, node, allow=False, force=False, choose=None,
1403 moddirstate=True, forcemerge=False, wlock=None):
1403 moddirstate=True, forcemerge=False, wlock=None):
1404 pl = self.dirstate.parents()
1404 pl = self.dirstate.parents()
1405 if not force and pl[1] != nullid:
1405 if not force and pl[1] != nullid:
1406 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1406 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1407 return 1
1407 return 1
1408
1408
1409 err = False
1409 err = False
1410
1410
1411 p1, p2 = pl[0], node
1411 p1, p2 = pl[0], node
1412 pa = self.changelog.ancestor(p1, p2)
1412 pa = self.changelog.ancestor(p1, p2)
1413 m1n = self.changelog.read(p1)[0]
1413 m1n = self.changelog.read(p1)[0]
1414 m2n = self.changelog.read(p2)[0]
1414 m2n = self.changelog.read(p2)[0]
1415 man = self.manifest.ancestor(m1n, m2n)
1415 man = self.manifest.ancestor(m1n, m2n)
1416 m1 = self.manifest.read(m1n)
1416 m1 = self.manifest.read(m1n)
1417 mf1 = self.manifest.readflags(m1n)
1417 mf1 = self.manifest.readflags(m1n)
1418 m2 = self.manifest.read(m2n).copy()
1418 m2 = self.manifest.read(m2n).copy()
1419 mf2 = self.manifest.readflags(m2n)
1419 mf2 = self.manifest.readflags(m2n)
1420 ma = self.manifest.read(man)
1420 ma = self.manifest.read(man)
1421 mfa = self.manifest.readflags(man)
1421 mfa = self.manifest.readflags(man)
1422
1422
1423 modified, added, removed, deleted, unknown = self.changes()
1423 modified, added, removed, deleted, unknown = self.changes()
1424
1424
1425 # is this a jump, or a merge? i.e. is there a linear path
1425 # is this a jump, or a merge? i.e. is there a linear path
1426 # from p1 to p2?
1426 # from p1 to p2?
1427 linear_path = (pa == p1 or pa == p2)
1427 linear_path = (pa == p1 or pa == p2)
1428
1428
1429 if allow and linear_path:
1429 if allow and linear_path:
1430 raise util.Abort(_("there is nothing to merge, "
1430 raise util.Abort(_("there is nothing to merge, "
1431 "just use 'hg update'"))
1431 "just use 'hg update'"))
1432 if allow and not forcemerge:
1432 if allow and not forcemerge:
1433 if modified or added or removed:
1433 if modified or added or removed:
1434 raise util.Abort(_("outstanding uncommited changes"))
1434 raise util.Abort(_("outstanding uncommited changes"))
1435 if not forcemerge and not force:
1435 if not forcemerge and not force:
1436 for f in unknown:
1436 for f in unknown:
1437 if f in m2:
1437 if f in m2:
1438 t1 = self.wread(f)
1438 t1 = self.wread(f)
1439 t2 = self.file(f).read(m2[f])
1439 t2 = self.file(f).read(m2[f])
1440 if cmp(t1, t2) != 0:
1440 if cmp(t1, t2) != 0:
1441 raise util.Abort(_("'%s' already exists in the working"
1441 raise util.Abort(_("'%s' already exists in the working"
1442 " dir and differs from remote") % f)
1442 " dir and differs from remote") % f)
1443
1443
1444 # resolve the manifest to determine which files
1444 # resolve the manifest to determine which files
1445 # we care about merging
1445 # we care about merging
1446 self.ui.note(_("resolving manifests\n"))
1446 self.ui.note(_("resolving manifests\n"))
1447 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1447 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1448 (force, allow, moddirstate, linear_path))
1448 (force, allow, moddirstate, linear_path))
1449 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1449 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1450 (short(man), short(m1n), short(m2n)))
1450 (short(man), short(m1n), short(m2n)))
1451
1451
1452 merge = {}
1452 merge = {}
1453 get = {}
1453 get = {}
1454 remove = []
1454 remove = []
1455
1455
1456 # construct a working dir manifest
1456 # construct a working dir manifest
1457 mw = m1.copy()
1457 mw = m1.copy()
1458 mfw = mf1.copy()
1458 mfw = mf1.copy()
1459 umap = dict.fromkeys(unknown)
1459 umap = dict.fromkeys(unknown)
1460
1460
1461 for f in added + modified + unknown:
1461 for f in added + modified + unknown:
1462 mw[f] = ""
1462 mw[f] = ""
1463 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1463 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1464
1464
1465 if moddirstate and not wlock:
1465 if moddirstate and not wlock:
1466 wlock = self.wlock()
1466 wlock = self.wlock()
1467
1467
1468 for f in deleted + removed:
1468 for f in deleted + removed:
1469 if f in mw:
1469 if f in mw:
1470 del mw[f]
1470 del mw[f]
1471
1471
1472 # If we're jumping between revisions (as opposed to merging),
1472 # If we're jumping between revisions (as opposed to merging),
1473 # and if neither the working directory nor the target rev has
1473 # and if neither the working directory nor the target rev has
1474 # the file, then we need to remove it from the dirstate, to
1474 # the file, then we need to remove it from the dirstate, to
1475 # prevent the dirstate from listing the file when it is no
1475 # prevent the dirstate from listing the file when it is no
1476 # longer in the manifest.
1476 # longer in the manifest.
1477 if moddirstate and linear_path and f not in m2:
1477 if moddirstate and linear_path and f not in m2:
1478 self.dirstate.forget((f,))
1478 self.dirstate.forget((f,))
1479
1479
1480 # Compare manifests
1480 # Compare manifests
1481 for f, n in mw.iteritems():
1481 for f, n in mw.iteritems():
1482 if choose and not choose(f):
1482 if choose and not choose(f):
1483 continue
1483 continue
1484 if f in m2:
1484 if f in m2:
1485 s = 0
1485 s = 0
1486
1486
1487 # is the wfile new since m1, and match m2?
1487 # is the wfile new since m1, and match m2?
1488 if f not in m1:
1488 if f not in m1:
1489 t1 = self.wread(f)
1489 t1 = self.wread(f)
1490 t2 = self.file(f).read(m2[f])
1490 t2 = self.file(f).read(m2[f])
1491 if cmp(t1, t2) == 0:
1491 if cmp(t1, t2) == 0:
1492 n = m2[f]
1492 n = m2[f]
1493 del t1, t2
1493 del t1, t2
1494
1494
1495 # are files different?
1495 # are files different?
1496 if n != m2[f]:
1496 if n != m2[f]:
1497 a = ma.get(f, nullid)
1497 a = ma.get(f, nullid)
1498 # are both different from the ancestor?
1498 # are both different from the ancestor?
1499 if n != a and m2[f] != a:
1499 if n != a and m2[f] != a:
1500 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1500 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1501 # merge executable bits
1501 # merge executable bits
1502 # "if we changed or they changed, change in merge"
1502 # "if we changed or they changed, change in merge"
1503 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1503 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1504 mode = ((a^b) | (a^c)) ^ a
1504 mode = ((a^b) | (a^c)) ^ a
1505 merge[f] = (m1.get(f, nullid), m2[f], mode)
1505 merge[f] = (m1.get(f, nullid), m2[f], mode)
1506 s = 1
1506 s = 1
1507 # are we clobbering?
1507 # are we clobbering?
1508 # is remote's version newer?
1508 # is remote's version newer?
1509 # or are we going back in time?
1509 # or are we going back in time?
1510 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1510 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1511 self.ui.debug(_(" remote %s is newer, get\n") % f)
1511 self.ui.debug(_(" remote %s is newer, get\n") % f)
1512 get[f] = m2[f]
1512 get[f] = m2[f]
1513 s = 1
1513 s = 1
1514 elif f in umap:
1514 elif f in umap:
1515 # this unknown file is the same as the checkout
1515 # this unknown file is the same as the checkout
1516 get[f] = m2[f]
1516 get[f] = m2[f]
1517
1517
1518 if not s and mfw[f] != mf2[f]:
1518 if not s and mfw[f] != mf2[f]:
1519 if force:
1519 if force:
1520 self.ui.debug(_(" updating permissions for %s\n") % f)
1520 self.ui.debug(_(" updating permissions for %s\n") % f)
1521 util.set_exec(self.wjoin(f), mf2[f])
1521 util.set_exec(self.wjoin(f), mf2[f])
1522 else:
1522 else:
1523 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1523 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1524 mode = ((a^b) | (a^c)) ^ a
1524 mode = ((a^b) | (a^c)) ^ a
1525 if mode != b:
1525 if mode != b:
1526 self.ui.debug(_(" updating permissions for %s\n")
1526 self.ui.debug(_(" updating permissions for %s\n")
1527 % f)
1527 % f)
1528 util.set_exec(self.wjoin(f), mode)
1528 util.set_exec(self.wjoin(f), mode)
1529 del m2[f]
1529 del m2[f]
1530 elif f in ma:
1530 elif f in ma:
1531 if n != ma[f]:
1531 if n != ma[f]:
1532 r = _("d")
1532 r = _("d")
1533 if not force and (linear_path or allow):
1533 if not force and (linear_path or allow):
1534 r = self.ui.prompt(
1534 r = self.ui.prompt(
1535 (_(" local changed %s which remote deleted\n") % f) +
1535 (_(" local changed %s which remote deleted\n") % f) +
1536 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1536 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1537 if r == _("d"):
1537 if r == _("d"):
1538 remove.append(f)
1538 remove.append(f)
1539 else:
1539 else:
1540 self.ui.debug(_("other deleted %s\n") % f)
1540 self.ui.debug(_("other deleted %s\n") % f)
1541 remove.append(f) # other deleted it
1541 remove.append(f) # other deleted it
1542 else:
1542 else:
1543 # file is created on branch or in working directory
1543 # file is created on branch or in working directory
1544 if force and f not in umap:
1544 if force and f not in umap:
1545 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1545 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1546 remove.append(f)
1546 remove.append(f)
1547 elif n == m1.get(f, nullid): # same as parent
1547 elif n == m1.get(f, nullid): # same as parent
1548 if p2 == pa: # going backwards?
1548 if p2 == pa: # going backwards?
1549 self.ui.debug(_("remote deleted %s\n") % f)
1549 self.ui.debug(_("remote deleted %s\n") % f)
1550 remove.append(f)
1550 remove.append(f)
1551 else:
1551 else:
1552 self.ui.debug(_("local modified %s, keeping\n") % f)
1552 self.ui.debug(_("local modified %s, keeping\n") % f)
1553 else:
1553 else:
1554 self.ui.debug(_("working dir created %s, keeping\n") % f)
1554 self.ui.debug(_("working dir created %s, keeping\n") % f)
1555
1555
1556 for f, n in m2.iteritems():
1556 for f, n in m2.iteritems():
1557 if choose and not choose(f):
1557 if choose and not choose(f):
1558 continue
1558 continue
1559 if f[0] == "/":
1559 if f[0] == "/":
1560 continue
1560 continue
1561 if f in ma and n != ma[f]:
1561 if f in ma and n != ma[f]:
1562 r = _("k")
1562 r = _("k")
1563 if not force and (linear_path or allow):
1563 if not force and (linear_path or allow):
1564 r = self.ui.prompt(
1564 r = self.ui.prompt(
1565 (_("remote changed %s which local deleted\n") % f) +
1565 (_("remote changed %s which local deleted\n") % f) +
1566 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1566 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1567 if r == _("k"):
1567 if r == _("k"):
1568 get[f] = n
1568 get[f] = n
1569 elif f not in ma:
1569 elif f not in ma:
1570 self.ui.debug(_("remote created %s\n") % f)
1570 self.ui.debug(_("remote created %s\n") % f)
1571 get[f] = n
1571 get[f] = n
1572 else:
1572 else:
1573 if force or p2 == pa: # going backwards?
1573 if force or p2 == pa: # going backwards?
1574 self.ui.debug(_("local deleted %s, recreating\n") % f)
1574 self.ui.debug(_("local deleted %s, recreating\n") % f)
1575 get[f] = n
1575 get[f] = n
1576 else:
1576 else:
1577 self.ui.debug(_("local deleted %s\n") % f)
1577 self.ui.debug(_("local deleted %s\n") % f)
1578
1578
1579 del mw, m1, m2, ma
1579 del mw, m1, m2, ma
1580
1580
1581 if force:
1581 if force:
1582 for f in merge:
1582 for f in merge:
1583 get[f] = merge[f][1]
1583 get[f] = merge[f][1]
1584 merge = {}
1584 merge = {}
1585
1585
1586 if linear_path or force:
1586 if linear_path or force:
1587 # we don't need to do any magic, just jump to the new rev
1587 # we don't need to do any magic, just jump to the new rev
1588 branch_merge = False
1588 branch_merge = False
1589 p1, p2 = p2, nullid
1589 p1, p2 = p2, nullid
1590 else:
1590 else:
1591 if not allow:
1591 if not allow:
1592 self.ui.status(_("this update spans a branch"
1592 self.ui.status(_("this update spans a branch"
1593 " affecting the following files:\n"))
1593 " affecting the following files:\n"))
1594 fl = merge.keys() + get.keys()
1594 fl = merge.keys() + get.keys()
1595 fl.sort()
1595 fl.sort()
1596 for f in fl:
1596 for f in fl:
1597 cf = ""
1597 cf = ""
1598 if f in merge:
1598 if f in merge:
1599 cf = _(" (resolve)")
1599 cf = _(" (resolve)")
1600 self.ui.status(" %s%s\n" % (f, cf))
1600 self.ui.status(" %s%s\n" % (f, cf))
1601 self.ui.warn(_("aborting update spanning branches!\n"))
1601 self.ui.warn(_("aborting update spanning branches!\n"))
1602 self.ui.status(_("(use update -m to merge across branches"
1602 self.ui.status(_("(use update -m to merge across branches"
1603 " or -C to lose changes)\n"))
1603 " or -C to lose changes)\n"))
1604 return 1
1604 return 1
1605 branch_merge = True
1605 branch_merge = True
1606
1606
1607 # get the files we don't need to change
1607 # get the files we don't need to change
1608 files = get.keys()
1608 files = get.keys()
1609 files.sort()
1609 files.sort()
1610 for f in files:
1610 for f in files:
1611 if f[0] == "/":
1611 if f[0] == "/":
1612 continue
1612 continue
1613 self.ui.note(_("getting %s\n") % f)
1613 self.ui.note(_("getting %s\n") % f)
1614 t = self.file(f).read(get[f])
1614 t = self.file(f).read(get[f])
1615 self.wwrite(f, t)
1615 self.wwrite(f, t)
1616 util.set_exec(self.wjoin(f), mf2[f])
1616 util.set_exec(self.wjoin(f), mf2[f])
1617 if moddirstate:
1617 if moddirstate:
1618 if branch_merge:
1618 if branch_merge:
1619 self.dirstate.update([f], 'n', st_mtime=-1)
1619 self.dirstate.update([f], 'n', st_mtime=-1)
1620 else:
1620 else:
1621 self.dirstate.update([f], 'n')
1621 self.dirstate.update([f], 'n')
1622
1622
1623 # merge the tricky bits
1623 # merge the tricky bits
1624 failedmerge = []
1624 failedmerge = []
1625 files = merge.keys()
1625 files = merge.keys()
1626 files.sort()
1626 files.sort()
1627 xp1 = hex(p1)
1627 xp1 = hex(p1)
1628 xp2 = hex(p2)
1628 xp2 = hex(p2)
1629 for f in files:
1629 for f in files:
1630 self.ui.status(_("merging %s\n") % f)
1630 self.ui.status(_("merging %s\n") % f)
1631 my, other, flag = merge[f]
1631 my, other, flag = merge[f]
1632 ret = self.merge3(f, my, other, xp1, xp2)
1632 ret = self.merge3(f, my, other, xp1, xp2)
1633 if ret:
1633 if ret:
1634 err = True
1634 err = True
1635 failedmerge.append(f)
1635 failedmerge.append(f)
1636 util.set_exec(self.wjoin(f), flag)
1636 util.set_exec(self.wjoin(f), flag)
1637 if moddirstate:
1637 if moddirstate:
1638 if branch_merge:
1638 if branch_merge:
1639 # We've done a branch merge, mark this file as merged
1639 # We've done a branch merge, mark this file as merged
1640 # so that we properly record the merger later
1640 # so that we properly record the merger later
1641 self.dirstate.update([f], 'm')
1641 self.dirstate.update([f], 'm')
1642 else:
1642 else:
1643 # We've update-merged a locally modified file, so
1643 # We've update-merged a locally modified file, so
1644 # we set the dirstate to emulate a normal checkout
1644 # we set the dirstate to emulate a normal checkout
1645 # of that file some time in the past. Thus our
1645 # of that file some time in the past. Thus our
1646 # merge will appear as a normal local file
1646 # merge will appear as a normal local file
1647 # modification.
1647 # modification.
1648 f_len = len(self.file(f).read(other))
1648 f_len = len(self.file(f).read(other))
1649 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1649 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1650
1650
1651 remove.sort()
1651 remove.sort()
1652 for f in remove:
1652 for f in remove:
1653 self.ui.note(_("removing %s\n") % f)
1653 self.ui.note(_("removing %s\n") % f)
1654 util.audit_path(f)
1654 util.audit_path(f)
1655 try:
1655 try:
1656 util.unlink(self.wjoin(f))
1656 util.unlink(self.wjoin(f))
1657 except OSError, inst:
1657 except OSError, inst:
1658 if inst.errno != errno.ENOENT:
1658 if inst.errno != errno.ENOENT:
1659 self.ui.warn(_("update failed to remove %s: %s!\n") %
1659 self.ui.warn(_("update failed to remove %s: %s!\n") %
1660 (f, inst.strerror))
1660 (f, inst.strerror))
1661 if moddirstate:
1661 if moddirstate:
1662 if branch_merge:
1662 if branch_merge:
1663 self.dirstate.update(remove, 'r')
1663 self.dirstate.update(remove, 'r')
1664 else:
1664 else:
1665 self.dirstate.forget(remove)
1665 self.dirstate.forget(remove)
1666
1666
1667 if moddirstate:
1667 if moddirstate:
1668 self.dirstate.setparents(p1, p2)
1668 self.dirstate.setparents(p1, p2)
1669
1669
1670 stat = ((len(get), _("updated")),
1670 stat = ((len(get), _("updated")),
1671 (len(merge) - len(failedmerge), _("merged")),
1671 (len(merge) - len(failedmerge), _("merged")),
1672 (len(remove), _("removed")),
1672 (len(remove), _("removed")),
1673 (len(failedmerge), _("unresolved")))
1673 (len(failedmerge), _("unresolved")))
1674 note = ", ".join([_("%d files %s") % s for s in stat])
1674 note = ", ".join([_("%d files %s") % s for s in stat])
1675 self.ui.note("%s\n" % note)
1675 self.ui.note("%s\n" % note)
1676 if moddirstate and branch_merge:
1676 if moddirstate and branch_merge:
1677 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1677 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1678
1678
1679 return err
1679 return err
1680
1680
1681 def merge3(self, fn, my, other, p1, p2):
1681 def merge3(self, fn, my, other, p1, p2):
1682 """perform a 3-way merge in the working directory"""
1682 """perform a 3-way merge in the working directory"""
1683
1683
1684 def temp(prefix, node):
1684 def temp(prefix, node):
1685 pre = "%s~%s." % (os.path.basename(fn), prefix)
1685 pre = "%s~%s." % (os.path.basename(fn), prefix)
1686 (fd, name) = tempfile.mkstemp("", pre)
1686 (fd, name) = tempfile.mkstemp("", pre)
1687 f = os.fdopen(fd, "wb")
1687 f = os.fdopen(fd, "wb")
1688 self.wwrite(fn, fl.read(node), f)
1688 self.wwrite(fn, fl.read(node), f)
1689 f.close()
1689 f.close()
1690 return name
1690 return name
1691
1691
1692 fl = self.file(fn)
1692 fl = self.file(fn)
1693 base = fl.ancestor(my, other)
1693 base = fl.ancestor(my, other)
1694 a = self.wjoin(fn)
1694 a = self.wjoin(fn)
1695 b = temp("base", base)
1695 b = temp("base", base)
1696 c = temp("other", other)
1696 c = temp("other", other)
1697
1697
1698 self.ui.note(_("resolving %s\n") % fn)
1698 self.ui.note(_("resolving %s\n") % fn)
1699 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1699 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1700 (fn, short(my), short(other), short(base)))
1700 (fn, short(my), short(other), short(base)))
1701
1701
1702 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1702 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1703 or "hgmerge")
1703 or "hgmerge")
1704 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1704 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1705 environ={'HG_FILE': fn,
1705 environ={'HG_FILE': fn,
1706 'HG_MY_NODE': p1,
1706 'HG_MY_NODE': p1,
1707 'HG_OTHER_NODE': p2,
1707 'HG_OTHER_NODE': p2,
1708 'HG_FILE_MY_NODE': hex(my),
1708 'HG_FILE_MY_NODE': hex(my),
1709 'HG_FILE_OTHER_NODE': hex(other),
1709 'HG_FILE_OTHER_NODE': hex(other),
1710 'HG_FILE_BASE_NODE': hex(base)})
1710 'HG_FILE_BASE_NODE': hex(base)})
1711 if r:
1711 if r:
1712 self.ui.warn(_("merging %s failed!\n") % fn)
1712 self.ui.warn(_("merging %s failed!\n") % fn)
1713
1713
1714 os.unlink(b)
1714 os.unlink(b)
1715 os.unlink(c)
1715 os.unlink(c)
1716 return r
1716 return r
1717
1717
1718 def verify(self):
1718 def verify(self):
1719 filelinkrevs = {}
1719 filelinkrevs = {}
1720 filenodes = {}
1720 filenodes = {}
1721 changesets = revisions = files = 0
1721 changesets = revisions = files = 0
1722 errors = [0]
1722 errors = [0]
1723 neededmanifests = {}
1723 neededmanifests = {}
1724
1724
1725 def err(msg):
1725 def err(msg):
1726 self.ui.warn(msg + "\n")
1726 self.ui.warn(msg + "\n")
1727 errors[0] += 1
1727 errors[0] += 1
1728
1728
1729 def checksize(obj, name):
1729 def checksize(obj, name):
1730 d = obj.checksize()
1730 d = obj.checksize()
1731 if d[0]:
1731 if d[0]:
1732 err(_("%s data length off by %d bytes") % (name, d[0]))
1732 err(_("%s data length off by %d bytes") % (name, d[0]))
1733 if d[1]:
1733 if d[1]:
1734 err(_("%s index contains %d extra bytes") % (name, d[1]))
1734 err(_("%s index contains %d extra bytes") % (name, d[1]))
1735
1735
1736 seen = {}
1736 seen = {}
1737 self.ui.status(_("checking changesets\n"))
1737 self.ui.status(_("checking changesets\n"))
1738 checksize(self.changelog, "changelog")
1738 checksize(self.changelog, "changelog")
1739
1739
1740 for i in range(self.changelog.count()):
1740 for i in range(self.changelog.count()):
1741 changesets += 1
1741 changesets += 1
1742 n = self.changelog.node(i)
1742 n = self.changelog.node(i)
1743 l = self.changelog.linkrev(n)
1743 l = self.changelog.linkrev(n)
1744 if l != i:
1744 if l != i:
1745 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1745 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1746 if n in seen:
1746 if n in seen:
1747 err(_("duplicate changeset at revision %d") % i)
1747 err(_("duplicate changeset at revision %d") % i)
1748 seen[n] = 1
1748 seen[n] = 1
1749
1749
1750 for p in self.changelog.parents(n):
1750 for p in self.changelog.parents(n):
1751 if p not in self.changelog.nodemap:
1751 if p not in self.changelog.nodemap:
1752 err(_("changeset %s has unknown parent %s") %
1752 err(_("changeset %s has unknown parent %s") %
1753 (short(n), short(p)))
1753 (short(n), short(p)))
1754 try:
1754 try:
1755 changes = self.changelog.read(n)
1755 changes = self.changelog.read(n)
1756 except KeyboardInterrupt:
1756 except KeyboardInterrupt:
1757 self.ui.warn(_("interrupted"))
1757 self.ui.warn(_("interrupted"))
1758 raise
1758 raise
1759 except Exception, inst:
1759 except Exception, inst:
1760 err(_("unpacking changeset %s: %s") % (short(n), inst))
1760 err(_("unpacking changeset %s: %s") % (short(n), inst))
1761 continue
1761 continue
1762
1762
1763 neededmanifests[changes[0]] = n
1763 neededmanifests[changes[0]] = n
1764
1764
1765 for f in changes[3]:
1765 for f in changes[3]:
1766 filelinkrevs.setdefault(f, []).append(i)
1766 filelinkrevs.setdefault(f, []).append(i)
1767
1767
1768 seen = {}
1768 seen = {}
1769 self.ui.status(_("checking manifests\n"))
1769 self.ui.status(_("checking manifests\n"))
1770 checksize(self.manifest, "manifest")
1770 checksize(self.manifest, "manifest")
1771
1771
1772 for i in range(self.manifest.count()):
1772 for i in range(self.manifest.count()):
1773 n = self.manifest.node(i)
1773 n = self.manifest.node(i)
1774 l = self.manifest.linkrev(n)
1774 l = self.manifest.linkrev(n)
1775
1775
1776 if l < 0 or l >= self.changelog.count():
1776 if l < 0 or l >= self.changelog.count():
1777 err(_("bad manifest link (%d) at revision %d") % (l, i))
1777 err(_("bad manifest link (%d) at revision %d") % (l, i))
1778
1778
1779 if n in neededmanifests:
1779 if n in neededmanifests:
1780 del neededmanifests[n]
1780 del neededmanifests[n]
1781
1781
1782 if n in seen:
1782 if n in seen:
1783 err(_("duplicate manifest at revision %d") % i)
1783 err(_("duplicate manifest at revision %d") % i)
1784
1784
1785 seen[n] = 1
1785 seen[n] = 1
1786
1786
1787 for p in self.manifest.parents(n):
1787 for p in self.manifest.parents(n):
1788 if p not in self.manifest.nodemap:
1788 if p not in self.manifest.nodemap:
1789 err(_("manifest %s has unknown parent %s") %
1789 err(_("manifest %s has unknown parent %s") %
1790 (short(n), short(p)))
1790 (short(n), short(p)))
1791
1791
1792 try:
1792 try:
1793 delta = mdiff.patchtext(self.manifest.delta(n))
1793 delta = mdiff.patchtext(self.manifest.delta(n))
1794 except KeyboardInterrupt:
1794 except KeyboardInterrupt:
1795 self.ui.warn(_("interrupted"))
1795 self.ui.warn(_("interrupted"))
1796 raise
1796 raise
1797 except Exception, inst:
1797 except Exception, inst:
1798 err(_("unpacking manifest %s: %s") % (short(n), inst))
1798 err(_("unpacking manifest %s: %s") % (short(n), inst))
1799 continue
1799 continue
1800
1800
1801 try:
1801 try:
1802 ff = [ l.split('\0') for l in delta.splitlines() ]
1802 ff = [ l.split('\0') for l in delta.splitlines() ]
1803 for f, fn in ff:
1803 for f, fn in ff:
1804 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1804 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1805 except (ValueError, TypeError), inst:
1805 except (ValueError, TypeError), inst:
1806 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1806 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1807
1807
1808 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1808 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1809
1809
1810 for m, c in neededmanifests.items():
1810 for m, c in neededmanifests.items():
1811 err(_("Changeset %s refers to unknown manifest %s") %
1811 err(_("Changeset %s refers to unknown manifest %s") %
1812 (short(m), short(c)))
1812 (short(m), short(c)))
1813 del neededmanifests
1813 del neededmanifests
1814
1814
1815 for f in filenodes:
1815 for f in filenodes:
1816 if f not in filelinkrevs:
1816 if f not in filelinkrevs:
1817 err(_("file %s in manifest but not in changesets") % f)
1817 err(_("file %s in manifest but not in changesets") % f)
1818
1818
1819 for f in filelinkrevs:
1819 for f in filelinkrevs:
1820 if f not in filenodes:
1820 if f not in filenodes:
1821 err(_("file %s in changeset but not in manifest") % f)
1821 err(_("file %s in changeset but not in manifest") % f)
1822
1822
1823 self.ui.status(_("checking files\n"))
1823 self.ui.status(_("checking files\n"))
1824 ff = filenodes.keys()
1824 ff = filenodes.keys()
1825 ff.sort()
1825 ff.sort()
1826 for f in ff:
1826 for f in ff:
1827 if f == "/dev/null":
1827 if f == "/dev/null":
1828 continue
1828 continue
1829 files += 1
1829 files += 1
1830 if not f:
1830 if not f:
1831 err(_("file without name in manifest %s") % short(n))
1831 err(_("file without name in manifest %s") % short(n))
1832 continue
1832 continue
1833 fl = self.file(f)
1833 fl = self.file(f)
1834 checksize(fl, f)
1834 checksize(fl, f)
1835
1835
1836 nodes = {nullid: 1}
1836 nodes = {nullid: 1}
1837 seen = {}
1837 seen = {}
1838 for i in range(fl.count()):
1838 for i in range(fl.count()):
1839 revisions += 1
1839 revisions += 1
1840 n = fl.node(i)
1840 n = fl.node(i)
1841
1841
1842 if n in seen:
1842 if n in seen:
1843 err(_("%s: duplicate revision %d") % (f, i))
1843 err(_("%s: duplicate revision %d") % (f, i))
1844 if n not in filenodes[f]:
1844 if n not in filenodes[f]:
1845 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1845 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1846 else:
1846 else:
1847 del filenodes[f][n]
1847 del filenodes[f][n]
1848
1848
1849 flr = fl.linkrev(n)
1849 flr = fl.linkrev(n)
1850 if flr not in filelinkrevs.get(f, []):
1850 if flr not in filelinkrevs.get(f, []):
1851 err(_("%s:%s points to unexpected changeset %d")
1851 err(_("%s:%s points to unexpected changeset %d")
1852 % (f, short(n), flr))
1852 % (f, short(n), flr))
1853 else:
1853 else:
1854 filelinkrevs[f].remove(flr)
1854 filelinkrevs[f].remove(flr)
1855
1855
1856 # verify contents
1856 # verify contents
1857 try:
1857 try:
1858 t = fl.read(n)
1858 t = fl.read(n)
1859 except KeyboardInterrupt:
1859 except KeyboardInterrupt:
1860 self.ui.warn(_("interrupted"))
1860 self.ui.warn(_("interrupted"))
1861 raise
1861 raise
1862 except Exception, inst:
1862 except Exception, inst:
1863 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1863 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1864
1864
1865 # verify parents
1865 # verify parents
1866 (p1, p2) = fl.parents(n)
1866 (p1, p2) = fl.parents(n)
1867 if p1 not in nodes:
1867 if p1 not in nodes:
1868 err(_("file %s:%s unknown parent 1 %s") %
1868 err(_("file %s:%s unknown parent 1 %s") %
1869 (f, short(n), short(p1)))
1869 (f, short(n), short(p1)))
1870 if p2 not in nodes:
1870 if p2 not in nodes:
1871 err(_("file %s:%s unknown parent 2 %s") %
1871 err(_("file %s:%s unknown parent 2 %s") %
1872 (f, short(n), short(p1)))
1872 (f, short(n), short(p1)))
1873 nodes[n] = 1
1873 nodes[n] = 1
1874
1874
1875 # cross-check
1875 # cross-check
1876 for node in filenodes[f]:
1876 for node in filenodes[f]:
1877 err(_("node %s in manifests not in %s") % (hex(node), f))
1877 err(_("node %s in manifests not in %s") % (hex(node), f))
1878
1878
1879 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1879 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1880 (files, changesets, revisions))
1880 (files, changesets, revisions))
1881
1881
1882 if errors[0]:
1882 if errors[0]:
1883 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1883 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1884 return 1
1884 return 1
1885
1885
# used to avoid circular references so destructors work
def aftertrans(base):
    """Return a callback that promotes journal files to undo files.

    Only the plain path string is captured by the closure (never a
    repository object), so holding the callback cannot create a
    reference cycle that would keep destructors from running.
    """
    journal_dir = base

    def rename_journal():
        util.rename(os.path.join(journal_dir, "journal"),
                    os.path.join(journal_dir, "undo"))
        util.rename(os.path.join(journal_dir, "journal.dirstate"),
                    os.path.join(journal_dir, "undo.dirstate"))

    return rename_journal
1894
1894
@@ -1,218 +1,217 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import ConfigParser
8 import ConfigParser
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "os re socket sys util")
11 demandload(globals(), "os re socket sys util")
12
12
class ui(object):
    """Mercurial user interface: configuration lookup, output and prompting.

    ui objects form a chain via ``parentui``; config lookups consult the
    in-memory overlay first, then the local config file data, then fall
    back to the parent ui.
    """
    def __init__(self, verbose=False, debug=False, quiet=False,
                 interactive=True, parentui=None):
        self.overlay = {}  # (section, name) -> value set via setconfig()
        if parentui is None:
            # this is the parent of all ui children
            self.parentui = None
            self.cdata = ConfigParser.SafeConfigParser()
            self.readconfig(util.rcpath())

            self.quiet = self.configbool("ui", "quiet")
            self.verbose = self.configbool("ui", "verbose")
            self.debugflag = self.configbool("ui", "debug")
            self.interactive = self.configbool("ui", "interactive", True)

            self.updateopts(verbose, debug, quiet, interactive)
            self.diffcache = None
        else:
            # parentui may point to an ui object which is already a child
            self.parentui = parentui.parentui or parentui
            parent_cdata = self.parentui.cdata
            self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
            # copy raw values so interpolation still works in the child
            for section in parent_cdata.sections():
                self.cdata.add_section(section)
                for name, value in parent_cdata.items(section, raw=True):
                    self.cdata.set(section, name, value)

    def __getattr__(self, key):
        # attributes not set locally are resolved through the parent ui
        return getattr(self.parentui, key)

    def updateopts(self, verbose=False, debug=False, quiet=False,
                   interactive=True):
        """Fold command-line flags into the current verbosity settings."""
        self.quiet = (self.quiet or quiet) and not verbose and not debug
        self.verbose = (self.verbose or verbose) or debug
        self.debugflag = (self.debugflag or debug)
        self.interactive = (self.interactive and interactive)

    def readconfig(self, fn, root=None):
        """Read one config file or a list of them into self.cdata.

        Relative [paths] entries are rewritten as absolute paths under
        *root* (defaulting to the user's home directory).
        """
        if isinstance(fn, basestring):
            fn = [fn]
        for f in fn:
            try:
                self.cdata.read(f)
            except ConfigParser.ParsingError as inst:
                raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
        # translate paths relative to root (or home) into absolute paths
        if root is None:
            root = os.path.expanduser('~')
        for name, path in self.configitems("paths"):
            if path and path.find("://") == -1 and not os.path.isabs(path):
                self.cdata.set("paths", name, os.path.join(root, path))

    def setconfig(self, section, name, val):
        """Override a config value in memory only."""
        self.overlay[(section, name)] = val

    def config(self, section, name, default=None):
        """Return a config value as a string, or *default* if unset."""
        if (section, name) in self.overlay:
            return self.overlay[(section, name)]
        if self.cdata.has_option(section, name):
            try:
                return self.cdata.get(section, name)
            except ConfigParser.InterpolationError as inst:
                raise util.Abort(_("Error in configuration:\n%s") % inst)
        if self.parentui is None:
            return default
        else:
            return self.parentui.config(section, name, default)

    def configbool(self, section, name, default=False):
        """Return a config value interpreted as a boolean."""
        if (section, name) in self.overlay:
            return self.overlay[(section, name)]
        if self.cdata.has_option(section, name):
            try:
                return self.cdata.getboolean(section, name)
            except ConfigParser.InterpolationError as inst:
                raise util.Abort(_("Error in configuration:\n%s") % inst)
        if self.parentui is None:
            return default
        else:
            return self.parentui.configbool(section, name, default)

    def configitems(self, section):
        """Return the sorted (name, value) pairs of a section, with
        local values overriding the parent's."""
        items = {}
        if self.parentui is not None:
            items = dict(self.parentui.configitems(section))
        if self.cdata.has_section(section):
            try:
                items.update(dict(self.cdata.items(section)))
            except ConfigParser.InterpolationError as inst:
                raise util.Abort(_("Error in configuration:\n%s") % inst)
        x = items.items()
        x.sort()
        return x

    def walkconfig(self, seen=None):
        """Yield every (section, name, value) triple exactly once,
        overlay first, then file data, then the parent chain."""
        if seen is None:
            seen = {}
        for (section, name), value in self.overlay.iteritems():
            yield section, name, value
            seen[section, name] = 1
        for section in self.cdata.sections():
            for name, value in self.cdata.items(section):
                if (section, name) in seen:
                    continue
                # keep the output single-line for display purposes
                yield section, name, value.replace('\n', '\\n')
                seen[section, name] = 1
        if self.parentui is not None:
            for parent in self.parentui.walkconfig(seen):
                yield parent

    def extensions(self):
        """Return the configured [extensions] entries."""
        return self.configitems("extensions")

    def diffopts(self):
        """Return (and cache) diff display options from [diff].

        Defaults: showfunc on, ignorews off.  Any value whose lowercase
        form is not "true" counts as False.
        """
        if self.diffcache:
            return self.diffcache
        ret = {'showfunc': True, 'ignorews': False}
        for k, v in self.configitems("diff"):
            # bugfix: an empty value used to leave the loop variable
            # `value` unbound (NameError) or stale from a prior key;
            # treat any non-"true" value, including empty, as False.
            ret[k.lower()] = bool(v) and v.lower() == 'true'
        self.diffcache = ret
        return ret

    def username(self):
        """Return the default commit user name.

        Search order: $HGUSER, [ui] username, $EMAIL, then
        login-name@fqdn as a last resort.
        """
        return (os.environ.get("HGUSER") or
                self.config("ui", "username") or
                os.environ.get("EMAIL") or
                (os.environ.get("LOGNAME",
                                os.environ.get("USERNAME", "unknown"))
                 + '@' + socket.getfqdn()))

    def shortuser(self, user):
        """Return a short representation of a user name or email address."""
        if not self.verbose:
            user = util.shortuser(user)
        return user

    def expandpath(self, loc):
        """Return repository location relative to cwd or from [paths]"""
        if loc.find("://") != -1 or os.path.exists(loc):
            return loc

        return self.config("paths", loc, loc)

    def write(self, *args):
        for a in args:
            sys.stdout.write(str(a))

    def write_err(self, *args):
        # flush stdout first so interleaved output stays ordered
        if not sys.stdout.closed:
            sys.stdout.flush()
        for a in args:
            sys.stderr.write(str(a))

    def flush(self):
        try:
            sys.stdout.flush()
        finally:
            sys.stderr.flush()

    def readline(self):
        return sys.stdin.readline()[:-1]

    def prompt(self, msg, pat, default="y"):
        """Prompt until the reply matches *pat*; return *default* when
        not interactive."""
        if not self.interactive:
            return default
        while 1:
            self.write(msg, " ")
            r = self.readline()
            if re.match(pat, r):
                return r
            else:
                self.write(_("unrecognized response\n"))

    def status(self, *msg):
        if not self.quiet:
            self.write(*msg)

    def warn(self, *msg):
        self.write_err(*msg)

    def note(self, *msg):
        if self.verbose:
            self.write(*msg)

    def debug(self, *msg):
        if self.debugflag:
            self.write(*msg)

    def edit(self, text, user):
        """Run an external editor on *text* and return the edited result.

        Lines starting with "HG:" are stripped from the result.  *user*
        is exported as $HGUSER so the editor (e.g. hgeditor) sees the
        committer chosen with "commit -u" rather than the default.
        """
        import tempfile
        (fd, name) = tempfile.mkstemp("hg")
        f = os.fdopen(fd, "w")
        f.write(text)
        f.close()

        editor = (os.environ.get("HGEDITOR") or
                  self.config("ui", "editor") or
                  os.environ.get("EDITOR", "vi"))

        util.system("%s \"%s\"" % (editor, name),
                    environ={'HGUSER': user},
                    onerr=util.Abort, errprefix=_("edit failed"))

        t = open(name).read()
        t = re.sub("(?m)^HG:.*\n", "", t)

        os.unlink(name)

        return t
General Comments 0
You need to be logged in to leave comments. Login now