##// END OF EJS Templates
fix minor bugs in localrepo.hook.
Vadim Gelfer -
r2190:b67fcd91 default
parent child Browse files
Show More
@@ -1,2069 +1,2069
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog traceback")
15 demandload(globals(), "revlog traceback")
16
16
17 class localrepository(object):
17 class localrepository(object):
    def __del__(self):
        # Drop the reference to any open transaction when the repository
        # object is torn down, so the journal handle is not kept alive.
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or with create=1, create) the repository at *path*.

        When *path* is empty, walk upward from the current directory
        until a ".hg" directory is found; raises repo.RepoError if none
        exists (or if *path* has no repository and create is false).
        """
        if not path:
            # search upward for the enclosing repository root
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    # reached the filesystem root without finding .hg
                    raise repo.RepoError(_("no repo found"))
            path = p
        self.path = os.path.join(path, ".hg")

        if not create and not os.path.isdir(self.path):
            raise repo.RepoError(_("repository %s not found") % path)

        self.root = os.path.abspath(path)
        self.origroot = path  # path exactly as given by the caller
        self.ui = ui.ui(parentui=parentui)
        self.opener = util.opener(self.path)    # opens files under .hg
        self.wopener = util.opener(self.root)   # opens working-dir files

        try:
            # per-repository configuration; absence is not an error
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            pass

        # revlog format/flags requested via configuration
        v = self.ui.revlogopts
        self.revlogversion = int(v.get('format', revlog.REVLOGV0))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        flags = 0
        for x in v.get('flags', "").split():
            flags |= revlog.flagstr(x)

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.opener, v)
        self.changelog = changelog.changelog(self.opener, v)

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily-populated caches (filled on first use)
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
76
76
77 def hook(self, name, throw=False, **args):
77 def hook(self, name, throw=False, **args):
78 def callhook(hname, funcname):
78 def callhook(hname, funcname):
79 '''call python hook. hook is callable object, looked up as
79 '''call python hook. hook is callable object, looked up as
80 name in python module. if callable returns "true", hook
80 name in python module. if callable returns "true", hook
81 passes, else fails. if hook raises exception, treated as
81 passes, else fails. if hook raises exception, treated as
82 hook failure. exception propagates if throw is "true".'''
82 hook failure. exception propagates if throw is "true".'''
83
83
84 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
84 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
85 d = funcname.rfind('.')
85 d = funcname.rfind('.')
86 if d == -1:
86 if d == -1:
87 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
87 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
88 % (hname, funcname))
88 % (hname, funcname))
89 modname = funcname[:d]
89 modname = funcname[:d]
90 try:
90 try:
91 obj = __import__(modname)
91 obj = __import__(modname)
92 except ImportError:
92 except ImportError:
93 raise util.Abort(_('%s hook is invalid '
93 raise util.Abort(_('%s hook is invalid '
94 '(import of "%s" failed)') %
94 '(import of "%s" failed)') %
95 (hname, modname))
95 (hname, modname))
96 try:
96 try:
97 for p in funcname.split('.')[1:]:
97 for p in funcname.split('.')[1:]:
98 obj = getattr(obj, p)
98 obj = getattr(obj, p)
99 except AttributeError, err:
99 except AttributeError, err:
100 raise util.Abort(_('%s hook is invalid '
100 raise util.Abort(_('%s hook is invalid '
101 '("%s" is not defined)') %
101 '("%s" is not defined)') %
102 (hname, funcname))
102 (hname, funcname))
103 if not callable(obj):
103 if not callable(obj):
104 raise util.Abort(_('%s hook is invalid '
104 raise util.Abort(_('%s hook is invalid '
105 '("%s" is not callable)') %
105 '("%s" is not callable)') %
106 (hname, funcname))
106 (hname, funcname))
107 try:
107 try:
108 r = obj(ui=ui, repo=repo, hooktype=name, **args)
108 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
109 except (KeyboardInterrupt, util.SignalInterrupt):
109 except (KeyboardInterrupt, util.SignalInterrupt):
110 raise
110 raise
111 except Exception, exc:
111 except Exception, exc:
112 if isinstance(exc, util.Abort):
112 if isinstance(exc, util.Abort):
113 self.ui.warn(_('error: %s hook failed: %s\n') %
113 self.ui.warn(_('error: %s hook failed: %s\n') %
114 (hname, exc.args[0] % exc.args[1:]))
114 (hname, exc.args[0] % exc.args[1:]))
115 else:
115 else:
116 self.ui.warn(_('error: %s hook raised an exception: '
116 self.ui.warn(_('error: %s hook raised an exception: '
117 '%s\n') % (hname, exc))
117 '%s\n') % (hname, exc))
118 if throw:
118 if throw:
119 raise
119 raise
120 if self.ui.traceback:
120 if self.ui.traceback:
121 traceback.print_exc()
121 traceback.print_exc()
122 return False
122 return False
123 if not r:
123 if not r:
124 if throw:
124 if throw:
125 raise util.Abort(_('%s hook failed') % hname)
125 raise util.Abort(_('%s hook failed') % hname)
126 self.ui.warn(_('error: %s hook failed\n') % hname)
126 self.ui.warn(_('error: %s hook failed\n') % hname)
127 return r
127 return r
128
128
129 def runhook(name, cmd):
129 def runhook(name, cmd):
130 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
130 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
131 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
131 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
132 [(k.upper(), v) for k, v in args.iteritems()])
132 [(k.upper(), v) for k, v in args.iteritems()])
133 r = util.system(cmd, environ=env, cwd=self.root)
133 r = util.system(cmd, environ=env, cwd=self.root)
134 if r:
134 if r:
135 desc, r = util.explain_exit(r)
135 desc, r = util.explain_exit(r)
136 if throw:
136 if throw:
137 raise util.Abort(_('%s hook %s') % (name, desc))
137 raise util.Abort(_('%s hook %s') % (name, desc))
138 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
138 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
139 return False
139 return False
140 return True
140 return True
141
141
142 r = True
142 r = True
143 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
143 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
144 if hname.split(".", 1)[0] == name and cmd]
144 if hname.split(".", 1)[0] == name and cmd]
145 hooks.sort()
145 hooks.sort()
146 for hname, cmd in hooks:
146 for hname, cmd in hooks:
147 if cmd.startswith('python:'):
147 if cmd.startswith('python:'):
148 r = callhook(hname, cmd[7:].strip()) and r
148 r = callhook(hname, cmd[7:].strip()) and r
149 else:
149 else:
150 r = runhook(hname, cmd) and r
150 r = runhook(hname, cmd) and r
151 return r
151 return r
152
152
153 def tags(self):
153 def tags(self):
154 '''return a mapping of tag to node'''
154 '''return a mapping of tag to node'''
155 if not self.tagscache:
155 if not self.tagscache:
156 self.tagscache = {}
156 self.tagscache = {}
157
157
158 def parsetag(line, context):
158 def parsetag(line, context):
159 if not line:
159 if not line:
160 return
160 return
161 s = l.split(" ", 1)
161 s = l.split(" ", 1)
162 if len(s) != 2:
162 if len(s) != 2:
163 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
163 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
164 return
164 return
165 node, key = s
165 node, key = s
166 try:
166 try:
167 bin_n = bin(node)
167 bin_n = bin(node)
168 except TypeError:
168 except TypeError:
169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
170 return
170 return
171 if bin_n not in self.changelog.nodemap:
171 if bin_n not in self.changelog.nodemap:
172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
173 return
173 return
174 self.tagscache[key.strip()] = bin_n
174 self.tagscache[key.strip()] = bin_n
175
175
176 # read each head of the tags file, ending with the tip
176 # read each head of the tags file, ending with the tip
177 # and add each tag found to the map, with "newer" ones
177 # and add each tag found to the map, with "newer" ones
178 # taking precedence
178 # taking precedence
179 fl = self.file(".hgtags")
179 fl = self.file(".hgtags")
180 h = fl.heads()
180 h = fl.heads()
181 h.reverse()
181 h.reverse()
182 for r in h:
182 for r in h:
183 count = 0
183 count = 0
184 for l in fl.read(r).splitlines():
184 for l in fl.read(r).splitlines():
185 count += 1
185 count += 1
186 parsetag(l, ".hgtags:%d" % count)
186 parsetag(l, ".hgtags:%d" % count)
187
187
188 try:
188 try:
189 f = self.opener("localtags")
189 f = self.opener("localtags")
190 count = 0
190 count = 0
191 for l in f:
191 for l in f:
192 count += 1
192 count += 1
193 parsetag(l, "localtags:%d" % count)
193 parsetag(l, "localtags:%d" % count)
194 except IOError:
194 except IOError:
195 pass
195 pass
196
196
197 self.tagscache['tip'] = self.changelog.tip()
197 self.tagscache['tip'] = self.changelog.tip()
198
198
199 return self.tagscache
199 return self.tagscache
200
200
201 def tagslist(self):
201 def tagslist(self):
202 '''return a list of tags ordered by revision'''
202 '''return a list of tags ordered by revision'''
203 l = []
203 l = []
204 for t, n in self.tags().items():
204 for t, n in self.tags().items():
205 try:
205 try:
206 r = self.changelog.rev(n)
206 r = self.changelog.rev(n)
207 except:
207 except:
208 r = -2 # sort to the beginning of the list if unknown
208 r = -2 # sort to the beginning of the list if unknown
209 l.append((r, t, n))
209 l.append((r, t, n))
210 l.sort()
210 l.sort()
211 return [(t, n) for r, t, n in l]
211 return [(t, n) for r, t, n in l]
212
212
213 def nodetags(self, node):
213 def nodetags(self, node):
214 '''return the tags associated with a node'''
214 '''return the tags associated with a node'''
215 if not self.nodetagscache:
215 if not self.nodetagscache:
216 self.nodetagscache = {}
216 self.nodetagscache = {}
217 for t, n in self.tags().items():
217 for t, n in self.tags().items():
218 self.nodetagscache.setdefault(n, []).append(t)
218 self.nodetagscache.setdefault(n, []).append(t)
219 return self.nodetagscache.get(node, [])
219 return self.nodetagscache.get(node, [])
220
220
221 def lookup(self, key):
221 def lookup(self, key):
222 try:
222 try:
223 return self.tags()[key]
223 return self.tags()[key]
224 except KeyError:
224 except KeyError:
225 try:
225 try:
226 return self.changelog.lookup(key)
226 return self.changelog.lookup(key)
227 except:
227 except:
228 raise repo.RepoError(_("unknown revision '%s'") % key)
228 raise repo.RepoError(_("unknown revision '%s'") % key)
229
229
    def dev(self):
        # st_dev of the .hg directory
        return os.stat(self.path).st_dev
232
232
    def local(self):
        # this repository lives on the local filesystem
        return True
235
235
    def join(self, f):
        # path of *f* inside the .hg directory
        return os.path.join(self.path, f)
238
238
    def wjoin(self, f):
        # path of *f* inside the working directory
        return os.path.join(self.root, f)
241
241
242 def file(self, f):
242 def file(self, f):
243 if f[0] == '/':
243 if f[0] == '/':
244 f = f[1:]
244 f = f[1:]
245 return filelog.filelog(self.opener, f, self.revlogversion)
245 return filelog.filelog(self.opener, f, self.revlogversion)
246
246
    def getcwd(self):
        # current working directory as the dirstate reports it
        return self.dirstate.getcwd()
249
249
    def wfile(self, f, mode='r'):
        # open working-directory file *f* directly (no encode/decode
        # filters -- compare wread/wwrite)
        return self.wopener(f, mode)
252
252
253 def wread(self, filename):
253 def wread(self, filename):
254 if self.encodepats == None:
254 if self.encodepats == None:
255 l = []
255 l = []
256 for pat, cmd in self.ui.configitems("encode"):
256 for pat, cmd in self.ui.configitems("encode"):
257 mf = util.matcher(self.root, "", [pat], [], [])[1]
257 mf = util.matcher(self.root, "", [pat], [], [])[1]
258 l.append((mf, cmd))
258 l.append((mf, cmd))
259 self.encodepats = l
259 self.encodepats = l
260
260
261 data = self.wopener(filename, 'r').read()
261 data = self.wopener(filename, 'r').read()
262
262
263 for mf, cmd in self.encodepats:
263 for mf, cmd in self.encodepats:
264 if mf(filename):
264 if mf(filename):
265 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
265 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
266 data = util.filter(data, cmd)
266 data = util.filter(data, cmd)
267 break
267 break
268
268
269 return data
269 return data
270
270
271 def wwrite(self, filename, data, fd=None):
271 def wwrite(self, filename, data, fd=None):
272 if self.decodepats == None:
272 if self.decodepats == None:
273 l = []
273 l = []
274 for pat, cmd in self.ui.configitems("decode"):
274 for pat, cmd in self.ui.configitems("decode"):
275 mf = util.matcher(self.root, "", [pat], [], [])[1]
275 mf = util.matcher(self.root, "", [pat], [], [])[1]
276 l.append((mf, cmd))
276 l.append((mf, cmd))
277 self.decodepats = l
277 self.decodepats = l
278
278
279 for mf, cmd in self.decodepats:
279 for mf, cmd in self.decodepats:
280 if mf(filename):
280 if mf(filename):
281 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
281 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
282 data = util.filter(data, cmd)
282 data = util.filter(data, cmd)
283 break
283 break
284
284
285 if fd:
285 if fd:
286 return fd.write(data)
286 return fd.write(data)
287 return self.wopener(filename, 'w').write(data)
287 return self.wopener(filename, 'w').write(data)
288
288
289 def transaction(self):
289 def transaction(self):
290 tr = self.transhandle
290 tr = self.transhandle
291 if tr != None and tr.running():
291 if tr != None and tr.running():
292 return tr.nest()
292 return tr.nest()
293
293
294 # save dirstate for undo
294 # save dirstate for undo
295 try:
295 try:
296 ds = self.opener("dirstate").read()
296 ds = self.opener("dirstate").read()
297 except IOError:
297 except IOError:
298 ds = ""
298 ds = ""
299 self.opener("journal.dirstate", "w").write(ds)
299 self.opener("journal.dirstate", "w").write(ds)
300
300
301 tr = transaction.transaction(self.ui.warn, self.opener,
301 tr = transaction.transaction(self.ui.warn, self.opener,
302 self.join("journal"),
302 self.join("journal"),
303 aftertrans(self.path))
303 aftertrans(self.path))
304 self.transhandle = tr
304 self.transhandle = tr
305 return tr
305 return tr
306
306
307 def recover(self):
307 def recover(self):
308 l = self.lock()
308 l = self.lock()
309 if os.path.exists(self.join("journal")):
309 if os.path.exists(self.join("journal")):
310 self.ui.status(_("rolling back interrupted transaction\n"))
310 self.ui.status(_("rolling back interrupted transaction\n"))
311 transaction.rollback(self.opener, self.join("journal"))
311 transaction.rollback(self.opener, self.join("journal"))
312 self.reload()
312 self.reload()
313 return True
313 return True
314 else:
314 else:
315 self.ui.warn(_("no interrupted transaction available\n"))
315 self.ui.warn(_("no interrupted transaction available\n"))
316 return False
316 return False
317
317
318 def undo(self, wlock=None):
318 def undo(self, wlock=None):
319 if not wlock:
319 if not wlock:
320 wlock = self.wlock()
320 wlock = self.wlock()
321 l = self.lock()
321 l = self.lock()
322 if os.path.exists(self.join("undo")):
322 if os.path.exists(self.join("undo")):
323 self.ui.status(_("rolling back last transaction\n"))
323 self.ui.status(_("rolling back last transaction\n"))
324 transaction.rollback(self.opener, self.join("undo"))
324 transaction.rollback(self.opener, self.join("undo"))
325 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
325 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
326 self.reload()
326 self.reload()
327 self.wreload()
327 self.wreload()
328 else:
328 else:
329 self.ui.warn(_("no undo information available\n"))
329 self.ui.warn(_("no undo information available\n"))
330
330
    def wreload(self):
        # re-read the dirstate from disk
        self.dirstate.read()
333
333
    def reload(self):
        # re-read store data and invalidate the tag caches
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None
339
339
340 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
340 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
341 desc=None):
341 desc=None):
342 try:
342 try:
343 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
343 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
344 except lock.LockHeld, inst:
344 except lock.LockHeld, inst:
345 if not wait:
345 if not wait:
346 raise
346 raise
347 self.ui.warn(_("waiting for lock on %s held by %s\n") %
347 self.ui.warn(_("waiting for lock on %s held by %s\n") %
348 (desc, inst.args[0]))
348 (desc, inst.args[0]))
349 # default to 600 seconds timeout
349 # default to 600 seconds timeout
350 l = lock.lock(self.join(lockname),
350 l = lock.lock(self.join(lockname),
351 int(self.ui.config("ui", "timeout") or 600),
351 int(self.ui.config("ui", "timeout") or 600),
352 releasefn, desc=desc)
352 releasefn, desc=desc)
353 if acquirefn:
353 if acquirefn:
354 acquirefn()
354 acquirefn()
355 return l
355 return l
356
356
    def lock(self, wait=1):
        # repository (store) lock; caches are reloaded on acquire
        return self.do_lock("lock", wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)
360
360
    def wlock(self, wait=1):
        # working-directory lock; dirstate is written on release and
        # re-read on acquire
        return self.do_lock("wlock", wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
365
365
    def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
        "determine whether a new filenode is needed"
        # Returns (existing_entry, None, None) when the file content is
        # unchanged from its single effective parent, otherwise
        # (None, fp1, fp2) -- the parents a new filelog entry needs.
        fp1 = manifest1.get(filename, nullid)
        fp2 = manifest2.get(filename, nullid)

        if fp2 != nullid:
            # is one parent an ancestor of the other?
            # (if so, this is a linear update, not a real file merge)
            fpa = filelog.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and text == filelog.read(fp1):
            return (fp1, None, None)

        return (None, fp1, fp2)
384
384
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
        """Commit *files* with explicit parents, bypassing the normal
        dirstate-driven commit path (used e.g. by import/convert-style
        operations).  The dirstate is only updated when p1 is the
        current working-directory parent.
        """
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # only touch the dirstate if we are committing on top of the
        # current working-directory parent
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                # reuse the existing filelog entry when content is
                # unchanged from the effective parent
                (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    mm[f] = entry
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # file missing from the working dir: treat as removed
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
440
440
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None):
        """Commit changes to the repository.

        With *files*, commit exactly those files (warning about
        untracked ones); otherwise commit everything the dirstate
        reports as modified/added/removed under *match*.  Runs the
        precommit, pretxncommit and commit hooks; prompts for a commit
        message via the editor when *text* is empty.  Returns the new
        changeset node, or None if nothing was committed.
        """
        commit = []
        remove = []
        changed = []

        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            modified, added, removed, deleted, unknown = self.changes(match=match)
            commit = modified + added
            remove = removed

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 set) or force may legitimately commit no file
        # changes
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                # record the copy source and its revision in filelog
                # metadata
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    new[f] = entry
                    continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1 = m1.copy()
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text:
            # build a template for the editor listing the pending
            # changes
            edittext = [""]
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            edittext = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)
            # an empty message aborts the commit (transaction is left
            # unclosed and will be rolled back)
            if not edittext.rstrip():
                return None
            text = edittext

        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
560
560
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        """Yield (source, filename) pairs for files under *match*.

        With *node*, walk that revision's manifest ('m' entries);
        requested files absent from the manifest are either passed to
        *badmatch* (yielded as 'b') or warned about.  Without *node*,
        delegate to the dirstate walk over the working directory.
        """
        if node:
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                # drop manifest files from fdict; what remains afterwards
                # are the explicitly requested files not in this rev
                fdict.pop(fn, None)
                if match(fn):
                    yield 'm', fn
            for fn in fdict:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
578
578
    def changes(self, node1=None, node2=None, files=[], match=util.always,
                wlock=None, show_ignored=None):
        """return changes between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of sorted lists
        (modified, added, removed, deleted, unknown), with a trailing
        'ignored' list appended when show_ignored is not None.
        """

        def fcmp(fn, mf):
            # cmp-style compare of working-dir contents of fn against the
            # revision recorded for it in manifest dict mf
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            # manifest of 'node', restricted to files accepted by 'match'
            change = self.changelog.read(node)
            mf = dict(self.manifest.read(change[0]))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if node1:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                # opportunistically grab the working-dir lock so we may
                # mark clean files below; read-only fallback if it's held
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    wlock = None
            lookup, modified, added, removed, deleted, unknown, ignored = (
                self.dirstate.changes(files, match, show_ignored))

            # are we comparing working dir against its parent?
            if not node1:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        elif wlock is not None:
                            # contents match: record as clean (only safe
                            # while we hold the working-dir lock)
                            self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            deleted, unknown, ignored = [], [], []
            mf2 = mfmatches(node2)

        if node1:
            # flush lists from dirstate before comparing manifests
            modified, added = [], []

            for fn in mf2:
                if mf1.has_key(fn):
                    # "" in mf2 marks a working-dir pseudo-entry: compare
                    # actual contents in that case
                    if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
                        modified.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever is left in mf1 exists only on the node1 side
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored:
            l.sort()
        if show_ignored is None:
            return (modified, added, removed, deleted, unknown)
        else:
            return (modified, added, removed, deleted, unknown, ignored)
662 def add(self, list, wlock=None):
662 def add(self, list, wlock=None):
663 if not wlock:
663 if not wlock:
664 wlock = self.wlock()
664 wlock = self.wlock()
665 for f in list:
665 for f in list:
666 p = self.wjoin(f)
666 p = self.wjoin(f)
667 if not os.path.exists(p):
667 if not os.path.exists(p):
668 self.ui.warn(_("%s does not exist!\n") % f)
668 self.ui.warn(_("%s does not exist!\n") % f)
669 elif not os.path.isfile(p):
669 elif not os.path.isfile(p):
670 self.ui.warn(_("%s not added: only files supported currently\n")
670 self.ui.warn(_("%s not added: only files supported currently\n")
671 % f)
671 % f)
672 elif self.dirstate.state(f) in 'an':
672 elif self.dirstate.state(f) in 'an':
673 self.ui.warn(_("%s already tracked!\n") % f)
673 self.ui.warn(_("%s already tracked!\n") % f)
674 else:
674 else:
675 self.dirstate.update([f], "a")
675 self.dirstate.update([f], "a")
676
676
677 def forget(self, list, wlock=None):
677 def forget(self, list, wlock=None):
678 if not wlock:
678 if not wlock:
679 wlock = self.wlock()
679 wlock = self.wlock()
680 for f in list:
680 for f in list:
681 if self.dirstate.state(f) not in 'ai':
681 if self.dirstate.state(f) not in 'ai':
682 self.ui.warn(_("%s not added!\n") % f)
682 self.ui.warn(_("%s not added!\n") % f)
683 else:
683 else:
684 self.dirstate.forget([f])
684 self.dirstate.forget([f])
685
685
686 def remove(self, list, unlink=False, wlock=None):
686 def remove(self, list, unlink=False, wlock=None):
687 if unlink:
687 if unlink:
688 for f in list:
688 for f in list:
689 try:
689 try:
690 util.unlink(self.wjoin(f))
690 util.unlink(self.wjoin(f))
691 except OSError, inst:
691 except OSError, inst:
692 if inst.errno != errno.ENOENT:
692 if inst.errno != errno.ENOENT:
693 raise
693 raise
694 if not wlock:
694 if not wlock:
695 wlock = self.wlock()
695 wlock = self.wlock()
696 for f in list:
696 for f in list:
697 p = self.wjoin(f)
697 p = self.wjoin(f)
698 if os.path.exists(p):
698 if os.path.exists(p):
699 self.ui.warn(_("%s still exists!\n") % f)
699 self.ui.warn(_("%s still exists!\n") % f)
700 elif self.dirstate.state(f) == 'a':
700 elif self.dirstate.state(f) == 'a':
701 self.dirstate.forget([f])
701 self.dirstate.forget([f])
702 elif f not in self.dirstate:
702 elif f not in self.dirstate:
703 self.ui.warn(_("%s not tracked!\n") % f)
703 self.ui.warn(_("%s not tracked!\n") % f)
704 else:
704 else:
705 self.dirstate.update([f], "r")
705 self.dirstate.update([f], "r")
706
706
707 def undelete(self, list, wlock=None):
707 def undelete(self, list, wlock=None):
708 p = self.dirstate.parents()[0]
708 p = self.dirstate.parents()[0]
709 mn = self.changelog.read(p)[0]
709 mn = self.changelog.read(p)[0]
710 mf = self.manifest.readflags(mn)
710 mf = self.manifest.readflags(mn)
711 m = self.manifest.read(mn)
711 m = self.manifest.read(mn)
712 if not wlock:
712 if not wlock:
713 wlock = self.wlock()
713 wlock = self.wlock()
714 for f in list:
714 for f in list:
715 if self.dirstate.state(f) not in "r":
715 if self.dirstate.state(f) not in "r":
716 self.ui.warn("%s not removed!\n" % f)
716 self.ui.warn("%s not removed!\n" % f)
717 else:
717 else:
718 t = self.file(f).read(m[f])
718 t = self.file(f).read(m[f])
719 self.wwrite(f, t)
719 self.wwrite(f, t)
720 util.set_exec(self.wjoin(f), mf[f])
720 util.set_exec(self.wjoin(f), mf[f])
721 self.dirstate.update([f], "n")
721 self.dirstate.update([f], "n")
722
722
723 def copy(self, source, dest, wlock=None):
723 def copy(self, source, dest, wlock=None):
724 p = self.wjoin(dest)
724 p = self.wjoin(dest)
725 if not os.path.exists(p):
725 if not os.path.exists(p):
726 self.ui.warn(_("%s does not exist!\n") % dest)
726 self.ui.warn(_("%s does not exist!\n") % dest)
727 elif not os.path.isfile(p):
727 elif not os.path.isfile(p):
728 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
728 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
729 else:
729 else:
730 if not wlock:
730 if not wlock:
731 wlock = self.wlock()
731 wlock = self.wlock()
732 if self.dirstate.state(dest) == '?':
732 if self.dirstate.state(dest) == '?':
733 self.dirstate.update([dest], "a")
733 self.dirstate.update([dest], "a")
734 self.dirstate.copy(source, dest)
734 self.dirstate.copy(source, dest)
735
735
736 def heads(self, start=None):
736 def heads(self, start=None):
737 heads = self.changelog.heads(start)
737 heads = self.changelog.heads(start)
738 # sort the output in rev descending order
738 # sort the output in rev descending order
739 heads = [(-self.changelog.rev(h), h) for h in heads]
739 heads = [(-self.changelog.rev(h), h) for h in heads]
740 heads.sort()
740 heads.sort()
741 return [n for (r, n) in heads]
741 return [n for (r, n) in heads]
742
742
    # branchlookup returns a dict giving a list of branches for
    # each head.  A branch is defined as the tag of a node or
    # the branch of the node's parents.  If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13.  Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents.  It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        # maps a node to the set of tagged nodes recorded visible from it
        branches = {}
        # (second-parent node, found-so-far list) pairs still to walk
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                # resume walking a merged-in branch recorded earlier
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        # mark this tagged node visible from everything
                        # found so far on this walk, and from itself
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        # reached the requested branch: stop going deeper
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # remember the second parent so the merged branch is
                    # walked with the current 'found' context
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                # transitive closure of tagged nodes reachable from
                # 'node' via the branches dict, memoized in viscache
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited.  This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
849 def branches(self, nodes):
849 def branches(self, nodes):
850 if not nodes:
850 if not nodes:
851 nodes = [self.changelog.tip()]
851 nodes = [self.changelog.tip()]
852 b = []
852 b = []
853 for n in nodes:
853 for n in nodes:
854 t = n
854 t = n
855 while n:
855 while n:
856 p = self.changelog.parents(n)
856 p = self.changelog.parents(n)
857 if p[1] != nullid or p[0] == nullid:
857 if p[1] != nullid or p[0] == nullid:
858 b.append((t, n, p[0], p[1]))
858 b.append((t, n, p[0], p[1]))
859 break
859 break
860 n = p[0]
860 n = p[0]
861 return b
861 return b
862
862
863 def between(self, pairs):
863 def between(self, pairs):
864 r = []
864 r = []
865
865
866 for top, bottom in pairs:
866 for top, bottom in pairs:
867 n, l, i = top, [], 0
867 n, l, i = top, [], 0
868 f = 1
868 f = 1
869
869
870 while n != bottom:
870 while n != bottom:
871 p = self.changelog.parents(n)[0]
871 p = self.changelog.parents(n)[0]
872 if i == f:
872 if i == f:
873 l.append(n)
873 l.append(n)
874 f = f * 2
874 f = f * 2
875 n = p
875 n = p
876 i += 1
876 i += 1
877
877
878 r.append(l)
878 r.append(l)
879
879
880 return r
880 return r
881
881
882 def findincoming(self, remote, base=None, heads=None, force=False):
882 def findincoming(self, remote, base=None, heads=None, force=False):
883 m = self.changelog.nodemap
883 m = self.changelog.nodemap
884 search = []
884 search = []
885 fetch = {}
885 fetch = {}
886 seen = {}
886 seen = {}
887 seenbranch = {}
887 seenbranch = {}
888 if base == None:
888 if base == None:
889 base = {}
889 base = {}
890
890
891 if not heads:
891 if not heads:
892 heads = remote.heads()
892 heads = remote.heads()
893
893
894 if self.changelog.tip() == nullid:
894 if self.changelog.tip() == nullid:
895 if heads != [nullid]:
895 if heads != [nullid]:
896 return [nullid]
896 return [nullid]
897 return []
897 return []
898
898
899 # assume we're closer to the tip than the root
899 # assume we're closer to the tip than the root
900 # and start by examining the heads
900 # and start by examining the heads
901 self.ui.status(_("searching for changes\n"))
901 self.ui.status(_("searching for changes\n"))
902
902
903 unknown = []
903 unknown = []
904 for h in heads:
904 for h in heads:
905 if h not in m:
905 if h not in m:
906 unknown.append(h)
906 unknown.append(h)
907 else:
907 else:
908 base[h] = 1
908 base[h] = 1
909
909
910 if not unknown:
910 if not unknown:
911 return []
911 return []
912
912
913 rep = {}
913 rep = {}
914 reqcnt = 0
914 reqcnt = 0
915
915
916 # search through remote branches
916 # search through remote branches
917 # a 'branch' here is a linear segment of history, with four parts:
917 # a 'branch' here is a linear segment of history, with four parts:
918 # head, root, first parent, second parent
918 # head, root, first parent, second parent
919 # (a branch always has two parents (or none) by definition)
919 # (a branch always has two parents (or none) by definition)
920 unknown = remote.branches(unknown)
920 unknown = remote.branches(unknown)
921 while unknown:
921 while unknown:
922 r = []
922 r = []
923 while unknown:
923 while unknown:
924 n = unknown.pop(0)
924 n = unknown.pop(0)
925 if n[0] in seen:
925 if n[0] in seen:
926 continue
926 continue
927
927
928 self.ui.debug(_("examining %s:%s\n")
928 self.ui.debug(_("examining %s:%s\n")
929 % (short(n[0]), short(n[1])))
929 % (short(n[0]), short(n[1])))
930 if n[0] == nullid:
930 if n[0] == nullid:
931 break
931 break
932 if n in seenbranch:
932 if n in seenbranch:
933 self.ui.debug(_("branch already found\n"))
933 self.ui.debug(_("branch already found\n"))
934 continue
934 continue
935 if n[1] and n[1] in m: # do we know the base?
935 if n[1] and n[1] in m: # do we know the base?
936 self.ui.debug(_("found incomplete branch %s:%s\n")
936 self.ui.debug(_("found incomplete branch %s:%s\n")
937 % (short(n[0]), short(n[1])))
937 % (short(n[0]), short(n[1])))
938 search.append(n) # schedule branch range for scanning
938 search.append(n) # schedule branch range for scanning
939 seenbranch[n] = 1
939 seenbranch[n] = 1
940 else:
940 else:
941 if n[1] not in seen and n[1] not in fetch:
941 if n[1] not in seen and n[1] not in fetch:
942 if n[2] in m and n[3] in m:
942 if n[2] in m and n[3] in m:
943 self.ui.debug(_("found new changeset %s\n") %
943 self.ui.debug(_("found new changeset %s\n") %
944 short(n[1]))
944 short(n[1]))
945 fetch[n[1]] = 1 # earliest unknown
945 fetch[n[1]] = 1 # earliest unknown
946 base[n[2]] = 1 # latest known
946 base[n[2]] = 1 # latest known
947 continue
947 continue
948
948
949 for a in n[2:4]:
949 for a in n[2:4]:
950 if a not in rep:
950 if a not in rep:
951 r.append(a)
951 r.append(a)
952 rep[a] = 1
952 rep[a] = 1
953
953
954 seen[n[0]] = 1
954 seen[n[0]] = 1
955
955
956 if r:
956 if r:
957 reqcnt += 1
957 reqcnt += 1
958 self.ui.debug(_("request %d: %s\n") %
958 self.ui.debug(_("request %d: %s\n") %
959 (reqcnt, " ".join(map(short, r))))
959 (reqcnt, " ".join(map(short, r))))
960 for p in range(0, len(r), 10):
960 for p in range(0, len(r), 10):
961 for b in remote.branches(r[p:p+10]):
961 for b in remote.branches(r[p:p+10]):
962 self.ui.debug(_("received %s:%s\n") %
962 self.ui.debug(_("received %s:%s\n") %
963 (short(b[0]), short(b[1])))
963 (short(b[0]), short(b[1])))
964 if b[0] in m:
964 if b[0] in m:
965 self.ui.debug(_("found base node %s\n")
965 self.ui.debug(_("found base node %s\n")
966 % short(b[0]))
966 % short(b[0]))
967 base[b[0]] = 1
967 base[b[0]] = 1
968 elif b[0] not in seen:
968 elif b[0] not in seen:
969 unknown.append(b)
969 unknown.append(b)
970
970
971 # do binary search on the branches we found
971 # do binary search on the branches we found
972 while search:
972 while search:
973 n = search.pop(0)
973 n = search.pop(0)
974 reqcnt += 1
974 reqcnt += 1
975 l = remote.between([(n[0], n[1])])[0]
975 l = remote.between([(n[0], n[1])])[0]
976 l.append(n[1])
976 l.append(n[1])
977 p = n[0]
977 p = n[0]
978 f = 1
978 f = 1
979 for i in l:
979 for i in l:
980 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
980 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
981 if i in m:
981 if i in m:
982 if f <= 2:
982 if f <= 2:
983 self.ui.debug(_("found new branch changeset %s\n") %
983 self.ui.debug(_("found new branch changeset %s\n") %
984 short(p))
984 short(p))
985 fetch[p] = 1
985 fetch[p] = 1
986 base[i] = 1
986 base[i] = 1
987 else:
987 else:
988 self.ui.debug(_("narrowed branch search to %s:%s\n")
988 self.ui.debug(_("narrowed branch search to %s:%s\n")
989 % (short(p), short(i)))
989 % (short(p), short(i)))
990 search.append((p, i))
990 search.append((p, i))
991 break
991 break
992 p, f = i, f * 2
992 p, f = i, f * 2
993
993
994 # sanity check our fetch list
994 # sanity check our fetch list
995 for f in fetch.keys():
995 for f in fetch.keys():
996 if f in m:
996 if f in m:
997 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
997 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
998
998
999 if base.keys() == [nullid]:
999 if base.keys() == [nullid]:
1000 if force:
1000 if force:
1001 self.ui.warn(_("warning: repository is unrelated\n"))
1001 self.ui.warn(_("warning: repository is unrelated\n"))
1002 else:
1002 else:
1003 raise util.Abort(_("repository is unrelated"))
1003 raise util.Abort(_("repository is unrelated"))
1004
1004
1005 self.ui.note(_("found new changesets starting at ") +
1005 self.ui.note(_("found new changesets starting at ") +
1006 " ".join([short(f) for f in fetch]) + "\n")
1006 " ".join([short(f) for f in fetch]) + "\n")
1007
1007
1008 self.ui.debug(_("%d total queries\n") % reqcnt)
1008 self.ui.debug(_("%d total queries\n") % reqcnt)
1009
1009
1010 return fetch.keys()
1010 return fetch.keys()
1011
1011
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            # populate 'base' with the nodes common to both sides
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        # start with every node we have, then prune what remote has
        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
1060 def pull(self, remote, heads=None, force=False):
1060 def pull(self, remote, heads=None, force=False):
1061 l = self.lock()
1061 l = self.lock()
1062
1062
1063 fetch = self.findincoming(remote, force=force)
1063 fetch = self.findincoming(remote, force=force)
1064 if fetch == [nullid]:
1064 if fetch == [nullid]:
1065 self.ui.status(_("requesting all changes\n"))
1065 self.ui.status(_("requesting all changes\n"))
1066
1066
1067 if not fetch:
1067 if not fetch:
1068 self.ui.status(_("no changes found\n"))
1068 self.ui.status(_("no changes found\n"))
1069 return 0
1069 return 0
1070
1070
1071 if heads is None:
1071 if heads is None:
1072 cg = remote.changegroup(fetch, 'pull')
1072 cg = remote.changegroup(fetch, 'pull')
1073 else:
1073 else:
1074 cg = remote.changegroupsubset(fetch, heads, 'pull')
1074 cg = remote.changegroupsubset(fetch, heads, 'pull')
1075 return self.addchangegroup(cg)
1075 return self.addchangegroup(cg)
1076
1076
    def push(self, remote, force=False, revs=None):
        """Push outgoing changesets (limited to 'revs' if given) to remote.

        Returns 1 when nothing was pushed or the push was refused,
        otherwise the result of remote.addchangegroup().
        """
        lock = remote.lock()

        # refuse to push over unseen remote changes unless forced
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)
        if not force and inc:
            self.ui.warn(_("abort: unsynced remote changes!\n"))
            self.ui.status(_("(did you forget to sync?"
                             " use push -f to force)\n"))
            return 1

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return 1
        elif not force:
            # FIXME we don't properly detect creation of new heads
            # in the push -r case, assume the user knows what he's doing
            if not revs and len(remote_heads) < len(heads) \
               and remote_heads != [nullid]:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return 1

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return remote.addchangegroup(cg)
1114 def changegroupsubset(self, bases, heads, source):
1114 def changegroupsubset(self, bases, heads, source):
1115 """This function generates a changegroup consisting of all the nodes
1115 """This function generates a changegroup consisting of all the nodes
1116 that are descendents of any of the bases, and ancestors of any of
1116 that are descendents of any of the bases, and ancestors of any of
1117 the heads.
1117 the heads.
1118
1118
1119 It is fairly complex as determining which filenodes and which
1119 It is fairly complex as determining which filenodes and which
1120 manifest nodes need to be included for the changeset to be complete
1120 manifest nodes need to be included for the changeset to be complete
1121 is non-trivial.
1121 is non-trivial.
1122
1122
1123 Another wrinkle is doing the reverse, figuring out which changeset in
1123 Another wrinkle is doing the reverse, figuring out which changeset in
1124 the changegroup a particular filenode or manifestnode belongs to."""
1124 the changegroup a particular filenode or manifestnode belongs to."""
1125
1125
1126 self.hook('preoutgoing', throw=True, source=source)
1126 self.hook('preoutgoing', throw=True, source=source)
1127
1127
1128 # Set up some initial variables
1128 # Set up some initial variables
1129 # Make it easy to refer to self.changelog
1129 # Make it easy to refer to self.changelog
1130 cl = self.changelog
1130 cl = self.changelog
1131 # msng is short for missing - compute the list of changesets in this
1131 # msng is short for missing - compute the list of changesets in this
1132 # changegroup.
1132 # changegroup.
1133 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1133 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1134 # Some bases may turn out to be superfluous, and some heads may be
1134 # Some bases may turn out to be superfluous, and some heads may be
1135 # too. nodesbetween will return the minimal set of bases and heads
1135 # too. nodesbetween will return the minimal set of bases and heads
1136 # necessary to re-create the changegroup.
1136 # necessary to re-create the changegroup.
1137
1137
1138 # Known heads are the list of heads that it is assumed the recipient
1138 # Known heads are the list of heads that it is assumed the recipient
1139 # of this changegroup will know about.
1139 # of this changegroup will know about.
1140 knownheads = {}
1140 knownheads = {}
1141 # We assume that all parents of bases are known heads.
1141 # We assume that all parents of bases are known heads.
1142 for n in bases:
1142 for n in bases:
1143 for p in cl.parents(n):
1143 for p in cl.parents(n):
1144 if p != nullid:
1144 if p != nullid:
1145 knownheads[p] = 1
1145 knownheads[p] = 1
1146 knownheads = knownheads.keys()
1146 knownheads = knownheads.keys()
1147 if knownheads:
1147 if knownheads:
1148 # Now that we know what heads are known, we can compute which
1148 # Now that we know what heads are known, we can compute which
1149 # changesets are known. The recipient must know about all
1149 # changesets are known. The recipient must know about all
1150 # changesets required to reach the known heads from the null
1150 # changesets required to reach the known heads from the null
1151 # changeset.
1151 # changeset.
1152 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1152 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1153 junk = None
1153 junk = None
1154 # Transform the list into an ersatz set.
1154 # Transform the list into an ersatz set.
1155 has_cl_set = dict.fromkeys(has_cl_set)
1155 has_cl_set = dict.fromkeys(has_cl_set)
1156 else:
1156 else:
1157 # If there were no known heads, the recipient cannot be assumed to
1157 # If there were no known heads, the recipient cannot be assumed to
1158 # know about any changesets.
1158 # know about any changesets.
1159 has_cl_set = {}
1159 has_cl_set = {}
1160
1160
1161 # Make it easy to refer to self.manifest
1161 # Make it easy to refer to self.manifest
1162 mnfst = self.manifest
1162 mnfst = self.manifest
1163 # We don't know which manifests are missing yet
1163 # We don't know which manifests are missing yet
1164 msng_mnfst_set = {}
1164 msng_mnfst_set = {}
1165 # Nor do we know which filenodes are missing.
1165 # Nor do we know which filenodes are missing.
1166 msng_filenode_set = {}
1166 msng_filenode_set = {}
1167
1167
1168 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1168 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1169 junk = None
1169 junk = None
1170
1170
1171 # A changeset always belongs to itself, so the changenode lookup
1171 # A changeset always belongs to itself, so the changenode lookup
1172 # function for a changenode is identity.
1172 # function for a changenode is identity.
1173 def identity(x):
1173 def identity(x):
1174 return x
1174 return x
1175
1175
1176 # A function generating function. Sets up an environment for the
1176 # A function generating function. Sets up an environment for the
1177 # inner function.
1177 # inner function.
1178 def cmp_by_rev_func(revlog):
1178 def cmp_by_rev_func(revlog):
1179 # Compare two nodes by their revision number in the environment's
1179 # Compare two nodes by their revision number in the environment's
1180 # revision history. Since the revision number both represents the
1180 # revision history. Since the revision number both represents the
1181 # most efficient order to read the nodes in, and represents a
1181 # most efficient order to read the nodes in, and represents a
1182 # topological sorting of the nodes, this function is often useful.
1182 # topological sorting of the nodes, this function is often useful.
1183 def cmp_by_rev(a, b):
1183 def cmp_by_rev(a, b):
1184 return cmp(revlog.rev(a), revlog.rev(b))
1184 return cmp(revlog.rev(a), revlog.rev(b))
1185 return cmp_by_rev
1185 return cmp_by_rev
1186
1186
1187 # If we determine that a particular file or manifest node must be a
1187 # If we determine that a particular file or manifest node must be a
1188 # node that the recipient of the changegroup will already have, we can
1188 # node that the recipient of the changegroup will already have, we can
1189 # also assume the recipient will have all the parents. This function
1189 # also assume the recipient will have all the parents. This function
1190 # prunes them from the set of missing nodes.
1190 # prunes them from the set of missing nodes.
1191 def prune_parents(revlog, hasset, msngset):
1191 def prune_parents(revlog, hasset, msngset):
1192 haslst = hasset.keys()
1192 haslst = hasset.keys()
1193 haslst.sort(cmp_by_rev_func(revlog))
1193 haslst.sort(cmp_by_rev_func(revlog))
1194 for node in haslst:
1194 for node in haslst:
1195 parentlst = [p for p in revlog.parents(node) if p != nullid]
1195 parentlst = [p for p in revlog.parents(node) if p != nullid]
1196 while parentlst:
1196 while parentlst:
1197 n = parentlst.pop()
1197 n = parentlst.pop()
1198 if n not in hasset:
1198 if n not in hasset:
1199 hasset[n] = 1
1199 hasset[n] = 1
1200 p = [p for p in revlog.parents(n) if p != nullid]
1200 p = [p for p in revlog.parents(n) if p != nullid]
1201 parentlst.extend(p)
1201 parentlst.extend(p)
1202 for n in hasset:
1202 for n in hasset:
1203 msngset.pop(n, None)
1203 msngset.pop(n, None)
1204
1204
1205 # This is a function generating function used to set up an environment
1205 # This is a function generating function used to set up an environment
1206 # for the inner function to execute in.
1206 # for the inner function to execute in.
1207 def manifest_and_file_collector(changedfileset):
1207 def manifest_and_file_collector(changedfileset):
1208 # This is an information gathering function that gathers
1208 # This is an information gathering function that gathers
1209 # information from each changeset node that goes out as part of
1209 # information from each changeset node that goes out as part of
1210 # the changegroup. The information gathered is a list of which
1210 # the changegroup. The information gathered is a list of which
1211 # manifest nodes are potentially required (the recipient may
1211 # manifest nodes are potentially required (the recipient may
1212 # already have them) and total list of all files which were
1212 # already have them) and total list of all files which were
1213 # changed in any changeset in the changegroup.
1213 # changed in any changeset in the changegroup.
1214 #
1214 #
1215 # We also remember the first changenode we saw any manifest
1215 # We also remember the first changenode we saw any manifest
1216 # referenced by so we can later determine which changenode 'owns'
1216 # referenced by so we can later determine which changenode 'owns'
1217 # the manifest.
1217 # the manifest.
1218 def collect_manifests_and_files(clnode):
1218 def collect_manifests_and_files(clnode):
1219 c = cl.read(clnode)
1219 c = cl.read(clnode)
1220 for f in c[3]:
1220 for f in c[3]:
1221 # This is to make sure we only have one instance of each
1221 # This is to make sure we only have one instance of each
1222 # filename string for each filename.
1222 # filename string for each filename.
1223 changedfileset.setdefault(f, f)
1223 changedfileset.setdefault(f, f)
1224 msng_mnfst_set.setdefault(c[0], clnode)
1224 msng_mnfst_set.setdefault(c[0], clnode)
1225 return collect_manifests_and_files
1225 return collect_manifests_and_files
1226
1226
1227 # Figure out which manifest nodes (of the ones we think might be part
1227 # Figure out which manifest nodes (of the ones we think might be part
1228 # of the changegroup) the recipient must know about and remove them
1228 # of the changegroup) the recipient must know about and remove them
1229 # from the changegroup.
1229 # from the changegroup.
1230 def prune_manifests():
1230 def prune_manifests():
1231 has_mnfst_set = {}
1231 has_mnfst_set = {}
1232 for n in msng_mnfst_set:
1232 for n in msng_mnfst_set:
1233 # If a 'missing' manifest thinks it belongs to a changenode
1233 # If a 'missing' manifest thinks it belongs to a changenode
1234 # the recipient is assumed to have, obviously the recipient
1234 # the recipient is assumed to have, obviously the recipient
1235 # must have that manifest.
1235 # must have that manifest.
1236 linknode = cl.node(mnfst.linkrev(n))
1236 linknode = cl.node(mnfst.linkrev(n))
1237 if linknode in has_cl_set:
1237 if linknode in has_cl_set:
1238 has_mnfst_set[n] = 1
1238 has_mnfst_set[n] = 1
1239 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1239 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1240
1240
1241 # Use the information collected in collect_manifests_and_files to say
1241 # Use the information collected in collect_manifests_and_files to say
1242 # which changenode any manifestnode belongs to.
1242 # which changenode any manifestnode belongs to.
1243 def lookup_manifest_link(mnfstnode):
1243 def lookup_manifest_link(mnfstnode):
1244 return msng_mnfst_set[mnfstnode]
1244 return msng_mnfst_set[mnfstnode]
1245
1245
1246 # A function generating function that sets up the initial environment
1246 # A function generating function that sets up the initial environment
1247 # the inner function.
1247 # the inner function.
1248 def filenode_collector(changedfiles):
1248 def filenode_collector(changedfiles):
1249 next_rev = [0]
1249 next_rev = [0]
1250 # This gathers information from each manifestnode included in the
1250 # This gathers information from each manifestnode included in the
1251 # changegroup about which filenodes the manifest node references
1251 # changegroup about which filenodes the manifest node references
1252 # so we can include those in the changegroup too.
1252 # so we can include those in the changegroup too.
1253 #
1253 #
1254 # It also remembers which changenode each filenode belongs to. It
1254 # It also remembers which changenode each filenode belongs to. It
1255 # does this by assuming the a filenode belongs to the changenode
1255 # does this by assuming the a filenode belongs to the changenode
1256 # the first manifest that references it belongs to.
1256 # the first manifest that references it belongs to.
1257 def collect_msng_filenodes(mnfstnode):
1257 def collect_msng_filenodes(mnfstnode):
1258 r = mnfst.rev(mnfstnode)
1258 r = mnfst.rev(mnfstnode)
1259 if r == next_rev[0]:
1259 if r == next_rev[0]:
1260 # If the last rev we looked at was the one just previous,
1260 # If the last rev we looked at was the one just previous,
1261 # we only need to see a diff.
1261 # we only need to see a diff.
1262 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1262 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1263 # For each line in the delta
1263 # For each line in the delta
1264 for dline in delta.splitlines():
1264 for dline in delta.splitlines():
1265 # get the filename and filenode for that line
1265 # get the filename and filenode for that line
1266 f, fnode = dline.split('\0')
1266 f, fnode = dline.split('\0')
1267 fnode = bin(fnode[:40])
1267 fnode = bin(fnode[:40])
1268 f = changedfiles.get(f, None)
1268 f = changedfiles.get(f, None)
1269 # And if the file is in the list of files we care
1269 # And if the file is in the list of files we care
1270 # about.
1270 # about.
1271 if f is not None:
1271 if f is not None:
1272 # Get the changenode this manifest belongs to
1272 # Get the changenode this manifest belongs to
1273 clnode = msng_mnfst_set[mnfstnode]
1273 clnode = msng_mnfst_set[mnfstnode]
1274 # Create the set of filenodes for the file if
1274 # Create the set of filenodes for the file if
1275 # there isn't one already.
1275 # there isn't one already.
1276 ndset = msng_filenode_set.setdefault(f, {})
1276 ndset = msng_filenode_set.setdefault(f, {})
1277 # And set the filenode's changelog node to the
1277 # And set the filenode's changelog node to the
1278 # manifest's if it hasn't been set already.
1278 # manifest's if it hasn't been set already.
1279 ndset.setdefault(fnode, clnode)
1279 ndset.setdefault(fnode, clnode)
1280 else:
1280 else:
1281 # Otherwise we need a full manifest.
1281 # Otherwise we need a full manifest.
1282 m = mnfst.read(mnfstnode)
1282 m = mnfst.read(mnfstnode)
1283 # For every file in we care about.
1283 # For every file in we care about.
1284 for f in changedfiles:
1284 for f in changedfiles:
1285 fnode = m.get(f, None)
1285 fnode = m.get(f, None)
1286 # If it's in the manifest
1286 # If it's in the manifest
1287 if fnode is not None:
1287 if fnode is not None:
1288 # See comments above.
1288 # See comments above.
1289 clnode = msng_mnfst_set[mnfstnode]
1289 clnode = msng_mnfst_set[mnfstnode]
1290 ndset = msng_filenode_set.setdefault(f, {})
1290 ndset = msng_filenode_set.setdefault(f, {})
1291 ndset.setdefault(fnode, clnode)
1291 ndset.setdefault(fnode, clnode)
1292 # Remember the revision we hope to see next.
1292 # Remember the revision we hope to see next.
1293 next_rev[0] = r + 1
1293 next_rev[0] = r + 1
1294 return collect_msng_filenodes
1294 return collect_msng_filenodes
1295
1295
1296 # We have a list of filenodes we think we need for a file, lets remove
1296 # We have a list of filenodes we think we need for a file, lets remove
1297 # all those we now the recipient must have.
1297 # all those we now the recipient must have.
1298 def prune_filenodes(f, filerevlog):
1298 def prune_filenodes(f, filerevlog):
1299 msngset = msng_filenode_set[f]
1299 msngset = msng_filenode_set[f]
1300 hasset = {}
1300 hasset = {}
1301 # If a 'missing' filenode thinks it belongs to a changenode we
1301 # If a 'missing' filenode thinks it belongs to a changenode we
1302 # assume the recipient must have, then the recipient must have
1302 # assume the recipient must have, then the recipient must have
1303 # that filenode.
1303 # that filenode.
1304 for n in msngset:
1304 for n in msngset:
1305 clnode = cl.node(filerevlog.linkrev(n))
1305 clnode = cl.node(filerevlog.linkrev(n))
1306 if clnode in has_cl_set:
1306 if clnode in has_cl_set:
1307 hasset[n] = 1
1307 hasset[n] = 1
1308 prune_parents(filerevlog, hasset, msngset)
1308 prune_parents(filerevlog, hasset, msngset)
1309
1309
1310 # A function generator function that sets up the a context for the
1310 # A function generator function that sets up the a context for the
1311 # inner function.
1311 # inner function.
1312 def lookup_filenode_link_func(fname):
1312 def lookup_filenode_link_func(fname):
1313 msngset = msng_filenode_set[fname]
1313 msngset = msng_filenode_set[fname]
1314 # Lookup the changenode the filenode belongs to.
1314 # Lookup the changenode the filenode belongs to.
1315 def lookup_filenode_link(fnode):
1315 def lookup_filenode_link(fnode):
1316 return msngset[fnode]
1316 return msngset[fnode]
1317 return lookup_filenode_link
1317 return lookup_filenode_link
1318
1318
1319 # Now that we have all theses utility functions to help out and
1319 # Now that we have all theses utility functions to help out and
1320 # logically divide up the task, generate the group.
1320 # logically divide up the task, generate the group.
1321 def gengroup():
1321 def gengroup():
1322 # The set of changed files starts empty.
1322 # The set of changed files starts empty.
1323 changedfiles = {}
1323 changedfiles = {}
1324 # Create a changenode group generator that will call our functions
1324 # Create a changenode group generator that will call our functions
1325 # back to lookup the owning changenode and collect information.
1325 # back to lookup the owning changenode and collect information.
1326 group = cl.group(msng_cl_lst, identity,
1326 group = cl.group(msng_cl_lst, identity,
1327 manifest_and_file_collector(changedfiles))
1327 manifest_and_file_collector(changedfiles))
1328 for chnk in group:
1328 for chnk in group:
1329 yield chnk
1329 yield chnk
1330
1330
1331 # The list of manifests has been collected by the generator
1331 # The list of manifests has been collected by the generator
1332 # calling our functions back.
1332 # calling our functions back.
1333 prune_manifests()
1333 prune_manifests()
1334 msng_mnfst_lst = msng_mnfst_set.keys()
1334 msng_mnfst_lst = msng_mnfst_set.keys()
1335 # Sort the manifestnodes by revision number.
1335 # Sort the manifestnodes by revision number.
1336 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1336 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1337 # Create a generator for the manifestnodes that calls our lookup
1337 # Create a generator for the manifestnodes that calls our lookup
1338 # and data collection functions back.
1338 # and data collection functions back.
1339 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1339 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1340 filenode_collector(changedfiles))
1340 filenode_collector(changedfiles))
1341 for chnk in group:
1341 for chnk in group:
1342 yield chnk
1342 yield chnk
1343
1343
1344 # These are no longer needed, dereference and toss the memory for
1344 # These are no longer needed, dereference and toss the memory for
1345 # them.
1345 # them.
1346 msng_mnfst_lst = None
1346 msng_mnfst_lst = None
1347 msng_mnfst_set.clear()
1347 msng_mnfst_set.clear()
1348
1348
1349 changedfiles = changedfiles.keys()
1349 changedfiles = changedfiles.keys()
1350 changedfiles.sort()
1350 changedfiles.sort()
1351 # Go through all our files in order sorted by name.
1351 # Go through all our files in order sorted by name.
1352 for fname in changedfiles:
1352 for fname in changedfiles:
1353 filerevlog = self.file(fname)
1353 filerevlog = self.file(fname)
1354 # Toss out the filenodes that the recipient isn't really
1354 # Toss out the filenodes that the recipient isn't really
1355 # missing.
1355 # missing.
1356 if msng_filenode_set.has_key(fname):
1356 if msng_filenode_set.has_key(fname):
1357 prune_filenodes(fname, filerevlog)
1357 prune_filenodes(fname, filerevlog)
1358 msng_filenode_lst = msng_filenode_set[fname].keys()
1358 msng_filenode_lst = msng_filenode_set[fname].keys()
1359 else:
1359 else:
1360 msng_filenode_lst = []
1360 msng_filenode_lst = []
1361 # If any filenodes are left, generate the group for them,
1361 # If any filenodes are left, generate the group for them,
1362 # otherwise don't bother.
1362 # otherwise don't bother.
1363 if len(msng_filenode_lst) > 0:
1363 if len(msng_filenode_lst) > 0:
1364 yield changegroup.genchunk(fname)
1364 yield changegroup.genchunk(fname)
1365 # Sort the filenodes by their revision #
1365 # Sort the filenodes by their revision #
1366 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1366 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1367 # Create a group generator and only pass in a changenode
1367 # Create a group generator and only pass in a changenode
1368 # lookup function as we need to collect no information
1368 # lookup function as we need to collect no information
1369 # from filenodes.
1369 # from filenodes.
1370 group = filerevlog.group(msng_filenode_lst,
1370 group = filerevlog.group(msng_filenode_lst,
1371 lookup_filenode_link_func(fname))
1371 lookup_filenode_link_func(fname))
1372 for chnk in group:
1372 for chnk in group:
1373 yield chnk
1373 yield chnk
1374 if msng_filenode_set.has_key(fname):
1374 if msng_filenode_set.has_key(fname):
1375 # Don't need this anymore, toss it to free memory.
1375 # Don't need this anymore, toss it to free memory.
1376 del msng_filenode_set[fname]
1376 del msng_filenode_set[fname]
1377 # Signal that no more groups are left.
1377 # Signal that no more groups are left.
1378 yield changegroup.closechunk()
1378 yield changegroup.closechunk()
1379
1379
1380 if msng_cl_lst:
1380 if msng_cl_lst:
1381 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1381 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1382
1382
1383 return util.chunkbuffer(gengroup())
1383 return util.chunkbuffer(gengroup())
1384
1384
1385 def changegroup(self, basenodes, source):
1385 def changegroup(self, basenodes, source):
1386 """Generate a changegroup of all nodes that we have that a recipient
1386 """Generate a changegroup of all nodes that we have that a recipient
1387 doesn't.
1387 doesn't.
1388
1388
1389 This is much easier than the previous function as we can assume that
1389 This is much easier than the previous function as we can assume that
1390 the recipient has any changenode we aren't sending them."""
1390 the recipient has any changenode we aren't sending them."""
1391
1391
1392 self.hook('preoutgoing', throw=True, source=source)
1392 self.hook('preoutgoing', throw=True, source=source)
1393
1393
1394 cl = self.changelog
1394 cl = self.changelog
1395 nodes = cl.nodesbetween(basenodes, None)[0]
1395 nodes = cl.nodesbetween(basenodes, None)[0]
1396 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1396 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1397
1397
1398 def identity(x):
1398 def identity(x):
1399 return x
1399 return x
1400
1400
1401 def gennodelst(revlog):
1401 def gennodelst(revlog):
1402 for r in xrange(0, revlog.count()):
1402 for r in xrange(0, revlog.count()):
1403 n = revlog.node(r)
1403 n = revlog.node(r)
1404 if revlog.linkrev(n) in revset:
1404 if revlog.linkrev(n) in revset:
1405 yield n
1405 yield n
1406
1406
1407 def changed_file_collector(changedfileset):
1407 def changed_file_collector(changedfileset):
1408 def collect_changed_files(clnode):
1408 def collect_changed_files(clnode):
1409 c = cl.read(clnode)
1409 c = cl.read(clnode)
1410 for fname in c[3]:
1410 for fname in c[3]:
1411 changedfileset[fname] = 1
1411 changedfileset[fname] = 1
1412 return collect_changed_files
1412 return collect_changed_files
1413
1413
1414 def lookuprevlink_func(revlog):
1414 def lookuprevlink_func(revlog):
1415 def lookuprevlink(n):
1415 def lookuprevlink(n):
1416 return cl.node(revlog.linkrev(n))
1416 return cl.node(revlog.linkrev(n))
1417 return lookuprevlink
1417 return lookuprevlink
1418
1418
1419 def gengroup():
1419 def gengroup():
1420 # construct a list of all changed files
1420 # construct a list of all changed files
1421 changedfiles = {}
1421 changedfiles = {}
1422
1422
1423 for chnk in cl.group(nodes, identity,
1423 for chnk in cl.group(nodes, identity,
1424 changed_file_collector(changedfiles)):
1424 changed_file_collector(changedfiles)):
1425 yield chnk
1425 yield chnk
1426 changedfiles = changedfiles.keys()
1426 changedfiles = changedfiles.keys()
1427 changedfiles.sort()
1427 changedfiles.sort()
1428
1428
1429 mnfst = self.manifest
1429 mnfst = self.manifest
1430 nodeiter = gennodelst(mnfst)
1430 nodeiter = gennodelst(mnfst)
1431 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1431 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1432 yield chnk
1432 yield chnk
1433
1433
1434 for fname in changedfiles:
1434 for fname in changedfiles:
1435 filerevlog = self.file(fname)
1435 filerevlog = self.file(fname)
1436 nodeiter = gennodelst(filerevlog)
1436 nodeiter = gennodelst(filerevlog)
1437 nodeiter = list(nodeiter)
1437 nodeiter = list(nodeiter)
1438 if nodeiter:
1438 if nodeiter:
1439 yield changegroup.genchunk(fname)
1439 yield changegroup.genchunk(fname)
1440 lookup = lookuprevlink_func(filerevlog)
1440 lookup = lookuprevlink_func(filerevlog)
1441 for chnk in filerevlog.group(nodeiter, lookup):
1441 for chnk in filerevlog.group(nodeiter, lookup):
1442 yield chnk
1442 yield chnk
1443
1443
1444 yield changegroup.closechunk()
1444 yield changegroup.closechunk()
1445
1445
1446 if nodes:
1446 if nodes:
1447 self.hook('outgoing', node=hex(nodes[0]), source=source)
1447 self.hook('outgoing', node=hex(nodes[0]), source=source)
1448
1448
1449 return util.chunkbuffer(gengroup())
1449 return util.chunkbuffer(gengroup())
1450
1450
1451 def addchangegroup(self, source):
1451 def addchangegroup(self, source):
1452 """add changegroup to repo.
1452 """add changegroup to repo.
1453 returns number of heads modified or added + 1."""
1453 returns number of heads modified or added + 1."""
1454
1454
1455 def csmap(x):
1455 def csmap(x):
1456 self.ui.debug(_("add changeset %s\n") % short(x))
1456 self.ui.debug(_("add changeset %s\n") % short(x))
1457 return cl.count()
1457 return cl.count()
1458
1458
1459 def revmap(x):
1459 def revmap(x):
1460 return cl.rev(x)
1460 return cl.rev(x)
1461
1461
1462 if not source:
1462 if not source:
1463 return 0
1463 return 0
1464
1464
1465 self.hook('prechangegroup', throw=True)
1465 self.hook('prechangegroup', throw=True)
1466
1466
1467 changesets = files = revisions = 0
1467 changesets = files = revisions = 0
1468
1468
1469 tr = self.transaction()
1469 tr = self.transaction()
1470
1470
1471 # write changelog and manifest data to temp files so
1471 # write changelog and manifest data to temp files so
1472 # concurrent readers will not see inconsistent view
1472 # concurrent readers will not see inconsistent view
1473 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1473 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1474
1474
1475 oldheads = len(cl.heads())
1475 oldheads = len(cl.heads())
1476
1476
1477 # pull off the changeset group
1477 # pull off the changeset group
1478 self.ui.status(_("adding changesets\n"))
1478 self.ui.status(_("adding changesets\n"))
1479 co = cl.tip()
1479 co = cl.tip()
1480 chunkiter = changegroup.chunkiter(source)
1480 chunkiter = changegroup.chunkiter(source)
1481 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1481 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1482 cnr, cor = map(cl.rev, (cn, co))
1482 cnr, cor = map(cl.rev, (cn, co))
1483 if cn == nullid:
1483 if cn == nullid:
1484 cnr = cor
1484 cnr = cor
1485 changesets = cnr - cor
1485 changesets = cnr - cor
1486
1486
1487 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1487 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1488
1488
1489 # pull off the manifest group
1489 # pull off the manifest group
1490 self.ui.status(_("adding manifests\n"))
1490 self.ui.status(_("adding manifests\n"))
1491 mm = mf.tip()
1491 mm = mf.tip()
1492 chunkiter = changegroup.chunkiter(source)
1492 chunkiter = changegroup.chunkiter(source)
1493 mo = mf.addgroup(chunkiter, revmap, tr)
1493 mo = mf.addgroup(chunkiter, revmap, tr)
1494
1494
1495 # process the files
1495 # process the files
1496 self.ui.status(_("adding file changes\n"))
1496 self.ui.status(_("adding file changes\n"))
1497 while 1:
1497 while 1:
1498 f = changegroup.getchunk(source)
1498 f = changegroup.getchunk(source)
1499 if not f:
1499 if not f:
1500 break
1500 break
1501 self.ui.debug(_("adding %s revisions\n") % f)
1501 self.ui.debug(_("adding %s revisions\n") % f)
1502 fl = self.file(f)
1502 fl = self.file(f)
1503 o = fl.count()
1503 o = fl.count()
1504 chunkiter = changegroup.chunkiter(source)
1504 chunkiter = changegroup.chunkiter(source)
1505 n = fl.addgroup(chunkiter, revmap, tr)
1505 n = fl.addgroup(chunkiter, revmap, tr)
1506 revisions += fl.count() - o
1506 revisions += fl.count() - o
1507 files += 1
1507 files += 1
1508
1508
1509 # write order here is important so concurrent readers will see
1509 # write order here is important so concurrent readers will see
1510 # consistent view of repo
1510 # consistent view of repo
1511 mf.writedata()
1511 mf.writedata()
1512 cl.writedata()
1512 cl.writedata()
1513
1513
1514 # make changelog and manifest see real files again
1514 # make changelog and manifest see real files again
1515 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1515 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1516 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1516 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1517 self.changelog.checkinlinesize(tr)
1517 self.changelog.checkinlinesize(tr)
1518 self.manifest.checkinlinesize(tr)
1518 self.manifest.checkinlinesize(tr)
1519
1519
1520 newheads = len(self.changelog.heads())
1520 newheads = len(self.changelog.heads())
1521 heads = ""
1521 heads = ""
1522 if oldheads and newheads > oldheads:
1522 if oldheads and newheads > oldheads:
1523 heads = _(" (+%d heads)") % (newheads - oldheads)
1523 heads = _(" (+%d heads)") % (newheads - oldheads)
1524
1524
1525 self.ui.status(_("added %d changesets"
1525 self.ui.status(_("added %d changesets"
1526 " with %d changes to %d files%s\n")
1526 " with %d changes to %d files%s\n")
1527 % (changesets, revisions, files, heads))
1527 % (changesets, revisions, files, heads))
1528
1528
1529 self.hook('pretxnchangegroup', throw=True,
1529 self.hook('pretxnchangegroup', throw=True,
1530 node=hex(self.changelog.node(cor+1)))
1530 node=hex(self.changelog.node(cor+1)))
1531
1531
1532 tr.close()
1532 tr.close()
1533
1533
1534 if changesets > 0:
1534 if changesets > 0:
1535 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1535 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1536
1536
1537 for i in range(cor + 1, cnr + 1):
1537 for i in range(cor + 1, cnr + 1):
1538 self.hook("incoming", node=hex(self.changelog.node(i)))
1538 self.hook("incoming", node=hex(self.changelog.node(i)))
1539
1539
1540 return newheads - oldheads + 1
1540 return newheads - oldheads + 1
1541
1541
1542 def update(self, node, allow=False, force=False, choose=None,
1542 def update(self, node, allow=False, force=False, choose=None,
1543 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1543 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1544 pl = self.dirstate.parents()
1544 pl = self.dirstate.parents()
1545 if not force and pl[1] != nullid:
1545 if not force and pl[1] != nullid:
1546 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1546 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1547 return 1
1547 return 1
1548
1548
1549 err = False
1549 err = False
1550
1550
1551 p1, p2 = pl[0], node
1551 p1, p2 = pl[0], node
1552 pa = self.changelog.ancestor(p1, p2)
1552 pa = self.changelog.ancestor(p1, p2)
1553 m1n = self.changelog.read(p1)[0]
1553 m1n = self.changelog.read(p1)[0]
1554 m2n = self.changelog.read(p2)[0]
1554 m2n = self.changelog.read(p2)[0]
1555 man = self.manifest.ancestor(m1n, m2n)
1555 man = self.manifest.ancestor(m1n, m2n)
1556 m1 = self.manifest.read(m1n)
1556 m1 = self.manifest.read(m1n)
1557 mf1 = self.manifest.readflags(m1n)
1557 mf1 = self.manifest.readflags(m1n)
1558 m2 = self.manifest.read(m2n).copy()
1558 m2 = self.manifest.read(m2n).copy()
1559 mf2 = self.manifest.readflags(m2n)
1559 mf2 = self.manifest.readflags(m2n)
1560 ma = self.manifest.read(man)
1560 ma = self.manifest.read(man)
1561 mfa = self.manifest.readflags(man)
1561 mfa = self.manifest.readflags(man)
1562
1562
1563 modified, added, removed, deleted, unknown = self.changes()
1563 modified, added, removed, deleted, unknown = self.changes()
1564
1564
1565 # is this a jump, or a merge? i.e. is there a linear path
1565 # is this a jump, or a merge? i.e. is there a linear path
1566 # from p1 to p2?
1566 # from p1 to p2?
1567 linear_path = (pa == p1 or pa == p2)
1567 linear_path = (pa == p1 or pa == p2)
1568
1568
1569 if allow and linear_path:
1569 if allow and linear_path:
1570 raise util.Abort(_("there is nothing to merge, "
1570 raise util.Abort(_("there is nothing to merge, "
1571 "just use 'hg update'"))
1571 "just use 'hg update'"))
1572 if allow and not forcemerge:
1572 if allow and not forcemerge:
1573 if modified or added or removed:
1573 if modified or added or removed:
1574 raise util.Abort(_("outstanding uncommitted changes"))
1574 raise util.Abort(_("outstanding uncommitted changes"))
1575 if not forcemerge and not force:
1575 if not forcemerge and not force:
1576 for f in unknown:
1576 for f in unknown:
1577 if f in m2:
1577 if f in m2:
1578 t1 = self.wread(f)
1578 t1 = self.wread(f)
1579 t2 = self.file(f).read(m2[f])
1579 t2 = self.file(f).read(m2[f])
1580 if cmp(t1, t2) != 0:
1580 if cmp(t1, t2) != 0:
1581 raise util.Abort(_("'%s' already exists in the working"
1581 raise util.Abort(_("'%s' already exists in the working"
1582 " dir and differs from remote") % f)
1582 " dir and differs from remote") % f)
1583
1583
1584 # resolve the manifest to determine which files
1584 # resolve the manifest to determine which files
1585 # we care about merging
1585 # we care about merging
1586 self.ui.note(_("resolving manifests\n"))
1586 self.ui.note(_("resolving manifests\n"))
1587 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1587 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1588 (force, allow, moddirstate, linear_path))
1588 (force, allow, moddirstate, linear_path))
1589 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1589 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1590 (short(man), short(m1n), short(m2n)))
1590 (short(man), short(m1n), short(m2n)))
1591
1591
1592 merge = {}
1592 merge = {}
1593 get = {}
1593 get = {}
1594 remove = []
1594 remove = []
1595
1595
1596 # construct a working dir manifest
1596 # construct a working dir manifest
1597 mw = m1.copy()
1597 mw = m1.copy()
1598 mfw = mf1.copy()
1598 mfw = mf1.copy()
1599 umap = dict.fromkeys(unknown)
1599 umap = dict.fromkeys(unknown)
1600
1600
1601 for f in added + modified + unknown:
1601 for f in added + modified + unknown:
1602 mw[f] = ""
1602 mw[f] = ""
1603 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1603 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1604
1604
1605 if moddirstate and not wlock:
1605 if moddirstate and not wlock:
1606 wlock = self.wlock()
1606 wlock = self.wlock()
1607
1607
1608 for f in deleted + removed:
1608 for f in deleted + removed:
1609 if f in mw:
1609 if f in mw:
1610 del mw[f]
1610 del mw[f]
1611
1611
1612 # If we're jumping between revisions (as opposed to merging),
1612 # If we're jumping between revisions (as opposed to merging),
1613 # and if neither the working directory nor the target rev has
1613 # and if neither the working directory nor the target rev has
1614 # the file, then we need to remove it from the dirstate, to
1614 # the file, then we need to remove it from the dirstate, to
1615 # prevent the dirstate from listing the file when it is no
1615 # prevent the dirstate from listing the file when it is no
1616 # longer in the manifest.
1616 # longer in the manifest.
1617 if moddirstate and linear_path and f not in m2:
1617 if moddirstate and linear_path and f not in m2:
1618 self.dirstate.forget((f,))
1618 self.dirstate.forget((f,))
1619
1619
1620 # Compare manifests
1620 # Compare manifests
1621 for f, n in mw.iteritems():
1621 for f, n in mw.iteritems():
1622 if choose and not choose(f):
1622 if choose and not choose(f):
1623 continue
1623 continue
1624 if f in m2:
1624 if f in m2:
1625 s = 0
1625 s = 0
1626
1626
1627 # is the wfile new since m1, and match m2?
1627 # is the wfile new since m1, and match m2?
1628 if f not in m1:
1628 if f not in m1:
1629 t1 = self.wread(f)
1629 t1 = self.wread(f)
1630 t2 = self.file(f).read(m2[f])
1630 t2 = self.file(f).read(m2[f])
1631 if cmp(t1, t2) == 0:
1631 if cmp(t1, t2) == 0:
1632 n = m2[f]
1632 n = m2[f]
1633 del t1, t2
1633 del t1, t2
1634
1634
1635 # are files different?
1635 # are files different?
1636 if n != m2[f]:
1636 if n != m2[f]:
1637 a = ma.get(f, nullid)
1637 a = ma.get(f, nullid)
1638 # are both different from the ancestor?
1638 # are both different from the ancestor?
1639 if n != a and m2[f] != a:
1639 if n != a and m2[f] != a:
1640 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1640 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1641 # merge executable bits
1641 # merge executable bits
1642 # "if we changed or they changed, change in merge"
1642 # "if we changed or they changed, change in merge"
1643 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1643 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1644 mode = ((a^b) | (a^c)) ^ a
1644 mode = ((a^b) | (a^c)) ^ a
1645 merge[f] = (m1.get(f, nullid), m2[f], mode)
1645 merge[f] = (m1.get(f, nullid), m2[f], mode)
1646 s = 1
1646 s = 1
1647 # are we clobbering?
1647 # are we clobbering?
1648 # is remote's version newer?
1648 # is remote's version newer?
1649 # or are we going back in time?
1649 # or are we going back in time?
1650 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1650 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1651 self.ui.debug(_(" remote %s is newer, get\n") % f)
1651 self.ui.debug(_(" remote %s is newer, get\n") % f)
1652 get[f] = m2[f]
1652 get[f] = m2[f]
1653 s = 1
1653 s = 1
1654 elif f in umap or f in added:
1654 elif f in umap or f in added:
1655 # this unknown file is the same as the checkout
1655 # this unknown file is the same as the checkout
1656 # we need to reset the dirstate if the file was added
1656 # we need to reset the dirstate if the file was added
1657 get[f] = m2[f]
1657 get[f] = m2[f]
1658
1658
1659 if not s and mfw[f] != mf2[f]:
1659 if not s and mfw[f] != mf2[f]:
1660 if force:
1660 if force:
1661 self.ui.debug(_(" updating permissions for %s\n") % f)
1661 self.ui.debug(_(" updating permissions for %s\n") % f)
1662 util.set_exec(self.wjoin(f), mf2[f])
1662 util.set_exec(self.wjoin(f), mf2[f])
1663 else:
1663 else:
1664 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1664 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1665 mode = ((a^b) | (a^c)) ^ a
1665 mode = ((a^b) | (a^c)) ^ a
1666 if mode != b:
1666 if mode != b:
1667 self.ui.debug(_(" updating permissions for %s\n")
1667 self.ui.debug(_(" updating permissions for %s\n")
1668 % f)
1668 % f)
1669 util.set_exec(self.wjoin(f), mode)
1669 util.set_exec(self.wjoin(f), mode)
1670 del m2[f]
1670 del m2[f]
1671 elif f in ma:
1671 elif f in ma:
1672 if n != ma[f]:
1672 if n != ma[f]:
1673 r = _("d")
1673 r = _("d")
1674 if not force and (linear_path or allow):
1674 if not force and (linear_path or allow):
1675 r = self.ui.prompt(
1675 r = self.ui.prompt(
1676 (_(" local changed %s which remote deleted\n") % f) +
1676 (_(" local changed %s which remote deleted\n") % f) +
1677 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1677 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1678 if r == _("d"):
1678 if r == _("d"):
1679 remove.append(f)
1679 remove.append(f)
1680 else:
1680 else:
1681 self.ui.debug(_("other deleted %s\n") % f)
1681 self.ui.debug(_("other deleted %s\n") % f)
1682 remove.append(f) # other deleted it
1682 remove.append(f) # other deleted it
1683 else:
1683 else:
1684 # file is created on branch or in working directory
1684 # file is created on branch or in working directory
1685 if force and f not in umap:
1685 if force and f not in umap:
1686 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1686 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1687 remove.append(f)
1687 remove.append(f)
1688 elif n == m1.get(f, nullid): # same as parent
1688 elif n == m1.get(f, nullid): # same as parent
1689 if p2 == pa: # going backwards?
1689 if p2 == pa: # going backwards?
1690 self.ui.debug(_("remote deleted %s\n") % f)
1690 self.ui.debug(_("remote deleted %s\n") % f)
1691 remove.append(f)
1691 remove.append(f)
1692 else:
1692 else:
1693 self.ui.debug(_("local modified %s, keeping\n") % f)
1693 self.ui.debug(_("local modified %s, keeping\n") % f)
1694 else:
1694 else:
1695 self.ui.debug(_("working dir created %s, keeping\n") % f)
1695 self.ui.debug(_("working dir created %s, keeping\n") % f)
1696
1696
1697 for f, n in m2.iteritems():
1697 for f, n in m2.iteritems():
1698 if choose and not choose(f):
1698 if choose and not choose(f):
1699 continue
1699 continue
1700 if f[0] == "/":
1700 if f[0] == "/":
1701 continue
1701 continue
1702 if f in ma and n != ma[f]:
1702 if f in ma and n != ma[f]:
1703 r = _("k")
1703 r = _("k")
1704 if not force and (linear_path or allow):
1704 if not force and (linear_path or allow):
1705 r = self.ui.prompt(
1705 r = self.ui.prompt(
1706 (_("remote changed %s which local deleted\n") % f) +
1706 (_("remote changed %s which local deleted\n") % f) +
1707 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1707 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1708 if r == _("k"):
1708 if r == _("k"):
1709 get[f] = n
1709 get[f] = n
1710 elif f not in ma:
1710 elif f not in ma:
1711 self.ui.debug(_("remote created %s\n") % f)
1711 self.ui.debug(_("remote created %s\n") % f)
1712 get[f] = n
1712 get[f] = n
1713 else:
1713 else:
1714 if force or p2 == pa: # going backwards?
1714 if force or p2 == pa: # going backwards?
1715 self.ui.debug(_("local deleted %s, recreating\n") % f)
1715 self.ui.debug(_("local deleted %s, recreating\n") % f)
1716 get[f] = n
1716 get[f] = n
1717 else:
1717 else:
1718 self.ui.debug(_("local deleted %s\n") % f)
1718 self.ui.debug(_("local deleted %s\n") % f)
1719
1719
1720 del mw, m1, m2, ma
1720 del mw, m1, m2, ma
1721
1721
1722 if force:
1722 if force:
1723 for f in merge:
1723 for f in merge:
1724 get[f] = merge[f][1]
1724 get[f] = merge[f][1]
1725 merge = {}
1725 merge = {}
1726
1726
1727 if linear_path or force:
1727 if linear_path or force:
1728 # we don't need to do any magic, just jump to the new rev
1728 # we don't need to do any magic, just jump to the new rev
1729 branch_merge = False
1729 branch_merge = False
1730 p1, p2 = p2, nullid
1730 p1, p2 = p2, nullid
1731 else:
1731 else:
1732 if not allow:
1732 if not allow:
1733 self.ui.status(_("this update spans a branch"
1733 self.ui.status(_("this update spans a branch"
1734 " affecting the following files:\n"))
1734 " affecting the following files:\n"))
1735 fl = merge.keys() + get.keys()
1735 fl = merge.keys() + get.keys()
1736 fl.sort()
1736 fl.sort()
1737 for f in fl:
1737 for f in fl:
1738 cf = ""
1738 cf = ""
1739 if f in merge:
1739 if f in merge:
1740 cf = _(" (resolve)")
1740 cf = _(" (resolve)")
1741 self.ui.status(" %s%s\n" % (f, cf))
1741 self.ui.status(" %s%s\n" % (f, cf))
1742 self.ui.warn(_("aborting update spanning branches!\n"))
1742 self.ui.warn(_("aborting update spanning branches!\n"))
1743 self.ui.status(_("(use 'hg merge' to merge across branches"
1743 self.ui.status(_("(use 'hg merge' to merge across branches"
1744 " or 'hg update -C' to lose changes)\n"))
1744 " or 'hg update -C' to lose changes)\n"))
1745 return 1
1745 return 1
1746 branch_merge = True
1746 branch_merge = True
1747
1747
1748 # get the files we don't need to change
1748 # get the files we don't need to change
1749 files = get.keys()
1749 files = get.keys()
1750 files.sort()
1750 files.sort()
1751 for f in files:
1751 for f in files:
1752 if f[0] == "/":
1752 if f[0] == "/":
1753 continue
1753 continue
1754 self.ui.note(_("getting %s\n") % f)
1754 self.ui.note(_("getting %s\n") % f)
1755 t = self.file(f).read(get[f])
1755 t = self.file(f).read(get[f])
1756 self.wwrite(f, t)
1756 self.wwrite(f, t)
1757 util.set_exec(self.wjoin(f), mf2[f])
1757 util.set_exec(self.wjoin(f), mf2[f])
1758 if moddirstate:
1758 if moddirstate:
1759 if branch_merge:
1759 if branch_merge:
1760 self.dirstate.update([f], 'n', st_mtime=-1)
1760 self.dirstate.update([f], 'n', st_mtime=-1)
1761 else:
1761 else:
1762 self.dirstate.update([f], 'n')
1762 self.dirstate.update([f], 'n')
1763
1763
1764 # merge the tricky bits
1764 # merge the tricky bits
1765 failedmerge = []
1765 failedmerge = []
1766 files = merge.keys()
1766 files = merge.keys()
1767 files.sort()
1767 files.sort()
1768 xp1 = hex(p1)
1768 xp1 = hex(p1)
1769 xp2 = hex(p2)
1769 xp2 = hex(p2)
1770 for f in files:
1770 for f in files:
1771 self.ui.status(_("merging %s\n") % f)
1771 self.ui.status(_("merging %s\n") % f)
1772 my, other, flag = merge[f]
1772 my, other, flag = merge[f]
1773 ret = self.merge3(f, my, other, xp1, xp2)
1773 ret = self.merge3(f, my, other, xp1, xp2)
1774 if ret:
1774 if ret:
1775 err = True
1775 err = True
1776 failedmerge.append(f)
1776 failedmerge.append(f)
1777 util.set_exec(self.wjoin(f), flag)
1777 util.set_exec(self.wjoin(f), flag)
1778 if moddirstate:
1778 if moddirstate:
1779 if branch_merge:
1779 if branch_merge:
1780 # We've done a branch merge, mark this file as merged
1780 # We've done a branch merge, mark this file as merged
1781 # so that we properly record the merger later
1781 # so that we properly record the merger later
1782 self.dirstate.update([f], 'm')
1782 self.dirstate.update([f], 'm')
1783 else:
1783 else:
1784 # We've update-merged a locally modified file, so
1784 # We've update-merged a locally modified file, so
1785 # we set the dirstate to emulate a normal checkout
1785 # we set the dirstate to emulate a normal checkout
1786 # of that file some time in the past. Thus our
1786 # of that file some time in the past. Thus our
1787 # merge will appear as a normal local file
1787 # merge will appear as a normal local file
1788 # modification.
1788 # modification.
1789 f_len = len(self.file(f).read(other))
1789 f_len = len(self.file(f).read(other))
1790 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1790 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1791
1791
1792 remove.sort()
1792 remove.sort()
1793 for f in remove:
1793 for f in remove:
1794 self.ui.note(_("removing %s\n") % f)
1794 self.ui.note(_("removing %s\n") % f)
1795 util.audit_path(f)
1795 util.audit_path(f)
1796 try:
1796 try:
1797 util.unlink(self.wjoin(f))
1797 util.unlink(self.wjoin(f))
1798 except OSError, inst:
1798 except OSError, inst:
1799 if inst.errno != errno.ENOENT:
1799 if inst.errno != errno.ENOENT:
1800 self.ui.warn(_("update failed to remove %s: %s!\n") %
1800 self.ui.warn(_("update failed to remove %s: %s!\n") %
1801 (f, inst.strerror))
1801 (f, inst.strerror))
1802 if moddirstate:
1802 if moddirstate:
1803 if branch_merge:
1803 if branch_merge:
1804 self.dirstate.update(remove, 'r')
1804 self.dirstate.update(remove, 'r')
1805 else:
1805 else:
1806 self.dirstate.forget(remove)
1806 self.dirstate.forget(remove)
1807
1807
1808 if moddirstate:
1808 if moddirstate:
1809 self.dirstate.setparents(p1, p2)
1809 self.dirstate.setparents(p1, p2)
1810
1810
1811 if show_stats:
1811 if show_stats:
1812 stats = ((len(get), _("updated")),
1812 stats = ((len(get), _("updated")),
1813 (len(merge) - len(failedmerge), _("merged")),
1813 (len(merge) - len(failedmerge), _("merged")),
1814 (len(remove), _("removed")),
1814 (len(remove), _("removed")),
1815 (len(failedmerge), _("unresolved")))
1815 (len(failedmerge), _("unresolved")))
1816 note = ", ".join([_("%d files %s") % s for s in stats])
1816 note = ", ".join([_("%d files %s") % s for s in stats])
1817 self.ui.status("%s\n" % note)
1817 self.ui.status("%s\n" % note)
1818 if moddirstate:
1818 if moddirstate:
1819 if branch_merge:
1819 if branch_merge:
1820 if failedmerge:
1820 if failedmerge:
1821 self.ui.status(_("There are unresolved merges,"
1821 self.ui.status(_("There are unresolved merges,"
1822 " you can redo the full merge using:\n"
1822 " you can redo the full merge using:\n"
1823 " hg update -C %s\n"
1823 " hg update -C %s\n"
1824 " hg merge %s\n"
1824 " hg merge %s\n"
1825 % (self.changelog.rev(p1),
1825 % (self.changelog.rev(p1),
1826 self.changelog.rev(p2))))
1826 self.changelog.rev(p2))))
1827 else:
1827 else:
1828 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1828 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1829 elif failedmerge:
1829 elif failedmerge:
1830 self.ui.status(_("There are unresolved merges with"
1830 self.ui.status(_("There are unresolved merges with"
1831 " locally modified files.\n"))
1831 " locally modified files.\n"))
1832
1832
1833 return err
1833 return err
1834
1834
1835 def merge3(self, fn, my, other, p1, p2):
1835 def merge3(self, fn, my, other, p1, p2):
1836 """perform a 3-way merge in the working directory"""
1836 """perform a 3-way merge in the working directory"""
1837
1837
1838 def temp(prefix, node):
1838 def temp(prefix, node):
1839 pre = "%s~%s." % (os.path.basename(fn), prefix)
1839 pre = "%s~%s." % (os.path.basename(fn), prefix)
1840 (fd, name) = tempfile.mkstemp(prefix=pre)
1840 (fd, name) = tempfile.mkstemp(prefix=pre)
1841 f = os.fdopen(fd, "wb")
1841 f = os.fdopen(fd, "wb")
1842 self.wwrite(fn, fl.read(node), f)
1842 self.wwrite(fn, fl.read(node), f)
1843 f.close()
1843 f.close()
1844 return name
1844 return name
1845
1845
1846 fl = self.file(fn)
1846 fl = self.file(fn)
1847 base = fl.ancestor(my, other)
1847 base = fl.ancestor(my, other)
1848 a = self.wjoin(fn)
1848 a = self.wjoin(fn)
1849 b = temp("base", base)
1849 b = temp("base", base)
1850 c = temp("other", other)
1850 c = temp("other", other)
1851
1851
1852 self.ui.note(_("resolving %s\n") % fn)
1852 self.ui.note(_("resolving %s\n") % fn)
1853 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1853 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1854 (fn, short(my), short(other), short(base)))
1854 (fn, short(my), short(other), short(base)))
1855
1855
1856 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1856 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1857 or "hgmerge")
1857 or "hgmerge")
1858 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1858 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1859 environ={'HG_FILE': fn,
1859 environ={'HG_FILE': fn,
1860 'HG_MY_NODE': p1,
1860 'HG_MY_NODE': p1,
1861 'HG_OTHER_NODE': p2,
1861 'HG_OTHER_NODE': p2,
1862 'HG_FILE_MY_NODE': hex(my),
1862 'HG_FILE_MY_NODE': hex(my),
1863 'HG_FILE_OTHER_NODE': hex(other),
1863 'HG_FILE_OTHER_NODE': hex(other),
1864 'HG_FILE_BASE_NODE': hex(base)})
1864 'HG_FILE_BASE_NODE': hex(base)})
1865 if r:
1865 if r:
1866 self.ui.warn(_("merging %s failed!\n") % fn)
1866 self.ui.warn(_("merging %s failed!\n") % fn)
1867
1867
1868 os.unlink(b)
1868 os.unlink(b)
1869 os.unlink(c)
1869 os.unlink(c)
1870 return r
1870 return r
1871
1871
1872 def verify(self):
1872 def verify(self):
1873 filelinkrevs = {}
1873 filelinkrevs = {}
1874 filenodes = {}
1874 filenodes = {}
1875 changesets = revisions = files = 0
1875 changesets = revisions = files = 0
1876 errors = [0]
1876 errors = [0]
1877 warnings = [0]
1877 warnings = [0]
1878 neededmanifests = {}
1878 neededmanifests = {}
1879
1879
1880 def err(msg):
1880 def err(msg):
1881 self.ui.warn(msg + "\n")
1881 self.ui.warn(msg + "\n")
1882 errors[0] += 1
1882 errors[0] += 1
1883
1883
1884 def warn(msg):
1884 def warn(msg):
1885 self.ui.warn(msg + "\n")
1885 self.ui.warn(msg + "\n")
1886 warnings[0] += 1
1886 warnings[0] += 1
1887
1887
1888 def checksize(obj, name):
1888 def checksize(obj, name):
1889 d = obj.checksize()
1889 d = obj.checksize()
1890 if d[0]:
1890 if d[0]:
1891 err(_("%s data length off by %d bytes") % (name, d[0]))
1891 err(_("%s data length off by %d bytes") % (name, d[0]))
1892 if d[1]:
1892 if d[1]:
1893 err(_("%s index contains %d extra bytes") % (name, d[1]))
1893 err(_("%s index contains %d extra bytes") % (name, d[1]))
1894
1894
1895 def checkversion(obj, name):
1895 def checkversion(obj, name):
1896 if obj.version != revlog.REVLOGV0:
1896 if obj.version != revlog.REVLOGV0:
1897 if not revlogv1:
1897 if not revlogv1:
1898 warn(_("warning: `%s' uses revlog format 1") % name)
1898 warn(_("warning: `%s' uses revlog format 1") % name)
1899 elif revlogv1:
1899 elif revlogv1:
1900 warn(_("warning: `%s' uses revlog format 0") % name)
1900 warn(_("warning: `%s' uses revlog format 0") % name)
1901
1901
1902 revlogv1 = self.revlogversion != revlog.REVLOGV0
1902 revlogv1 = self.revlogversion != revlog.REVLOGV0
1903 if self.ui.verbose or revlogv1 != self.revlogv1:
1903 if self.ui.verbose or revlogv1 != self.revlogv1:
1904 self.ui.status(_("repository uses revlog format %d\n") %
1904 self.ui.status(_("repository uses revlog format %d\n") %
1905 (revlogv1 and 1 or 0))
1905 (revlogv1 and 1 or 0))
1906
1906
1907 seen = {}
1907 seen = {}
1908 self.ui.status(_("checking changesets\n"))
1908 self.ui.status(_("checking changesets\n"))
1909 checksize(self.changelog, "changelog")
1909 checksize(self.changelog, "changelog")
1910
1910
1911 for i in range(self.changelog.count()):
1911 for i in range(self.changelog.count()):
1912 changesets += 1
1912 changesets += 1
1913 n = self.changelog.node(i)
1913 n = self.changelog.node(i)
1914 l = self.changelog.linkrev(n)
1914 l = self.changelog.linkrev(n)
1915 if l != i:
1915 if l != i:
1916 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1916 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1917 if n in seen:
1917 if n in seen:
1918 err(_("duplicate changeset at revision %d") % i)
1918 err(_("duplicate changeset at revision %d") % i)
1919 seen[n] = 1
1919 seen[n] = 1
1920
1920
1921 for p in self.changelog.parents(n):
1921 for p in self.changelog.parents(n):
1922 if p not in self.changelog.nodemap:
1922 if p not in self.changelog.nodemap:
1923 err(_("changeset %s has unknown parent %s") %
1923 err(_("changeset %s has unknown parent %s") %
1924 (short(n), short(p)))
1924 (short(n), short(p)))
1925 try:
1925 try:
1926 changes = self.changelog.read(n)
1926 changes = self.changelog.read(n)
1927 except KeyboardInterrupt:
1927 except KeyboardInterrupt:
1928 self.ui.warn(_("interrupted"))
1928 self.ui.warn(_("interrupted"))
1929 raise
1929 raise
1930 except Exception, inst:
1930 except Exception, inst:
1931 err(_("unpacking changeset %s: %s") % (short(n), inst))
1931 err(_("unpacking changeset %s: %s") % (short(n), inst))
1932 continue
1932 continue
1933
1933
1934 neededmanifests[changes[0]] = n
1934 neededmanifests[changes[0]] = n
1935
1935
1936 for f in changes[3]:
1936 for f in changes[3]:
1937 filelinkrevs.setdefault(f, []).append(i)
1937 filelinkrevs.setdefault(f, []).append(i)
1938
1938
1939 seen = {}
1939 seen = {}
1940 self.ui.status(_("checking manifests\n"))
1940 self.ui.status(_("checking manifests\n"))
1941 checkversion(self.manifest, "manifest")
1941 checkversion(self.manifest, "manifest")
1942 checksize(self.manifest, "manifest")
1942 checksize(self.manifest, "manifest")
1943
1943
1944 for i in range(self.manifest.count()):
1944 for i in range(self.manifest.count()):
1945 n = self.manifest.node(i)
1945 n = self.manifest.node(i)
1946 l = self.manifest.linkrev(n)
1946 l = self.manifest.linkrev(n)
1947
1947
1948 if l < 0 or l >= self.changelog.count():
1948 if l < 0 or l >= self.changelog.count():
1949 err(_("bad manifest link (%d) at revision %d") % (l, i))
1949 err(_("bad manifest link (%d) at revision %d") % (l, i))
1950
1950
1951 if n in neededmanifests:
1951 if n in neededmanifests:
1952 del neededmanifests[n]
1952 del neededmanifests[n]
1953
1953
1954 if n in seen:
1954 if n in seen:
1955 err(_("duplicate manifest at revision %d") % i)
1955 err(_("duplicate manifest at revision %d") % i)
1956
1956
1957 seen[n] = 1
1957 seen[n] = 1
1958
1958
1959 for p in self.manifest.parents(n):
1959 for p in self.manifest.parents(n):
1960 if p not in self.manifest.nodemap:
1960 if p not in self.manifest.nodemap:
1961 err(_("manifest %s has unknown parent %s") %
1961 err(_("manifest %s has unknown parent %s") %
1962 (short(n), short(p)))
1962 (short(n), short(p)))
1963
1963
1964 try:
1964 try:
1965 delta = mdiff.patchtext(self.manifest.delta(n))
1965 delta = mdiff.patchtext(self.manifest.delta(n))
1966 except KeyboardInterrupt:
1966 except KeyboardInterrupt:
1967 self.ui.warn(_("interrupted"))
1967 self.ui.warn(_("interrupted"))
1968 raise
1968 raise
1969 except Exception, inst:
1969 except Exception, inst:
1970 err(_("unpacking manifest %s: %s") % (short(n), inst))
1970 err(_("unpacking manifest %s: %s") % (short(n), inst))
1971 continue
1971 continue
1972
1972
1973 try:
1973 try:
1974 ff = [ l.split('\0') for l in delta.splitlines() ]
1974 ff = [ l.split('\0') for l in delta.splitlines() ]
1975 for f, fn in ff:
1975 for f, fn in ff:
1976 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1976 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1977 except (ValueError, TypeError), inst:
1977 except (ValueError, TypeError), inst:
1978 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1978 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1979
1979
1980 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1980 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1981
1981
1982 for m, c in neededmanifests.items():
1982 for m, c in neededmanifests.items():
1983 err(_("Changeset %s refers to unknown manifest %s") %
1983 err(_("Changeset %s refers to unknown manifest %s") %
1984 (short(m), short(c)))
1984 (short(m), short(c)))
1985 del neededmanifests
1985 del neededmanifests
1986
1986
1987 for f in filenodes:
1987 for f in filenodes:
1988 if f not in filelinkrevs:
1988 if f not in filelinkrevs:
1989 err(_("file %s in manifest but not in changesets") % f)
1989 err(_("file %s in manifest but not in changesets") % f)
1990
1990
1991 for f in filelinkrevs:
1991 for f in filelinkrevs:
1992 if f not in filenodes:
1992 if f not in filenodes:
1993 err(_("file %s in changeset but not in manifest") % f)
1993 err(_("file %s in changeset but not in manifest") % f)
1994
1994
1995 self.ui.status(_("checking files\n"))
1995 self.ui.status(_("checking files\n"))
1996 ff = filenodes.keys()
1996 ff = filenodes.keys()
1997 ff.sort()
1997 ff.sort()
1998 for f in ff:
1998 for f in ff:
1999 if f == "/dev/null":
1999 if f == "/dev/null":
2000 continue
2000 continue
2001 files += 1
2001 files += 1
2002 if not f:
2002 if not f:
2003 err(_("file without name in manifest %s") % short(n))
2003 err(_("file without name in manifest %s") % short(n))
2004 continue
2004 continue
2005 fl = self.file(f)
2005 fl = self.file(f)
2006 checkversion(fl, f)
2006 checkversion(fl, f)
2007 checksize(fl, f)
2007 checksize(fl, f)
2008
2008
2009 nodes = {nullid: 1}
2009 nodes = {nullid: 1}
2010 seen = {}
2010 seen = {}
2011 for i in range(fl.count()):
2011 for i in range(fl.count()):
2012 revisions += 1
2012 revisions += 1
2013 n = fl.node(i)
2013 n = fl.node(i)
2014
2014
2015 if n in seen:
2015 if n in seen:
2016 err(_("%s: duplicate revision %d") % (f, i))
2016 err(_("%s: duplicate revision %d") % (f, i))
2017 if n not in filenodes[f]:
2017 if n not in filenodes[f]:
2018 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2018 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2019 else:
2019 else:
2020 del filenodes[f][n]
2020 del filenodes[f][n]
2021
2021
2022 flr = fl.linkrev(n)
2022 flr = fl.linkrev(n)
2023 if flr not in filelinkrevs.get(f, []):
2023 if flr not in filelinkrevs.get(f, []):
2024 err(_("%s:%s points to unexpected changeset %d")
2024 err(_("%s:%s points to unexpected changeset %d")
2025 % (f, short(n), flr))
2025 % (f, short(n), flr))
2026 else:
2026 else:
2027 filelinkrevs[f].remove(flr)
2027 filelinkrevs[f].remove(flr)
2028
2028
2029 # verify contents
2029 # verify contents
2030 try:
2030 try:
2031 t = fl.read(n)
2031 t = fl.read(n)
2032 except KeyboardInterrupt:
2032 except KeyboardInterrupt:
2033 self.ui.warn(_("interrupted"))
2033 self.ui.warn(_("interrupted"))
2034 raise
2034 raise
2035 except Exception, inst:
2035 except Exception, inst:
2036 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2036 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2037
2037
2038 # verify parents
2038 # verify parents
2039 (p1, p2) = fl.parents(n)
2039 (p1, p2) = fl.parents(n)
2040 if p1 not in nodes:
2040 if p1 not in nodes:
2041 err(_("file %s:%s unknown parent 1 %s") %
2041 err(_("file %s:%s unknown parent 1 %s") %
2042 (f, short(n), short(p1)))
2042 (f, short(n), short(p1)))
2043 if p2 not in nodes:
2043 if p2 not in nodes:
2044 err(_("file %s:%s unknown parent 2 %s") %
2044 err(_("file %s:%s unknown parent 2 %s") %
2045 (f, short(n), short(p1)))
2045 (f, short(n), short(p1)))
2046 nodes[n] = 1
2046 nodes[n] = 1
2047
2047
2048 # cross-check
2048 # cross-check
2049 for node in filenodes[f]:
2049 for node in filenodes[f]:
2050 err(_("node %s in manifests not in %s") % (hex(node), f))
2050 err(_("node %s in manifests not in %s") % (hex(node), f))
2051
2051
2052 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2052 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2053 (files, changesets, revisions))
2053 (files, changesets, revisions))
2054
2054
2055 if warnings[0]:
2055 if warnings[0]:
2056 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2056 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2057 if errors[0]:
2057 if errors[0]:
2058 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2058 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2059 return 1
2059 return 1
2060
2060
# used to avoid circular references so destructors work
def aftertrans(base):
    """Return a callback that promotes journal files to undo files.

    Only the repository *path* is captured by the closure -- never the
    repository object itself -- so that the transaction holding this
    callback does not create a reference cycle and destructors still run.
    """
    def renamefiles():
        util.rename(os.path.join(base, "journal"),
                    os.path.join(base, "undo"))
        util.rename(os.path.join(base, "journal.dirstate"),
                    os.path.join(base, "undo.dirstate"))
    return renamefiles
2069
2069
General Comments 0
You need to be logged in to leave comments. Login now