##// END OF EJS Templates
changegroup hooks: add source to hook parameters
Vadim Gelfer -
r2229:0ff326c2 default
parent child Browse files
Show More
@@ -1,2076 +1,2078 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog traceback")
15 demandload(globals(), "revlog traceback")
16
16
17 class localrepository(object):
17 class localrepository(object):
    def __del__(self):
        # Drop the reference to any pending transaction object on
        # teardown so it can be released with the repository.
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or with create=1, initialize) the repository at path.

        When no path is given, walk upward from the current directory
        until a '.hg' directory is found.  Raises repo.RepoError when
        no repository can be located or the named one does not exist.
        """
        if not path:
            # search upward from cwd for a .hg directory
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    # reached the filesystem root without finding .hg
                    raise repo.RepoError(_("no repo found"))
            path = p
        self.path = os.path.join(path, ".hg")

        if not create and not os.path.isdir(self.path):
            raise repo.RepoError(_("repository %s not found") % path)

        self.root = os.path.abspath(path)
        self.origroot = path
        self.ui = ui.ui(parentui=parentui)
        # opener for metadata (.hg), wopener for the working directory
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        # per-repository configuration is optional
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            pass

        # revlog format version and flags come from the configuration
        v = self.ui.revlogopts
        self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        fl = v.get('flags', None)
        flags = 0
        if fl != None:
            for x in fl.split():
                flags |= revlog.flagstr(x)
        elif self.revlogv1:
            flags = revlog.REVLOG_DEFAULT_FLAGS

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.opener, v)
        self.changelog = changelog.changelog(self.opener, v)

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily-populated caches; None means "not loaded yet"
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        if create:
            os.mkdir(self.path)
            os.mkdir(self.join("data"))

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
80
80
    def hook(self, name, throw=False, **args):
        """Run all configured hooks matching name.

        Hooks come from the [hooks] configuration section.  A value
        starting with 'python:' names an in-process callable; anything
        else is run as a shell command with the keyword arguments
        exported as (HG_-prefixed) environment variables.  When throw
        is true a failing hook raises util.Abort instead of only
        warning.  Returns the combined (or-ed) failure status.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            d = funcname.rfind('.')
            if d == -1:
                raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
                                 % (hname, funcname))
            modname = funcname[:d]
            try:
                obj = __import__(modname)
            except ImportError:
                raise util.Abort(_('%s hook is invalid '
                                   '(import of "%s" failed)') %
                                 (hname, modname))
            try:
                # walk the dotted path down to the callable itself
                for p in funcname.split('.')[1:]:
                    obj = getattr(obj, p)
            except AttributeError, err:
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not defined)') %
                                 (hname, funcname))
            if not callable(obj):
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not callable)') %
                                 (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                # never swallow user interruption
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0] % exc.args[1:]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                if self.ui.traceback:
                    traceback.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # run an external (shell) hook; nonzero exit means failure
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
                       [(k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # select hooks whose name (before any '.suffix') matches
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
159
159
160 def tags(self):
160 def tags(self):
161 '''return a mapping of tag to node'''
161 '''return a mapping of tag to node'''
162 if not self.tagscache:
162 if not self.tagscache:
163 self.tagscache = {}
163 self.tagscache = {}
164
164
165 def parsetag(line, context):
165 def parsetag(line, context):
166 if not line:
166 if not line:
167 return
167 return
168 s = l.split(" ", 1)
168 s = l.split(" ", 1)
169 if len(s) != 2:
169 if len(s) != 2:
170 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
170 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
171 return
171 return
172 node, key = s
172 node, key = s
173 try:
173 try:
174 bin_n = bin(node)
174 bin_n = bin(node)
175 except TypeError:
175 except TypeError:
176 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
176 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
177 return
177 return
178 if bin_n not in self.changelog.nodemap:
178 if bin_n not in self.changelog.nodemap:
179 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
179 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
180 return
180 return
181 self.tagscache[key.strip()] = bin_n
181 self.tagscache[key.strip()] = bin_n
182
182
183 # read each head of the tags file, ending with the tip
183 # read each head of the tags file, ending with the tip
184 # and add each tag found to the map, with "newer" ones
184 # and add each tag found to the map, with "newer" ones
185 # taking precedence
185 # taking precedence
186 fl = self.file(".hgtags")
186 fl = self.file(".hgtags")
187 h = fl.heads()
187 h = fl.heads()
188 h.reverse()
188 h.reverse()
189 for r in h:
189 for r in h:
190 count = 0
190 count = 0
191 for l in fl.read(r).splitlines():
191 for l in fl.read(r).splitlines():
192 count += 1
192 count += 1
193 parsetag(l, ".hgtags:%d" % count)
193 parsetag(l, ".hgtags:%d" % count)
194
194
195 try:
195 try:
196 f = self.opener("localtags")
196 f = self.opener("localtags")
197 count = 0
197 count = 0
198 for l in f:
198 for l in f:
199 count += 1
199 count += 1
200 parsetag(l, "localtags:%d" % count)
200 parsetag(l, "localtags:%d" % count)
201 except IOError:
201 except IOError:
202 pass
202 pass
203
203
204 self.tagscache['tip'] = self.changelog.tip()
204 self.tagscache['tip'] = self.changelog.tip()
205
205
206 return self.tagscache
206 return self.tagscache
207
207
208 def tagslist(self):
208 def tagslist(self):
209 '''return a list of tags ordered by revision'''
209 '''return a list of tags ordered by revision'''
210 l = []
210 l = []
211 for t, n in self.tags().items():
211 for t, n in self.tags().items():
212 try:
212 try:
213 r = self.changelog.rev(n)
213 r = self.changelog.rev(n)
214 except:
214 except:
215 r = -2 # sort to the beginning of the list if unknown
215 r = -2 # sort to the beginning of the list if unknown
216 l.append((r, t, n))
216 l.append((r, t, n))
217 l.sort()
217 l.sort()
218 return [(t, n) for r, t, n in l]
218 return [(t, n) for r, t, n in l]
219
219
220 def nodetags(self, node):
220 def nodetags(self, node):
221 '''return the tags associated with a node'''
221 '''return the tags associated with a node'''
222 if not self.nodetagscache:
222 if not self.nodetagscache:
223 self.nodetagscache = {}
223 self.nodetagscache = {}
224 for t, n in self.tags().items():
224 for t, n in self.tags().items():
225 self.nodetagscache.setdefault(n, []).append(t)
225 self.nodetagscache.setdefault(n, []).append(t)
226 return self.nodetagscache.get(node, [])
226 return self.nodetagscache.get(node, [])
227
227
228 def lookup(self, key):
228 def lookup(self, key):
229 try:
229 try:
230 return self.tags()[key]
230 return self.tags()[key]
231 except KeyError:
231 except KeyError:
232 try:
232 try:
233 return self.changelog.lookup(key)
233 return self.changelog.lookup(key)
234 except:
234 except:
235 raise repo.RepoError(_("unknown revision '%s'") % key)
235 raise repo.RepoError(_("unknown revision '%s'") % key)
236
236
237 def dev(self):
237 def dev(self):
238 return os.stat(self.path).st_dev
238 return os.stat(self.path).st_dev
239
239
    def local(self):
        # this repository lives on the local filesystem (as opposed to
        # remote repository classes, which report False)
        return True
242
242
243 def join(self, f):
243 def join(self, f):
244 return os.path.join(self.path, f)
244 return os.path.join(self.path, f)
245
245
246 def wjoin(self, f):
246 def wjoin(self, f):
247 return os.path.join(self.root, f)
247 return os.path.join(self.root, f)
248
248
249 def file(self, f):
249 def file(self, f):
250 if f[0] == '/':
250 if f[0] == '/':
251 f = f[1:]
251 f = f[1:]
252 return filelog.filelog(self.opener, f, self.revlogversion)
252 return filelog.filelog(self.opener, f, self.revlogversion)
253
253
    def getcwd(self):
        # current working directory, as the dirstate sees it
        # (relative to the repository root)
        return self.dirstate.getcwd()
256
256
    def wfile(self, f, mode='r'):
        # open file f from the working directory in the given mode
        return self.wopener(f, mode)
259
259
260 def wread(self, filename):
260 def wread(self, filename):
261 if self.encodepats == None:
261 if self.encodepats == None:
262 l = []
262 l = []
263 for pat, cmd in self.ui.configitems("encode"):
263 for pat, cmd in self.ui.configitems("encode"):
264 mf = util.matcher(self.root, "", [pat], [], [])[1]
264 mf = util.matcher(self.root, "", [pat], [], [])[1]
265 l.append((mf, cmd))
265 l.append((mf, cmd))
266 self.encodepats = l
266 self.encodepats = l
267
267
268 data = self.wopener(filename, 'r').read()
268 data = self.wopener(filename, 'r').read()
269
269
270 for mf, cmd in self.encodepats:
270 for mf, cmd in self.encodepats:
271 if mf(filename):
271 if mf(filename):
272 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
272 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
273 data = util.filter(data, cmd)
273 data = util.filter(data, cmd)
274 break
274 break
275
275
276 return data
276 return data
277
277
278 def wwrite(self, filename, data, fd=None):
278 def wwrite(self, filename, data, fd=None):
279 if self.decodepats == None:
279 if self.decodepats == None:
280 l = []
280 l = []
281 for pat, cmd in self.ui.configitems("decode"):
281 for pat, cmd in self.ui.configitems("decode"):
282 mf = util.matcher(self.root, "", [pat], [], [])[1]
282 mf = util.matcher(self.root, "", [pat], [], [])[1]
283 l.append((mf, cmd))
283 l.append((mf, cmd))
284 self.decodepats = l
284 self.decodepats = l
285
285
286 for mf, cmd in self.decodepats:
286 for mf, cmd in self.decodepats:
287 if mf(filename):
287 if mf(filename):
288 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
288 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
289 data = util.filter(data, cmd)
289 data = util.filter(data, cmd)
290 break
290 break
291
291
292 if fd:
292 if fd:
293 return fd.write(data)
293 return fd.write(data)
294 return self.wopener(filename, 'w').write(data)
294 return self.wopener(filename, 'w').write(data)
295
295
    def transaction(self):
        # reuse a running transaction as a nested one
        tr = self.transhandle
        if tr != None and tr.running():
            return tr.nest()

        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (fresh repository)
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        # aftertrans renames the journal files to undo.* on close,
        # making the transaction undoable
        tr = transaction.transaction(self.ui.warn, self.opener,
                                     self.join("journal"),
                                     aftertrans(self.path))
        self.transhandle = tr
        return tr
313
313
    def recover(self):
        # roll back an interrupted transaction, if any.  Returns True
        # when a journal was found and rolled back, False otherwise.
        l = self.lock()  # held (via refcount) for the duration of this call
        if os.path.exists(self.join("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.opener, self.join("journal"))
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
324
324
    def undo(self, wlock=None):
        # roll back the last committed transaction (journal saved as
        # "undo"/"undo.dirstate" by aftertrans) and restore the dirstate
        if not wlock:
            wlock = self.wlock()
        l = self.lock()  # held (via refcount) for the duration of this call
        if os.path.exists(self.join("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.opener, self.join("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no undo information available\n"))
337
337
    def wreload(self):
        # re-read the dirstate (working directory state) from disk
        self.dirstate.read()
340
340
    def reload(self):
        # re-read changelog and manifest from disk and invalidate the
        # tag caches, which are derived from them
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None
346
346
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        # acquire the named lock file.  First try non-blocking; if held
        # and wait is true, warn and retry with a timeout.  releasefn /
        # acquirefn are invoked on release / successful acquisition.
        try:
            l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %s\n") %
                         (desc, inst.args[0]))
            # default to 600 seconds timeout
            l = lock.lock(self.join(lockname),
                          int(self.ui.config("ui", "timeout") or 600),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
363
363
    def lock(self, wait=1):
        # acquire the repository (store) lock; caches are reloaded on
        # acquisition so we see other processes' committed changes
        return self.do_lock("lock", wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)
367
367
    def wlock(self, wait=1):
        # acquire the working directory lock; the dirstate is written
        # out on release and re-read on acquisition
        return self.do_lock("wlock", wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
372
372
373 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
373 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
374 "determine whether a new filenode is needed"
374 "determine whether a new filenode is needed"
375 fp1 = manifest1.get(filename, nullid)
375 fp1 = manifest1.get(filename, nullid)
376 fp2 = manifest2.get(filename, nullid)
376 fp2 = manifest2.get(filename, nullid)
377
377
378 if fp2 != nullid:
378 if fp2 != nullid:
379 # is one parent an ancestor of the other?
379 # is one parent an ancestor of the other?
380 fpa = filelog.ancestor(fp1, fp2)
380 fpa = filelog.ancestor(fp1, fp2)
381 if fpa == fp1:
381 if fpa == fp1:
382 fp1, fp2 = fp2, nullid
382 fp1, fp2 = fp2, nullid
383 elif fpa == fp2:
383 elif fpa == fp2:
384 fp2 = nullid
384 fp2 = nullid
385
385
386 # is the file unmodified from the parent? report existing entry
386 # is the file unmodified from the parent? report existing entry
387 if fp2 == nullid and text == filelog.read(fp1):
387 if fp2 == nullid and text == filelog.read(fp1):
388 return (fp1, None, None)
388 return (fp1, None, None)
389
389
390 return (None, fp1, fp2)
390 return (None, fp1, fp2)
391
391
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
        """Commit the given files directly, without consulting the
        dirstate for what changed (used e.g. by import/debug commands).

        Parents default to the dirstate parents; the dirstate is only
        updated when p1 matches the current first parent.
        """
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        changed = []

        # only touch the dirstate when committing on top of the
        # working directory's current parent
        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        if not wlock:
            wlock = self.wlock()
        l = self.lock()  # held (via refcount) for the duration of this call
        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wread(f)
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm

                # reuse an existing filenode when nothing changed
                (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    mm[f] = entry
                    continue

                mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
                changed.append(f)
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                # file missing from the working dir: treat as removed
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        user = user or self.ui.username()
        n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)
447
447
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None):
        """Commit changes to the repository.

        When files is given, commit exactly those (warning about
        untracked ones); otherwise commit everything changed according
        to the dirstate and match.  Returns the new changeset node, or
        None when nothing was committed (nothing changed, or the user
        aborted by leaving the commit message empty).
        """
        commit = []
        remove = []
        changed = []

        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            modified, added, removed, deleted, unknown = self.changes(match=match)
            commit = modified + added
            remove = removed

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        # a merge (p2 set) may be committed even with no file changes
        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            # record copy/rename metadata from the dirstate
            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    new[f] = entry
                    continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1 = m1.copy()
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text:
            # no message supplied: build a template and run the editor
            edittext = [""]
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            edittext = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)
            # an empty message aborts the commit
            if not edittext.rstrip():
                return None
            text = edittext

        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
        # pretxncommit may still abort and roll the transaction back
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
567
567
def walk(self, node=None, files=[], match=util.always, badmatch=None):
    """Yield (source, filename) pairs for files of interest.

    With a changeset ``node``, walk that revision's manifest and warn
    about requested files absent from it; without one, delegate to the
    dirstate's working-directory walker.  ``source`` is 'm' (manifest),
    'b' (bad but matched via badmatch), or whatever dirstate.walk yields.
    """
    if not node:
        # working directory: the dirstate knows how to walk it
        for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
            yield src, fn
        return
    # revision walk: first the manifest entries…
    wanted = dict.fromkeys(files)
    for fn in self.manifest.read(self.changelog.read(node)[0]):
        wanted.pop(fn, None)
        if match(fn):
            yield 'm', fn
    # …then requested files that were not in the manifest
    for fn in wanted:
        if badmatch and badmatch(fn):
            if match(fn):
                yield 'b', fn
        else:
            self.ui.warn(_('%s: No such file in rev %s\n') % (
                util.pathto(self.getcwd(), fn), short(node)))
585
585
def changes(self, node1=None, node2=None, files=[], match=util.always,
            wlock=None, show_ignored=None):
    """return changes between two nodes or node and working directory

    If node1 is None, use the first dirstate parent instead.
    If node2 is None, compare node1 with working directory.

    Returns (modified, added, removed, deleted, unknown) lists, plus an
    ``ignored`` list when show_ignored is not None; all lists sorted.
    """

    def fcmp(fn, mf):
        # full content compare of working file fn against the revision
        # recorded in manifest mf; non-zero means "differs"
        t1 = self.wread(fn)
        t2 = self.file(fn).read(mf.get(fn, nullid))
        return cmp(t1, t2)

    def mfmatches(node):
        # manifest of `node`, restricted to files accepted by `match`
        change = self.changelog.read(node)
        mf = dict(self.manifest.read(change[0]))
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    if node1:
        # read the manifest from node1 before the manifest from node2,
        # so that we'll hit the manifest cache if we're going through
        # all the revisions in parent->child order.
        mf1 = mfmatches(node1)

    # are we comparing the working directory?
    if not node2:
        if not wlock:
            try:
                # best effort: without the lock we just skip the
                # dirstate refresh below
                wlock = self.wlock(wait=0)
            except lock.LockException:
                wlock = None
        lookup, modified, added, removed, deleted, unknown, ignored = (
            self.dirstate.changes(files, match, show_ignored))

        # are we comparing working dir against its parent?
        if not node1:
            if lookup:
                # do a full compare of any files that might have changed
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup:
                    if fcmp(f, mf2):
                        modified.append(f)
                    elif wlock is not None:
                        # contents unchanged: record fresh stat data so
                        # future status calls can skip the compare
                        self.dirstate.update([f], "n")
        else:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            mf2 = mfmatches(self.dirstate.parents()[0])
            for f in lookup + modified + added:
                mf2[f] = ""
            for f in removed:
                if f in mf2:
                    del mf2[f]
    else:
        # we are comparing two revisions
        deleted, unknown, ignored = [], [], []
        mf2 = mfmatches(node2)

    if node1:
        # flush lists from dirstate before comparing manifests
        modified, added = [], []

        for fn in mf2:
            if mf1.has_key(fn):
                # an empty mf2 entry marks a working-dir file, which
                # always needs a content compare
                if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
                    modified.append(fn)
                del mf1[fn]
            else:
                added.append(fn)

        # whatever is left in mf1 was not seen in mf2, hence removed
        removed = mf1.keys()

    # sort and return results:
    for l in modified, added, removed, deleted, unknown, ignored:
        l.sort()
    if show_ignored is None:
        return (modified, added, removed, deleted, unknown)
    else:
        return (modified, added, removed, deleted, unknown, ignored)
668
668
def add(self, list, wlock=None):
    """Schedule the given files for addition at the next commit.

    Warns (without raising) about missing files, non-regular files,
    and files already tracked.
    """
    if not wlock:
        wlock = self.wlock()
    for name in list:
        path = self.wjoin(name)
        if not os.path.exists(path):
            self.ui.warn(_("%s does not exist!\n") % name)
            continue
        if not os.path.isfile(path):
            # directories and special files are not versioned
            self.ui.warn(_("%s not added: only files supported currently\n")
                         % name)
            continue
        if self.dirstate.state(name) in 'an':
            self.ui.warn(_("%s already tracked!\n") % name)
            continue
        self.dirstate.update([name], "a")
683
683
def forget(self, list, wlock=None):
    """Undo a pending add: drop files from the 'a'/'i' dirstate states."""
    if not wlock:
        wlock = self.wlock()
    for name in list:
        if self.dirstate.state(name) in 'ai':
            self.dirstate.forget([name])
        else:
            self.ui.warn(_("%s not added!\n") % name)
692
692
693 def remove(self, list, unlink=False, wlock=None):
693 def remove(self, list, unlink=False, wlock=None):
694 if unlink:
694 if unlink:
695 for f in list:
695 for f in list:
696 try:
696 try:
697 util.unlink(self.wjoin(f))
697 util.unlink(self.wjoin(f))
698 except OSError, inst:
698 except OSError, inst:
699 if inst.errno != errno.ENOENT:
699 if inst.errno != errno.ENOENT:
700 raise
700 raise
701 if not wlock:
701 if not wlock:
702 wlock = self.wlock()
702 wlock = self.wlock()
703 for f in list:
703 for f in list:
704 p = self.wjoin(f)
704 p = self.wjoin(f)
705 if os.path.exists(p):
705 if os.path.exists(p):
706 self.ui.warn(_("%s still exists!\n") % f)
706 self.ui.warn(_("%s still exists!\n") % f)
707 elif self.dirstate.state(f) == 'a':
707 elif self.dirstate.state(f) == 'a':
708 self.dirstate.forget([f])
708 self.dirstate.forget([f])
709 elif f not in self.dirstate:
709 elif f not in self.dirstate:
710 self.ui.warn(_("%s not tracked!\n") % f)
710 self.ui.warn(_("%s not tracked!\n") % f)
711 else:
711 else:
712 self.dirstate.update([f], "r")
712 self.dirstate.update([f], "r")
713
713
def undelete(self, list, wlock=None):
    """Restore files marked 'removed' from the first dirstate parent."""
    parent = self.dirstate.parents()[0]
    mnode = self.changelog.read(parent)[0]
    flags = self.manifest.readflags(mnode)
    manifest = self.manifest.read(mnode)
    if not wlock:
        wlock = self.wlock()
    for name in list:
        if self.dirstate.state(name) not in "r":
            self.ui.warn("%s not removed!\n" % name)
        else:
            # write back the parent revision's content and exec bit,
            # then mark the file normal again
            data = self.file(name).read(manifest[name])
            self.wwrite(name, data)
            util.set_exec(self.wjoin(name), flags[name])
            self.dirstate.update([name], "n")
729
729
def copy(self, source, dest, wlock=None):
    """Record in the dirstate that ``dest`` is a copy of ``source``."""
    target = self.wjoin(dest)
    if not os.path.exists(target):
        self.ui.warn(_("%s does not exist!\n") % dest)
        return
    if not os.path.isfile(target):
        self.ui.warn(_("copy failed: %s is not a file\n") % dest)
        return
    if not wlock:
        wlock = self.wlock()
    if self.dirstate.state(dest) == '?':
        # untracked destination: schedule it for addition too
        self.dirstate.update([dest], "a")
    self.dirstate.copy(source, dest)
742
742
def heads(self, start=None):
    """Return the changelog heads, sorted highest revision first."""
    found = self.changelog.heads(start)
    # decorate-sort-undecorate on the negated revision number
    decorated = [(-self.changelog.rev(h), h) for h in found]
    decorated.sort()
    return [node for (rev, node) in decorated]
749
749
# branchlookup returns a dict giving a list of branches for
# each head.  A branch is defined as the tag of a node or
# the branch of the node's parents.  If a node has multiple
# branch tags, tags are eliminated if they are visible from other
# branch tags.
#
# So, for this graph:  a->b->c->d->e
#                       \         /
#                        aa -----/
# a has tag 2.6.12
# d has tag 2.6.13
# e would have branch tags for 2.6.12 and 2.6.13.  Because the node
# for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is
# eliminated from the list.
#
# It is possible that more than one head will have the same branch tag.
# Callers need to check the result for multiple heads under the same
# branch tag if that is a problem for them (i.e. checkout of a specific
# branch).
#
# Passing in a specific branch will limit the depth of the search
# through the parents.  It won't limit the branches returned in the
# result though.
def branchlookup(self, heads=None, branch=None):
    """Return a dict mapping head node -> list of branch tags.

    A head's branch tags are the tags on it or inherited from its
    parents, minus any tag reachable from another of that head's
    branch tags.  ``branch`` limits how deep the parent traversal
    goes; ``heads`` limits which heads are examined.
    """
    if not heads:
        heads = self.heads()
    headt = [ h for h in heads ]
    chlog = self.changelog
    branches = {}      # node -> {tagged node visible from it: 1}
    merges = []        # postponed (second parent, found-so-far) pairs
    seenmerge = {}

    # traverse the tree once for each head, recording in the branches
    # dict which tags are visible from this head. The branches
    # dict also records which tags are visible from each tag
    # while we traverse.
    while headt or merges:
        if merges:
            # resume a traversal postponed at a merge's second parent
            n, found = merges.pop()
            visit = [n]
        else:
            h = headt.pop()
            visit = [h]
            found = [h]
            seen = {}
        while visit:
            n = visit.pop()
            if n in seen:
                continue
            pp = chlog.parents(n)
            tags = self.nodetags(n)
            if tags:
                for x in tags:
                    if x == 'tip':
                        continue
                    # mark this tagged node visible from every node
                    # collected so far on this walk
                    for f in found:
                        branches.setdefault(f, {})[n] = 1
                    branches.setdefault(n, {})[n] = 1
                    break
                if n not in found:
                    found.append(n)
                if branch in tags:
                    # reached the requested branch: stop descending here
                    continue
            seen[n] = 1
            if pp[1] != nullid and n not in seenmerge:
                merges.append((pp[1], [x for x in found]))
                seenmerge[n] = 1
            if pp[0] != nullid:
                visit.append(pp[0])
    # traverse the branches dict, eliminating branch tags from each
    # head that are visible from another branch tag for that head.
    out = {}
    viscache = {}
    for h in heads:
        def visible(node):
            # memoized set of nodes reachable from `node` through the
            # branches dict
            if node in viscache:
                return viscache[node]
            ret = {}
            visit = [node]
            while visit:
                x = visit.pop()
                if x in viscache:
                    ret.update(viscache[x])
                elif x not in ret:
                    ret[x] = 1
                    if x in branches:
                        visit[len(visit):] = branches[x].keys()
            viscache[node] = ret
            return ret
        if h not in branches:
            continue
        # O(n^2), but somewhat limited. This only searches the
        # tags visible from a specific head, not all the tags in the
        # whole repo.
        for b in branches[h]:
            vis = False
            for bb in branches[h].keys():
                if b != bb:
                    if b in visible(bb):
                        vis = True
                        break
            if not vis:
                l = out.setdefault(h, [])
                l[len(l):] = self.nodetags(b)
    return out
855
855
def branches(self, nodes):
    """Follow first parents from each node to the nearest branch point.

    For each starting node, returns a tuple (start, branch node,
    parent1, parent2), stopping at a merge (second parent set) or a
    root (first parent null).  Defaults to the changelog tip when no
    nodes are given.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    result = []
    for start in nodes:
        current = start
        while current:
            parents = self.changelog.parents(current)
            if parents[1] != nullid or parents[0] == nullid:
                result.append((start, current, parents[0], parents[1]))
                break
            current = parents[0]
    return result
869
869
def between(self, pairs):
    """Sample the first-parent chain between each (top, bottom) pair.

    Walking from top toward bottom, collect the nodes found at steps
    1, 2, 4, 8, ... (powers of two) along the chain; top and bottom
    themselves are never included.  Returns one list per pair.
    """
    result = []
    for top, bottom in pairs:
        samples = []
        node, step, nextpick = top, 0, 1
        while node != bottom:
            parent = self.changelog.parents(node)[0]
            if step == nextpick:
                samples.append(node)
                nextpick *= 2
            node = parent
            step += 1
        result.append(samples)
    return result
888
888
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return the earliest unknown changesets reachable from remote.

    Discovers, with a bounded number of remote round trips, the roots
    of the set of changesets the remote has and we do not.  ``base``
    (if given as a dict) is filled in with nodes known to be common.
    ``heads`` limits discovery to the given remote heads.  ``force``
    downgrades the "unrelated repository" abort to a warning.
    """
    m = self.changelog.nodemap
    search = []        # incomplete branch segments to bisect later
    fetch = {}         # earliest unknown changesets (the result)
    seen = {}
    seenbranch = {}
    if base == None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # local repo is empty: everything remote is incoming
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    rep = {}           # nodes already requested from the remote
    reqcnt = 0         # round-trip counter (reported at the end)

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid:
                break
            if n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            if n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        # both parents known: segment root is the
                        # boundary of the incoming set
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                        base[n[2]] = 1 # latest known
                        continue

                # queue unknown parents for the next remote request
                for a in n[2:4]:
                    if a not in rep:
                        r.append(a)
                        rep[a] = 1

            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch the branch queries ten nodes at a time
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    if b[0] in m:
                        self.ui.debug(_("found base node %s\n")
                                      % short(b[0]))
                        base[b[0]] = 1
                    elif b[0] not in seen:
                        unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    # adjacent known/unknown pair: boundary located
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    # narrow to the (unknown, known) sub-range
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        # no common ancestry at all between the two repositories
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
1018
1018
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base == None:
        # no pre-computed common set: discover it now
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    # start from the full node set and prune it down
    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1066
1066
def pull(self, remote, heads=None, force=False):
    """Fetch missing changesets from ``remote`` and add them locally.

    Returns 0 when there is nothing to pull, otherwise the result of
    addchangegroup on the received changegroup.
    """
    lck = self.lock()

    roots = self.findincoming(remote, force=force)
    if roots == [nullid]:
        self.ui.status(_("requesting all changes\n"))
    if not roots:
        self.ui.status(_("no changes found\n"))
        return 0

    # full changegroup, or a subset limited to the requested heads
    if heads is None:
        group = remote.changegroup(roots, 'pull')
    else:
        group = remote.changegroupsubset(roots, heads, 'pull')
    return self.addchangegroup(group)
1083
1083
def push(self, remote, force=False, revs=None):
    """Send changesets missing on ``remote``.

    Returns 1 when nothing is pushed (unsynced remote changes, no
    outgoing changesets, or the push would create new remote heads
    without ``force``); otherwise the remote addchangegroup result.
    """
    lock = remote.lock()

    base = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, base, remote_heads, force=force)
    if inc and not force:
        self.ui.warn(_("abort: unsynced remote changes!\n"))
        self.ui.status(_("(did you forget to sync?"
                         " use push -f to force)\n"))
        return 1

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is None:
        bases, heads = update, self.changelog.heads()
    else:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)

    if not bases:
        self.ui.status(_("no changes found\n"))
        return 1
    if not force:
        # FIXME we don't properly detect creation of new heads
        # in the push -r case, assume the user knows what he's doing
        if not revs and len(remote_heads) < len(heads) \
           and remote_heads != [nullid]:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return 1

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return remote.addchangegroup(cg)
1120
1120
1121 def changegroupsubset(self, bases, heads, source):
1121 def changegroupsubset(self, bases, heads, source):
1122 """This function generates a changegroup consisting of all the nodes
1122 """This function generates a changegroup consisting of all the nodes
1123 that are descendents of any of the bases, and ancestors of any of
1123 that are descendents of any of the bases, and ancestors of any of
1124 the heads.
1124 the heads.
1125
1125
1126 It is fairly complex as determining which filenodes and which
1126 It is fairly complex as determining which filenodes and which
1127 manifest nodes need to be included for the changeset to be complete
1127 manifest nodes need to be included for the changeset to be complete
1128 is non-trivial.
1128 is non-trivial.
1129
1129
1130 Another wrinkle is doing the reverse, figuring out which changeset in
1130 Another wrinkle is doing the reverse, figuring out which changeset in
1131 the changegroup a particular filenode or manifestnode belongs to."""
1131 the changegroup a particular filenode or manifestnode belongs to."""
1132
1132
1133 self.hook('preoutgoing', throw=True, source=source)
1133 self.hook('preoutgoing', throw=True, source=source)
1134
1134
1135 # Set up some initial variables
1135 # Set up some initial variables
1136 # Make it easy to refer to self.changelog
1136 # Make it easy to refer to self.changelog
1137 cl = self.changelog
1137 cl = self.changelog
1138 # msng is short for missing - compute the list of changesets in this
1138 # msng is short for missing - compute the list of changesets in this
1139 # changegroup.
1139 # changegroup.
1140 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1140 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1141 # Some bases may turn out to be superfluous, and some heads may be
1141 # Some bases may turn out to be superfluous, and some heads may be
1142 # too. nodesbetween will return the minimal set of bases and heads
1142 # too. nodesbetween will return the minimal set of bases and heads
1143 # necessary to re-create the changegroup.
1143 # necessary to re-create the changegroup.
1144
1144
1145 # Known heads are the list of heads that it is assumed the recipient
1145 # Known heads are the list of heads that it is assumed the recipient
1146 # of this changegroup will know about.
1146 # of this changegroup will know about.
1147 knownheads = {}
1147 knownheads = {}
1148 # We assume that all parents of bases are known heads.
1148 # We assume that all parents of bases are known heads.
1149 for n in bases:
1149 for n in bases:
1150 for p in cl.parents(n):
1150 for p in cl.parents(n):
1151 if p != nullid:
1151 if p != nullid:
1152 knownheads[p] = 1
1152 knownheads[p] = 1
1153 knownheads = knownheads.keys()
1153 knownheads = knownheads.keys()
1154 if knownheads:
1154 if knownheads:
1155 # Now that we know what heads are known, we can compute which
1155 # Now that we know what heads are known, we can compute which
1156 # changesets are known. The recipient must know about all
1156 # changesets are known. The recipient must know about all
1157 # changesets required to reach the known heads from the null
1157 # changesets required to reach the known heads from the null
1158 # changeset.
1158 # changeset.
1159 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1159 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1160 junk = None
1160 junk = None
1161 # Transform the list into an ersatz set.
1161 # Transform the list into an ersatz set.
1162 has_cl_set = dict.fromkeys(has_cl_set)
1162 has_cl_set = dict.fromkeys(has_cl_set)
1163 else:
1163 else:
1164 # If there were no known heads, the recipient cannot be assumed to
1164 # If there were no known heads, the recipient cannot be assumed to
1165 # know about any changesets.
1165 # know about any changesets.
1166 has_cl_set = {}
1166 has_cl_set = {}
1167
1167
1168 # Make it easy to refer to self.manifest
1168 # Make it easy to refer to self.manifest
1169 mnfst = self.manifest
1169 mnfst = self.manifest
1170 # We don't know which manifests are missing yet
1170 # We don't know which manifests are missing yet
1171 msng_mnfst_set = {}
1171 msng_mnfst_set = {}
1172 # Nor do we know which filenodes are missing.
1172 # Nor do we know which filenodes are missing.
1173 msng_filenode_set = {}
1173 msng_filenode_set = {}
1174
1174
1175 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1175 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1176 junk = None
1176 junk = None
1177
1177
1178 # A changeset always belongs to itself, so the changenode lookup
1178 # A changeset always belongs to itself, so the changenode lookup
1179 # function for a changenode is identity.
1179 # function for a changenode is identity.
1180 def identity(x):
1180 def identity(x):
1181 return x
1181 return x
1182
1182
1183 # A function generating function. Sets up an environment for the
1183 # A function generating function. Sets up an environment for the
1184 # inner function.
1184 # inner function.
1185 def cmp_by_rev_func(revlog):
1185 def cmp_by_rev_func(revlog):
1186 # Compare two nodes by their revision number in the environment's
1186 # Compare two nodes by their revision number in the environment's
1187 # revision history. Since the revision number both represents the
1187 # revision history. Since the revision number both represents the
1188 # most efficient order to read the nodes in, and represents a
1188 # most efficient order to read the nodes in, and represents a
1189 # topological sorting of the nodes, this function is often useful.
1189 # topological sorting of the nodes, this function is often useful.
1190 def cmp_by_rev(a, b):
1190 def cmp_by_rev(a, b):
1191 return cmp(revlog.rev(a), revlog.rev(b))
1191 return cmp(revlog.rev(a), revlog.rev(b))
1192 return cmp_by_rev
1192 return cmp_by_rev
1193
1193
1194 # If we determine that a particular file or manifest node must be a
1194 # If we determine that a particular file or manifest node must be a
1195 # node that the recipient of the changegroup will already have, we can
1195 # node that the recipient of the changegroup will already have, we can
1196 # also assume the recipient will have all the parents. This function
1196 # also assume the recipient will have all the parents. This function
1197 # prunes them from the set of missing nodes.
1197 # prunes them from the set of missing nodes.
1198 def prune_parents(revlog, hasset, msngset):
1198 def prune_parents(revlog, hasset, msngset):
1199 haslst = hasset.keys()
1199 haslst = hasset.keys()
1200 haslst.sort(cmp_by_rev_func(revlog))
1200 haslst.sort(cmp_by_rev_func(revlog))
1201 for node in haslst:
1201 for node in haslst:
1202 parentlst = [p for p in revlog.parents(node) if p != nullid]
1202 parentlst = [p for p in revlog.parents(node) if p != nullid]
1203 while parentlst:
1203 while parentlst:
1204 n = parentlst.pop()
1204 n = parentlst.pop()
1205 if n not in hasset:
1205 if n not in hasset:
1206 hasset[n] = 1
1206 hasset[n] = 1
1207 p = [p for p in revlog.parents(n) if p != nullid]
1207 p = [p for p in revlog.parents(n) if p != nullid]
1208 parentlst.extend(p)
1208 parentlst.extend(p)
1209 for n in hasset:
1209 for n in hasset:
1210 msngset.pop(n, None)
1210 msngset.pop(n, None)
1211
1211
1212 # This is a function generating function used to set up an environment
1212 # This is a function generating function used to set up an environment
1213 # for the inner function to execute in.
1213 # for the inner function to execute in.
1214 def manifest_and_file_collector(changedfileset):
1214 def manifest_and_file_collector(changedfileset):
1215 # This is an information gathering function that gathers
1215 # This is an information gathering function that gathers
1216 # information from each changeset node that goes out as part of
1216 # information from each changeset node that goes out as part of
1217 # the changegroup. The information gathered is a list of which
1217 # the changegroup. The information gathered is a list of which
1218 # manifest nodes are potentially required (the recipient may
1218 # manifest nodes are potentially required (the recipient may
1219 # already have them) and total list of all files which were
1219 # already have them) and total list of all files which were
1220 # changed in any changeset in the changegroup.
1220 # changed in any changeset in the changegroup.
1221 #
1221 #
1222 # We also remember the first changenode we saw any manifest
1222 # We also remember the first changenode we saw any manifest
1223 # referenced by so we can later determine which changenode 'owns'
1223 # referenced by so we can later determine which changenode 'owns'
1224 # the manifest.
1224 # the manifest.
1225 def collect_manifests_and_files(clnode):
1225 def collect_manifests_and_files(clnode):
1226 c = cl.read(clnode)
1226 c = cl.read(clnode)
1227 for f in c[3]:
1227 for f in c[3]:
1228 # This is to make sure we only have one instance of each
1228 # This is to make sure we only have one instance of each
1229 # filename string for each filename.
1229 # filename string for each filename.
1230 changedfileset.setdefault(f, f)
1230 changedfileset.setdefault(f, f)
1231 msng_mnfst_set.setdefault(c[0], clnode)
1231 msng_mnfst_set.setdefault(c[0], clnode)
1232 return collect_manifests_and_files
1232 return collect_manifests_and_files
1233
1233
1234 # Figure out which manifest nodes (of the ones we think might be part
1234 # Figure out which manifest nodes (of the ones we think might be part
1235 # of the changegroup) the recipient must know about and remove them
1235 # of the changegroup) the recipient must know about and remove them
1236 # from the changegroup.
1236 # from the changegroup.
1237 def prune_manifests():
1237 def prune_manifests():
1238 has_mnfst_set = {}
1238 has_mnfst_set = {}
1239 for n in msng_mnfst_set:
1239 for n in msng_mnfst_set:
1240 # If a 'missing' manifest thinks it belongs to a changenode
1240 # If a 'missing' manifest thinks it belongs to a changenode
1241 # the recipient is assumed to have, obviously the recipient
1241 # the recipient is assumed to have, obviously the recipient
1242 # must have that manifest.
1242 # must have that manifest.
1243 linknode = cl.node(mnfst.linkrev(n))
1243 linknode = cl.node(mnfst.linkrev(n))
1244 if linknode in has_cl_set:
1244 if linknode in has_cl_set:
1245 has_mnfst_set[n] = 1
1245 has_mnfst_set[n] = 1
1246 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1246 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1247
1247
1248 # Use the information collected in collect_manifests_and_files to say
1248 # Use the information collected in collect_manifests_and_files to say
1249 # which changenode any manifestnode belongs to.
1249 # which changenode any manifestnode belongs to.
1250 def lookup_manifest_link(mnfstnode):
1250 def lookup_manifest_link(mnfstnode):
1251 return msng_mnfst_set[mnfstnode]
1251 return msng_mnfst_set[mnfstnode]
1252
1252
1253 # A function generating function that sets up the initial environment
1253 # A function generating function that sets up the initial environment
1254 # the inner function.
1254 # the inner function.
1255 def filenode_collector(changedfiles):
1255 def filenode_collector(changedfiles):
1256 next_rev = [0]
1256 next_rev = [0]
1257 # This gathers information from each manifestnode included in the
1257 # This gathers information from each manifestnode included in the
1258 # changegroup about which filenodes the manifest node references
1258 # changegroup about which filenodes the manifest node references
1259 # so we can include those in the changegroup too.
1259 # so we can include those in the changegroup too.
1260 #
1260 #
1261 # It also remembers which changenode each filenode belongs to. It
1261 # It also remembers which changenode each filenode belongs to. It
1262 # does this by assuming the a filenode belongs to the changenode
1262 # does this by assuming the a filenode belongs to the changenode
1263 # the first manifest that references it belongs to.
1263 # the first manifest that references it belongs to.
1264 def collect_msng_filenodes(mnfstnode):
1264 def collect_msng_filenodes(mnfstnode):
1265 r = mnfst.rev(mnfstnode)
1265 r = mnfst.rev(mnfstnode)
1266 if r == next_rev[0]:
1266 if r == next_rev[0]:
1267 # If the last rev we looked at was the one just previous,
1267 # If the last rev we looked at was the one just previous,
1268 # we only need to see a diff.
1268 # we only need to see a diff.
1269 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1269 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1270 # For each line in the delta
1270 # For each line in the delta
1271 for dline in delta.splitlines():
1271 for dline in delta.splitlines():
1272 # get the filename and filenode for that line
1272 # get the filename and filenode for that line
1273 f, fnode = dline.split('\0')
1273 f, fnode = dline.split('\0')
1274 fnode = bin(fnode[:40])
1274 fnode = bin(fnode[:40])
1275 f = changedfiles.get(f, None)
1275 f = changedfiles.get(f, None)
1276 # And if the file is in the list of files we care
1276 # And if the file is in the list of files we care
1277 # about.
1277 # about.
1278 if f is not None:
1278 if f is not None:
1279 # Get the changenode this manifest belongs to
1279 # Get the changenode this manifest belongs to
1280 clnode = msng_mnfst_set[mnfstnode]
1280 clnode = msng_mnfst_set[mnfstnode]
1281 # Create the set of filenodes for the file if
1281 # Create the set of filenodes for the file if
1282 # there isn't one already.
1282 # there isn't one already.
1283 ndset = msng_filenode_set.setdefault(f, {})
1283 ndset = msng_filenode_set.setdefault(f, {})
1284 # And set the filenode's changelog node to the
1284 # And set the filenode's changelog node to the
1285 # manifest's if it hasn't been set already.
1285 # manifest's if it hasn't been set already.
1286 ndset.setdefault(fnode, clnode)
1286 ndset.setdefault(fnode, clnode)
1287 else:
1287 else:
1288 # Otherwise we need a full manifest.
1288 # Otherwise we need a full manifest.
1289 m = mnfst.read(mnfstnode)
1289 m = mnfst.read(mnfstnode)
1290 # For every file in we care about.
1290 # For every file in we care about.
1291 for f in changedfiles:
1291 for f in changedfiles:
1292 fnode = m.get(f, None)
1292 fnode = m.get(f, None)
1293 # If it's in the manifest
1293 # If it's in the manifest
1294 if fnode is not None:
1294 if fnode is not None:
1295 # See comments above.
1295 # See comments above.
1296 clnode = msng_mnfst_set[mnfstnode]
1296 clnode = msng_mnfst_set[mnfstnode]
1297 ndset = msng_filenode_set.setdefault(f, {})
1297 ndset = msng_filenode_set.setdefault(f, {})
1298 ndset.setdefault(fnode, clnode)
1298 ndset.setdefault(fnode, clnode)
1299 # Remember the revision we hope to see next.
1299 # Remember the revision we hope to see next.
1300 next_rev[0] = r + 1
1300 next_rev[0] = r + 1
1301 return collect_msng_filenodes
1301 return collect_msng_filenodes
1302
1302
1303 # We have a list of filenodes we think we need for a file, lets remove
1303 # We have a list of filenodes we think we need for a file, lets remove
1304 # all those we now the recipient must have.
1304 # all those we now the recipient must have.
1305 def prune_filenodes(f, filerevlog):
1305 def prune_filenodes(f, filerevlog):
1306 msngset = msng_filenode_set[f]
1306 msngset = msng_filenode_set[f]
1307 hasset = {}
1307 hasset = {}
1308 # If a 'missing' filenode thinks it belongs to a changenode we
1308 # If a 'missing' filenode thinks it belongs to a changenode we
1309 # assume the recipient must have, then the recipient must have
1309 # assume the recipient must have, then the recipient must have
1310 # that filenode.
1310 # that filenode.
1311 for n in msngset:
1311 for n in msngset:
1312 clnode = cl.node(filerevlog.linkrev(n))
1312 clnode = cl.node(filerevlog.linkrev(n))
1313 if clnode in has_cl_set:
1313 if clnode in has_cl_set:
1314 hasset[n] = 1
1314 hasset[n] = 1
1315 prune_parents(filerevlog, hasset, msngset)
1315 prune_parents(filerevlog, hasset, msngset)
1316
1316
1317 # A function generator function that sets up the a context for the
1317 # A function generator function that sets up the a context for the
1318 # inner function.
1318 # inner function.
1319 def lookup_filenode_link_func(fname):
1319 def lookup_filenode_link_func(fname):
1320 msngset = msng_filenode_set[fname]
1320 msngset = msng_filenode_set[fname]
1321 # Lookup the changenode the filenode belongs to.
1321 # Lookup the changenode the filenode belongs to.
1322 def lookup_filenode_link(fnode):
1322 def lookup_filenode_link(fnode):
1323 return msngset[fnode]
1323 return msngset[fnode]
1324 return lookup_filenode_link
1324 return lookup_filenode_link
1325
1325
1326 # Now that we have all theses utility functions to help out and
1326 # Now that we have all theses utility functions to help out and
1327 # logically divide up the task, generate the group.
1327 # logically divide up the task, generate the group.
1328 def gengroup():
1328 def gengroup():
1329 # The set of changed files starts empty.
1329 # The set of changed files starts empty.
1330 changedfiles = {}
1330 changedfiles = {}
1331 # Create a changenode group generator that will call our functions
1331 # Create a changenode group generator that will call our functions
1332 # back to lookup the owning changenode and collect information.
1332 # back to lookup the owning changenode and collect information.
1333 group = cl.group(msng_cl_lst, identity,
1333 group = cl.group(msng_cl_lst, identity,
1334 manifest_and_file_collector(changedfiles))
1334 manifest_and_file_collector(changedfiles))
1335 for chnk in group:
1335 for chnk in group:
1336 yield chnk
1336 yield chnk
1337
1337
1338 # The list of manifests has been collected by the generator
1338 # The list of manifests has been collected by the generator
1339 # calling our functions back.
1339 # calling our functions back.
1340 prune_manifests()
1340 prune_manifests()
1341 msng_mnfst_lst = msng_mnfst_set.keys()
1341 msng_mnfst_lst = msng_mnfst_set.keys()
1342 # Sort the manifestnodes by revision number.
1342 # Sort the manifestnodes by revision number.
1343 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1343 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1344 # Create a generator for the manifestnodes that calls our lookup
1344 # Create a generator for the manifestnodes that calls our lookup
1345 # and data collection functions back.
1345 # and data collection functions back.
1346 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1346 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1347 filenode_collector(changedfiles))
1347 filenode_collector(changedfiles))
1348 for chnk in group:
1348 for chnk in group:
1349 yield chnk
1349 yield chnk
1350
1350
1351 # These are no longer needed, dereference and toss the memory for
1351 # These are no longer needed, dereference and toss the memory for
1352 # them.
1352 # them.
1353 msng_mnfst_lst = None
1353 msng_mnfst_lst = None
1354 msng_mnfst_set.clear()
1354 msng_mnfst_set.clear()
1355
1355
1356 changedfiles = changedfiles.keys()
1356 changedfiles = changedfiles.keys()
1357 changedfiles.sort()
1357 changedfiles.sort()
1358 # Go through all our files in order sorted by name.
1358 # Go through all our files in order sorted by name.
1359 for fname in changedfiles:
1359 for fname in changedfiles:
1360 filerevlog = self.file(fname)
1360 filerevlog = self.file(fname)
1361 # Toss out the filenodes that the recipient isn't really
1361 # Toss out the filenodes that the recipient isn't really
1362 # missing.
1362 # missing.
1363 if msng_filenode_set.has_key(fname):
1363 if msng_filenode_set.has_key(fname):
1364 prune_filenodes(fname, filerevlog)
1364 prune_filenodes(fname, filerevlog)
1365 msng_filenode_lst = msng_filenode_set[fname].keys()
1365 msng_filenode_lst = msng_filenode_set[fname].keys()
1366 else:
1366 else:
1367 msng_filenode_lst = []
1367 msng_filenode_lst = []
1368 # If any filenodes are left, generate the group for them,
1368 # If any filenodes are left, generate the group for them,
1369 # otherwise don't bother.
1369 # otherwise don't bother.
1370 if len(msng_filenode_lst) > 0:
1370 if len(msng_filenode_lst) > 0:
1371 yield changegroup.genchunk(fname)
1371 yield changegroup.genchunk(fname)
1372 # Sort the filenodes by their revision #
1372 # Sort the filenodes by their revision #
1373 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1373 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1374 # Create a group generator and only pass in a changenode
1374 # Create a group generator and only pass in a changenode
1375 # lookup function as we need to collect no information
1375 # lookup function as we need to collect no information
1376 # from filenodes.
1376 # from filenodes.
1377 group = filerevlog.group(msng_filenode_lst,
1377 group = filerevlog.group(msng_filenode_lst,
1378 lookup_filenode_link_func(fname))
1378 lookup_filenode_link_func(fname))
1379 for chnk in group:
1379 for chnk in group:
1380 yield chnk
1380 yield chnk
1381 if msng_filenode_set.has_key(fname):
1381 if msng_filenode_set.has_key(fname):
1382 # Don't need this anymore, toss it to free memory.
1382 # Don't need this anymore, toss it to free memory.
1383 del msng_filenode_set[fname]
1383 del msng_filenode_set[fname]
1384 # Signal that no more groups are left.
1384 # Signal that no more groups are left.
1385 yield changegroup.closechunk()
1385 yield changegroup.closechunk()
1386
1386
1387 if msng_cl_lst:
1387 if msng_cl_lst:
1388 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1388 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1389
1389
1390 return util.chunkbuffer(gengroup())
1390 return util.chunkbuffer(gengroup())
1391
1391
1392 def changegroup(self, basenodes, source):
1392 def changegroup(self, basenodes, source):
1393 """Generate a changegroup of all nodes that we have that a recipient
1393 """Generate a changegroup of all nodes that we have that a recipient
1394 doesn't.
1394 doesn't.
1395
1395
1396 This is much easier than the previous function as we can assume that
1396 This is much easier than the previous function as we can assume that
1397 the recipient has any changenode we aren't sending them."""
1397 the recipient has any changenode we aren't sending them."""
1398
1398
1399 self.hook('preoutgoing', throw=True, source=source)
1399 self.hook('preoutgoing', throw=True, source=source)
1400
1400
1401 cl = self.changelog
1401 cl = self.changelog
1402 nodes = cl.nodesbetween(basenodes, None)[0]
1402 nodes = cl.nodesbetween(basenodes, None)[0]
1403 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1403 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1404
1404
1405 def identity(x):
1405 def identity(x):
1406 return x
1406 return x
1407
1407
1408 def gennodelst(revlog):
1408 def gennodelst(revlog):
1409 for r in xrange(0, revlog.count()):
1409 for r in xrange(0, revlog.count()):
1410 n = revlog.node(r)
1410 n = revlog.node(r)
1411 if revlog.linkrev(n) in revset:
1411 if revlog.linkrev(n) in revset:
1412 yield n
1412 yield n
1413
1413
1414 def changed_file_collector(changedfileset):
1414 def changed_file_collector(changedfileset):
1415 def collect_changed_files(clnode):
1415 def collect_changed_files(clnode):
1416 c = cl.read(clnode)
1416 c = cl.read(clnode)
1417 for fname in c[3]:
1417 for fname in c[3]:
1418 changedfileset[fname] = 1
1418 changedfileset[fname] = 1
1419 return collect_changed_files
1419 return collect_changed_files
1420
1420
1421 def lookuprevlink_func(revlog):
1421 def lookuprevlink_func(revlog):
1422 def lookuprevlink(n):
1422 def lookuprevlink(n):
1423 return cl.node(revlog.linkrev(n))
1423 return cl.node(revlog.linkrev(n))
1424 return lookuprevlink
1424 return lookuprevlink
1425
1425
1426 def gengroup():
1426 def gengroup():
1427 # construct a list of all changed files
1427 # construct a list of all changed files
1428 changedfiles = {}
1428 changedfiles = {}
1429
1429
1430 for chnk in cl.group(nodes, identity,
1430 for chnk in cl.group(nodes, identity,
1431 changed_file_collector(changedfiles)):
1431 changed_file_collector(changedfiles)):
1432 yield chnk
1432 yield chnk
1433 changedfiles = changedfiles.keys()
1433 changedfiles = changedfiles.keys()
1434 changedfiles.sort()
1434 changedfiles.sort()
1435
1435
1436 mnfst = self.manifest
1436 mnfst = self.manifest
1437 nodeiter = gennodelst(mnfst)
1437 nodeiter = gennodelst(mnfst)
1438 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1438 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1439 yield chnk
1439 yield chnk
1440
1440
1441 for fname in changedfiles:
1441 for fname in changedfiles:
1442 filerevlog = self.file(fname)
1442 filerevlog = self.file(fname)
1443 nodeiter = gennodelst(filerevlog)
1443 nodeiter = gennodelst(filerevlog)
1444 nodeiter = list(nodeiter)
1444 nodeiter = list(nodeiter)
1445 if nodeiter:
1445 if nodeiter:
1446 yield changegroup.genchunk(fname)
1446 yield changegroup.genchunk(fname)
1447 lookup = lookuprevlink_func(filerevlog)
1447 lookup = lookuprevlink_func(filerevlog)
1448 for chnk in filerevlog.group(nodeiter, lookup):
1448 for chnk in filerevlog.group(nodeiter, lookup):
1449 yield chnk
1449 yield chnk
1450
1450
1451 yield changegroup.closechunk()
1451 yield changegroup.closechunk()
1452
1452
1453 if nodes:
1453 if nodes:
1454 self.hook('outgoing', node=hex(nodes[0]), source=source)
1454 self.hook('outgoing', node=hex(nodes[0]), source=source)
1455
1455
1456 return util.chunkbuffer(gengroup())
1456 return util.chunkbuffer(gengroup())
1457
1457
1458 def addchangegroup(self, source):
1458 def addchangegroup(self, source):
1459 """add changegroup to repo.
1459 """add changegroup to repo.
1460 returns number of heads modified or added + 1."""
1460 returns number of heads modified or added + 1."""
1461
1461
1462 def csmap(x):
1462 def csmap(x):
1463 self.ui.debug(_("add changeset %s\n") % short(x))
1463 self.ui.debug(_("add changeset %s\n") % short(x))
1464 return cl.count()
1464 return cl.count()
1465
1465
1466 def revmap(x):
1466 def revmap(x):
1467 return cl.rev(x)
1467 return cl.rev(x)
1468
1468
1469 if not source:
1469 if not source:
1470 return 0
1470 return 0
1471
1471
1472 self.hook('prechangegroup', throw=True)
1472 self.hook('prechangegroup', throw=True, source=source)
1473
1473
1474 changesets = files = revisions = 0
1474 changesets = files = revisions = 0
1475
1475
1476 tr = self.transaction()
1476 tr = self.transaction()
1477
1477
1478 # write changelog and manifest data to temp files so
1478 # write changelog and manifest data to temp files so
1479 # concurrent readers will not see inconsistent view
1479 # concurrent readers will not see inconsistent view
1480 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1480 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1481
1481
1482 oldheads = len(cl.heads())
1482 oldheads = len(cl.heads())
1483
1483
1484 # pull off the changeset group
1484 # pull off the changeset group
1485 self.ui.status(_("adding changesets\n"))
1485 self.ui.status(_("adding changesets\n"))
1486 co = cl.tip()
1486 co = cl.tip()
1487 chunkiter = changegroup.chunkiter(source)
1487 chunkiter = changegroup.chunkiter(source)
1488 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1488 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1489 cnr, cor = map(cl.rev, (cn, co))
1489 cnr, cor = map(cl.rev, (cn, co))
1490 if cn == nullid:
1490 if cn == nullid:
1491 cnr = cor
1491 cnr = cor
1492 changesets = cnr - cor
1492 changesets = cnr - cor
1493
1493
1494 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1494 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1495
1495
1496 # pull off the manifest group
1496 # pull off the manifest group
1497 self.ui.status(_("adding manifests\n"))
1497 self.ui.status(_("adding manifests\n"))
1498 mm = mf.tip()
1498 mm = mf.tip()
1499 chunkiter = changegroup.chunkiter(source)
1499 chunkiter = changegroup.chunkiter(source)
1500 mo = mf.addgroup(chunkiter, revmap, tr)
1500 mo = mf.addgroup(chunkiter, revmap, tr)
1501
1501
1502 # process the files
1502 # process the files
1503 self.ui.status(_("adding file changes\n"))
1503 self.ui.status(_("adding file changes\n"))
1504 while 1:
1504 while 1:
1505 f = changegroup.getchunk(source)
1505 f = changegroup.getchunk(source)
1506 if not f:
1506 if not f:
1507 break
1507 break
1508 self.ui.debug(_("adding %s revisions\n") % f)
1508 self.ui.debug(_("adding %s revisions\n") % f)
1509 fl = self.file(f)
1509 fl = self.file(f)
1510 o = fl.count()
1510 o = fl.count()
1511 chunkiter = changegroup.chunkiter(source)
1511 chunkiter = changegroup.chunkiter(source)
1512 n = fl.addgroup(chunkiter, revmap, tr)
1512 n = fl.addgroup(chunkiter, revmap, tr)
1513 revisions += fl.count() - o
1513 revisions += fl.count() - o
1514 files += 1
1514 files += 1
1515
1515
1516 # write order here is important so concurrent readers will see
1516 # write order here is important so concurrent readers will see
1517 # consistent view of repo
1517 # consistent view of repo
1518 mf.writedata()
1518 mf.writedata()
1519 cl.writedata()
1519 cl.writedata()
1520
1520
1521 # make changelog and manifest see real files again
1521 # make changelog and manifest see real files again
1522 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1522 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1523 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1523 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1524 self.changelog.checkinlinesize(tr)
1524 self.changelog.checkinlinesize(tr)
1525 self.manifest.checkinlinesize(tr)
1525 self.manifest.checkinlinesize(tr)
1526
1526
1527 newheads = len(self.changelog.heads())
1527 newheads = len(self.changelog.heads())
1528 heads = ""
1528 heads = ""
1529 if oldheads and newheads > oldheads:
1529 if oldheads and newheads > oldheads:
1530 heads = _(" (+%d heads)") % (newheads - oldheads)
1530 heads = _(" (+%d heads)") % (newheads - oldheads)
1531
1531
1532 self.ui.status(_("added %d changesets"
1532 self.ui.status(_("added %d changesets"
1533 " with %d changes to %d files%s\n")
1533 " with %d changes to %d files%s\n")
1534 % (changesets, revisions, files, heads))
1534 % (changesets, revisions, files, heads))
1535
1535
1536 self.hook('pretxnchangegroup', throw=True,
1536 self.hook('pretxnchangegroup', throw=True,
1537 node=hex(self.changelog.node(cor+1)))
1537 node=hex(self.changelog.node(cor+1)), source=source)
1538
1538
1539 tr.close()
1539 tr.close()
1540
1540
1541 if changesets > 0:
1541 if changesets > 0:
1542 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1542 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1543 source=source)
1543
1544
1544 for i in range(cor + 1, cnr + 1):
1545 for i in range(cor + 1, cnr + 1):
1545 self.hook("incoming", node=hex(self.changelog.node(i)))
1546 self.hook("incoming", node=hex(self.changelog.node(i)),
1547 source=source)
1546
1548
1547 return newheads - oldheads + 1
1549 return newheads - oldheads + 1
1548
1550
1549 def update(self, node, allow=False, force=False, choose=None,
1551 def update(self, node, allow=False, force=False, choose=None,
1550 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1552 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1551 pl = self.dirstate.parents()
1553 pl = self.dirstate.parents()
1552 if not force and pl[1] != nullid:
1554 if not force and pl[1] != nullid:
1553 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1555 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1554 return 1
1556 return 1
1555
1557
1556 err = False
1558 err = False
1557
1559
1558 p1, p2 = pl[0], node
1560 p1, p2 = pl[0], node
1559 pa = self.changelog.ancestor(p1, p2)
1561 pa = self.changelog.ancestor(p1, p2)
1560 m1n = self.changelog.read(p1)[0]
1562 m1n = self.changelog.read(p1)[0]
1561 m2n = self.changelog.read(p2)[0]
1563 m2n = self.changelog.read(p2)[0]
1562 man = self.manifest.ancestor(m1n, m2n)
1564 man = self.manifest.ancestor(m1n, m2n)
1563 m1 = self.manifest.read(m1n)
1565 m1 = self.manifest.read(m1n)
1564 mf1 = self.manifest.readflags(m1n)
1566 mf1 = self.manifest.readflags(m1n)
1565 m2 = self.manifest.read(m2n).copy()
1567 m2 = self.manifest.read(m2n).copy()
1566 mf2 = self.manifest.readflags(m2n)
1568 mf2 = self.manifest.readflags(m2n)
1567 ma = self.manifest.read(man)
1569 ma = self.manifest.read(man)
1568 mfa = self.manifest.readflags(man)
1570 mfa = self.manifest.readflags(man)
1569
1571
1570 modified, added, removed, deleted, unknown = self.changes()
1572 modified, added, removed, deleted, unknown = self.changes()
1571
1573
1572 # is this a jump, or a merge? i.e. is there a linear path
1574 # is this a jump, or a merge? i.e. is there a linear path
1573 # from p1 to p2?
1575 # from p1 to p2?
1574 linear_path = (pa == p1 or pa == p2)
1576 linear_path = (pa == p1 or pa == p2)
1575
1577
1576 if allow and linear_path:
1578 if allow and linear_path:
1577 raise util.Abort(_("there is nothing to merge, "
1579 raise util.Abort(_("there is nothing to merge, "
1578 "just use 'hg update'"))
1580 "just use 'hg update'"))
1579 if allow and not forcemerge:
1581 if allow and not forcemerge:
1580 if modified or added or removed:
1582 if modified or added or removed:
1581 raise util.Abort(_("outstanding uncommitted changes"))
1583 raise util.Abort(_("outstanding uncommitted changes"))
1582 if not forcemerge and not force:
1584 if not forcemerge and not force:
1583 for f in unknown:
1585 for f in unknown:
1584 if f in m2:
1586 if f in m2:
1585 t1 = self.wread(f)
1587 t1 = self.wread(f)
1586 t2 = self.file(f).read(m2[f])
1588 t2 = self.file(f).read(m2[f])
1587 if cmp(t1, t2) != 0:
1589 if cmp(t1, t2) != 0:
1588 raise util.Abort(_("'%s' already exists in the working"
1590 raise util.Abort(_("'%s' already exists in the working"
1589 " dir and differs from remote") % f)
1591 " dir and differs from remote") % f)
1590
1592
1591 # resolve the manifest to determine which files
1593 # resolve the manifest to determine which files
1592 # we care about merging
1594 # we care about merging
1593 self.ui.note(_("resolving manifests\n"))
1595 self.ui.note(_("resolving manifests\n"))
1594 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1596 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1595 (force, allow, moddirstate, linear_path))
1597 (force, allow, moddirstate, linear_path))
1596 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1598 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1597 (short(man), short(m1n), short(m2n)))
1599 (short(man), short(m1n), short(m2n)))
1598
1600
1599 merge = {}
1601 merge = {}
1600 get = {}
1602 get = {}
1601 remove = []
1603 remove = []
1602
1604
1603 # construct a working dir manifest
1605 # construct a working dir manifest
1604 mw = m1.copy()
1606 mw = m1.copy()
1605 mfw = mf1.copy()
1607 mfw = mf1.copy()
1606 umap = dict.fromkeys(unknown)
1608 umap = dict.fromkeys(unknown)
1607
1609
1608 for f in added + modified + unknown:
1610 for f in added + modified + unknown:
1609 mw[f] = ""
1611 mw[f] = ""
1610 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1612 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1611
1613
1612 if moddirstate and not wlock:
1614 if moddirstate and not wlock:
1613 wlock = self.wlock()
1615 wlock = self.wlock()
1614
1616
1615 for f in deleted + removed:
1617 for f in deleted + removed:
1616 if f in mw:
1618 if f in mw:
1617 del mw[f]
1619 del mw[f]
1618
1620
1619 # If we're jumping between revisions (as opposed to merging),
1621 # If we're jumping between revisions (as opposed to merging),
1620 # and if neither the working directory nor the target rev has
1622 # and if neither the working directory nor the target rev has
1621 # the file, then we need to remove it from the dirstate, to
1623 # the file, then we need to remove it from the dirstate, to
1622 # prevent the dirstate from listing the file when it is no
1624 # prevent the dirstate from listing the file when it is no
1623 # longer in the manifest.
1625 # longer in the manifest.
1624 if moddirstate and linear_path and f not in m2:
1626 if moddirstate and linear_path and f not in m2:
1625 self.dirstate.forget((f,))
1627 self.dirstate.forget((f,))
1626
1628
1627 # Compare manifests
1629 # Compare manifests
1628 for f, n in mw.iteritems():
1630 for f, n in mw.iteritems():
1629 if choose and not choose(f):
1631 if choose and not choose(f):
1630 continue
1632 continue
1631 if f in m2:
1633 if f in m2:
1632 s = 0
1634 s = 0
1633
1635
1634 # is the wfile new since m1, and match m2?
1636 # is the wfile new since m1, and match m2?
1635 if f not in m1:
1637 if f not in m1:
1636 t1 = self.wread(f)
1638 t1 = self.wread(f)
1637 t2 = self.file(f).read(m2[f])
1639 t2 = self.file(f).read(m2[f])
1638 if cmp(t1, t2) == 0:
1640 if cmp(t1, t2) == 0:
1639 n = m2[f]
1641 n = m2[f]
1640 del t1, t2
1642 del t1, t2
1641
1643
1642 # are files different?
1644 # are files different?
1643 if n != m2[f]:
1645 if n != m2[f]:
1644 a = ma.get(f, nullid)
1646 a = ma.get(f, nullid)
1645 # are both different from the ancestor?
1647 # are both different from the ancestor?
1646 if n != a and m2[f] != a:
1648 if n != a and m2[f] != a:
1647 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1649 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1648 # merge executable bits
1650 # merge executable bits
1649 # "if we changed or they changed, change in merge"
1651 # "if we changed or they changed, change in merge"
1650 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1652 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1651 mode = ((a^b) | (a^c)) ^ a
1653 mode = ((a^b) | (a^c)) ^ a
1652 merge[f] = (m1.get(f, nullid), m2[f], mode)
1654 merge[f] = (m1.get(f, nullid), m2[f], mode)
1653 s = 1
1655 s = 1
1654 # are we clobbering?
1656 # are we clobbering?
1655 # is remote's version newer?
1657 # is remote's version newer?
1656 # or are we going back in time?
1658 # or are we going back in time?
1657 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1659 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1658 self.ui.debug(_(" remote %s is newer, get\n") % f)
1660 self.ui.debug(_(" remote %s is newer, get\n") % f)
1659 get[f] = m2[f]
1661 get[f] = m2[f]
1660 s = 1
1662 s = 1
1661 elif f in umap or f in added:
1663 elif f in umap or f in added:
1662 # this unknown file is the same as the checkout
1664 # this unknown file is the same as the checkout
1663 # we need to reset the dirstate if the file was added
1665 # we need to reset the dirstate if the file was added
1664 get[f] = m2[f]
1666 get[f] = m2[f]
1665
1667
1666 if not s and mfw[f] != mf2[f]:
1668 if not s and mfw[f] != mf2[f]:
1667 if force:
1669 if force:
1668 self.ui.debug(_(" updating permissions for %s\n") % f)
1670 self.ui.debug(_(" updating permissions for %s\n") % f)
1669 util.set_exec(self.wjoin(f), mf2[f])
1671 util.set_exec(self.wjoin(f), mf2[f])
1670 else:
1672 else:
1671 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1673 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1672 mode = ((a^b) | (a^c)) ^ a
1674 mode = ((a^b) | (a^c)) ^ a
1673 if mode != b:
1675 if mode != b:
1674 self.ui.debug(_(" updating permissions for %s\n")
1676 self.ui.debug(_(" updating permissions for %s\n")
1675 % f)
1677 % f)
1676 util.set_exec(self.wjoin(f), mode)
1678 util.set_exec(self.wjoin(f), mode)
1677 del m2[f]
1679 del m2[f]
1678 elif f in ma:
1680 elif f in ma:
1679 if n != ma[f]:
1681 if n != ma[f]:
1680 r = _("d")
1682 r = _("d")
1681 if not force and (linear_path or allow):
1683 if not force and (linear_path or allow):
1682 r = self.ui.prompt(
1684 r = self.ui.prompt(
1683 (_(" local changed %s which remote deleted\n") % f) +
1685 (_(" local changed %s which remote deleted\n") % f) +
1684 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1686 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1685 if r == _("d"):
1687 if r == _("d"):
1686 remove.append(f)
1688 remove.append(f)
1687 else:
1689 else:
1688 self.ui.debug(_("other deleted %s\n") % f)
1690 self.ui.debug(_("other deleted %s\n") % f)
1689 remove.append(f) # other deleted it
1691 remove.append(f) # other deleted it
1690 else:
1692 else:
1691 # file is created on branch or in working directory
1693 # file is created on branch or in working directory
1692 if force and f not in umap:
1694 if force and f not in umap:
1693 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1695 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1694 remove.append(f)
1696 remove.append(f)
1695 elif n == m1.get(f, nullid): # same as parent
1697 elif n == m1.get(f, nullid): # same as parent
1696 if p2 == pa: # going backwards?
1698 if p2 == pa: # going backwards?
1697 self.ui.debug(_("remote deleted %s\n") % f)
1699 self.ui.debug(_("remote deleted %s\n") % f)
1698 remove.append(f)
1700 remove.append(f)
1699 else:
1701 else:
1700 self.ui.debug(_("local modified %s, keeping\n") % f)
1702 self.ui.debug(_("local modified %s, keeping\n") % f)
1701 else:
1703 else:
1702 self.ui.debug(_("working dir created %s, keeping\n") % f)
1704 self.ui.debug(_("working dir created %s, keeping\n") % f)
1703
1705
1704 for f, n in m2.iteritems():
1706 for f, n in m2.iteritems():
1705 if choose and not choose(f):
1707 if choose and not choose(f):
1706 continue
1708 continue
1707 if f[0] == "/":
1709 if f[0] == "/":
1708 continue
1710 continue
1709 if f in ma and n != ma[f]:
1711 if f in ma and n != ma[f]:
1710 r = _("k")
1712 r = _("k")
1711 if not force and (linear_path or allow):
1713 if not force and (linear_path or allow):
1712 r = self.ui.prompt(
1714 r = self.ui.prompt(
1713 (_("remote changed %s which local deleted\n") % f) +
1715 (_("remote changed %s which local deleted\n") % f) +
1714 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1716 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1715 if r == _("k"):
1717 if r == _("k"):
1716 get[f] = n
1718 get[f] = n
1717 elif f not in ma:
1719 elif f not in ma:
1718 self.ui.debug(_("remote created %s\n") % f)
1720 self.ui.debug(_("remote created %s\n") % f)
1719 get[f] = n
1721 get[f] = n
1720 else:
1722 else:
1721 if force or p2 == pa: # going backwards?
1723 if force or p2 == pa: # going backwards?
1722 self.ui.debug(_("local deleted %s, recreating\n") % f)
1724 self.ui.debug(_("local deleted %s, recreating\n") % f)
1723 get[f] = n
1725 get[f] = n
1724 else:
1726 else:
1725 self.ui.debug(_("local deleted %s\n") % f)
1727 self.ui.debug(_("local deleted %s\n") % f)
1726
1728
1727 del mw, m1, m2, ma
1729 del mw, m1, m2, ma
1728
1730
1729 if force:
1731 if force:
1730 for f in merge:
1732 for f in merge:
1731 get[f] = merge[f][1]
1733 get[f] = merge[f][1]
1732 merge = {}
1734 merge = {}
1733
1735
1734 if linear_path or force:
1736 if linear_path or force:
1735 # we don't need to do any magic, just jump to the new rev
1737 # we don't need to do any magic, just jump to the new rev
1736 branch_merge = False
1738 branch_merge = False
1737 p1, p2 = p2, nullid
1739 p1, p2 = p2, nullid
1738 else:
1740 else:
1739 if not allow:
1741 if not allow:
1740 self.ui.status(_("this update spans a branch"
1742 self.ui.status(_("this update spans a branch"
1741 " affecting the following files:\n"))
1743 " affecting the following files:\n"))
1742 fl = merge.keys() + get.keys()
1744 fl = merge.keys() + get.keys()
1743 fl.sort()
1745 fl.sort()
1744 for f in fl:
1746 for f in fl:
1745 cf = ""
1747 cf = ""
1746 if f in merge:
1748 if f in merge:
1747 cf = _(" (resolve)")
1749 cf = _(" (resolve)")
1748 self.ui.status(" %s%s\n" % (f, cf))
1750 self.ui.status(" %s%s\n" % (f, cf))
1749 self.ui.warn(_("aborting update spanning branches!\n"))
1751 self.ui.warn(_("aborting update spanning branches!\n"))
1750 self.ui.status(_("(use 'hg merge' to merge across branches"
1752 self.ui.status(_("(use 'hg merge' to merge across branches"
1751 " or 'hg update -C' to lose changes)\n"))
1753 " or 'hg update -C' to lose changes)\n"))
1752 return 1
1754 return 1
1753 branch_merge = True
1755 branch_merge = True
1754
1756
1755 # get the files we don't need to change
1757 # get the files we don't need to change
1756 files = get.keys()
1758 files = get.keys()
1757 files.sort()
1759 files.sort()
1758 for f in files:
1760 for f in files:
1759 if f[0] == "/":
1761 if f[0] == "/":
1760 continue
1762 continue
1761 self.ui.note(_("getting %s\n") % f)
1763 self.ui.note(_("getting %s\n") % f)
1762 t = self.file(f).read(get[f])
1764 t = self.file(f).read(get[f])
1763 self.wwrite(f, t)
1765 self.wwrite(f, t)
1764 util.set_exec(self.wjoin(f), mf2[f])
1766 util.set_exec(self.wjoin(f), mf2[f])
1765 if moddirstate:
1767 if moddirstate:
1766 if branch_merge:
1768 if branch_merge:
1767 self.dirstate.update([f], 'n', st_mtime=-1)
1769 self.dirstate.update([f], 'n', st_mtime=-1)
1768 else:
1770 else:
1769 self.dirstate.update([f], 'n')
1771 self.dirstate.update([f], 'n')
1770
1772
1771 # merge the tricky bits
1773 # merge the tricky bits
1772 failedmerge = []
1774 failedmerge = []
1773 files = merge.keys()
1775 files = merge.keys()
1774 files.sort()
1776 files.sort()
1775 xp1 = hex(p1)
1777 xp1 = hex(p1)
1776 xp2 = hex(p2)
1778 xp2 = hex(p2)
1777 for f in files:
1779 for f in files:
1778 self.ui.status(_("merging %s\n") % f)
1780 self.ui.status(_("merging %s\n") % f)
1779 my, other, flag = merge[f]
1781 my, other, flag = merge[f]
1780 ret = self.merge3(f, my, other, xp1, xp2)
1782 ret = self.merge3(f, my, other, xp1, xp2)
1781 if ret:
1783 if ret:
1782 err = True
1784 err = True
1783 failedmerge.append(f)
1785 failedmerge.append(f)
1784 util.set_exec(self.wjoin(f), flag)
1786 util.set_exec(self.wjoin(f), flag)
1785 if moddirstate:
1787 if moddirstate:
1786 if branch_merge:
1788 if branch_merge:
1787 # We've done a branch merge, mark this file as merged
1789 # We've done a branch merge, mark this file as merged
1788 # so that we properly record the merger later
1790 # so that we properly record the merger later
1789 self.dirstate.update([f], 'm')
1791 self.dirstate.update([f], 'm')
1790 else:
1792 else:
1791 # We've update-merged a locally modified file, so
1793 # We've update-merged a locally modified file, so
1792 # we set the dirstate to emulate a normal checkout
1794 # we set the dirstate to emulate a normal checkout
1793 # of that file some time in the past. Thus our
1795 # of that file some time in the past. Thus our
1794 # merge will appear as a normal local file
1796 # merge will appear as a normal local file
1795 # modification.
1797 # modification.
1796 f_len = len(self.file(f).read(other))
1798 f_len = len(self.file(f).read(other))
1797 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1799 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1798
1800
1799 remove.sort()
1801 remove.sort()
1800 for f in remove:
1802 for f in remove:
1801 self.ui.note(_("removing %s\n") % f)
1803 self.ui.note(_("removing %s\n") % f)
1802 util.audit_path(f)
1804 util.audit_path(f)
1803 try:
1805 try:
1804 util.unlink(self.wjoin(f))
1806 util.unlink(self.wjoin(f))
1805 except OSError, inst:
1807 except OSError, inst:
1806 if inst.errno != errno.ENOENT:
1808 if inst.errno != errno.ENOENT:
1807 self.ui.warn(_("update failed to remove %s: %s!\n") %
1809 self.ui.warn(_("update failed to remove %s: %s!\n") %
1808 (f, inst.strerror))
1810 (f, inst.strerror))
1809 if moddirstate:
1811 if moddirstate:
1810 if branch_merge:
1812 if branch_merge:
1811 self.dirstate.update(remove, 'r')
1813 self.dirstate.update(remove, 'r')
1812 else:
1814 else:
1813 self.dirstate.forget(remove)
1815 self.dirstate.forget(remove)
1814
1816
1815 if moddirstate:
1817 if moddirstate:
1816 self.dirstate.setparents(p1, p2)
1818 self.dirstate.setparents(p1, p2)
1817
1819
1818 if show_stats:
1820 if show_stats:
1819 stats = ((len(get), _("updated")),
1821 stats = ((len(get), _("updated")),
1820 (len(merge) - len(failedmerge), _("merged")),
1822 (len(merge) - len(failedmerge), _("merged")),
1821 (len(remove), _("removed")),
1823 (len(remove), _("removed")),
1822 (len(failedmerge), _("unresolved")))
1824 (len(failedmerge), _("unresolved")))
1823 note = ", ".join([_("%d files %s") % s for s in stats])
1825 note = ", ".join([_("%d files %s") % s for s in stats])
1824 self.ui.status("%s\n" % note)
1826 self.ui.status("%s\n" % note)
1825 if moddirstate:
1827 if moddirstate:
1826 if branch_merge:
1828 if branch_merge:
1827 if failedmerge:
1829 if failedmerge:
1828 self.ui.status(_("There are unresolved merges,"
1830 self.ui.status(_("There are unresolved merges,"
1829 " you can redo the full merge using:\n"
1831 " you can redo the full merge using:\n"
1830 " hg update -C %s\n"
1832 " hg update -C %s\n"
1831 " hg merge %s\n"
1833 " hg merge %s\n"
1832 % (self.changelog.rev(p1),
1834 % (self.changelog.rev(p1),
1833 self.changelog.rev(p2))))
1835 self.changelog.rev(p2))))
1834 else:
1836 else:
1835 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1837 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1836 elif failedmerge:
1838 elif failedmerge:
1837 self.ui.status(_("There are unresolved merges with"
1839 self.ui.status(_("There are unresolved merges with"
1838 " locally modified files.\n"))
1840 " locally modified files.\n"))
1839
1841
1840 return err
1842 return err
1841
1843
1842 def merge3(self, fn, my, other, p1, p2):
1844 def merge3(self, fn, my, other, p1, p2):
1843 """perform a 3-way merge in the working directory"""
1845 """perform a 3-way merge in the working directory"""
1844
1846
1845 def temp(prefix, node):
1847 def temp(prefix, node):
1846 pre = "%s~%s." % (os.path.basename(fn), prefix)
1848 pre = "%s~%s." % (os.path.basename(fn), prefix)
1847 (fd, name) = tempfile.mkstemp(prefix=pre)
1849 (fd, name) = tempfile.mkstemp(prefix=pre)
1848 f = os.fdopen(fd, "wb")
1850 f = os.fdopen(fd, "wb")
1849 self.wwrite(fn, fl.read(node), f)
1851 self.wwrite(fn, fl.read(node), f)
1850 f.close()
1852 f.close()
1851 return name
1853 return name
1852
1854
1853 fl = self.file(fn)
1855 fl = self.file(fn)
1854 base = fl.ancestor(my, other)
1856 base = fl.ancestor(my, other)
1855 a = self.wjoin(fn)
1857 a = self.wjoin(fn)
1856 b = temp("base", base)
1858 b = temp("base", base)
1857 c = temp("other", other)
1859 c = temp("other", other)
1858
1860
1859 self.ui.note(_("resolving %s\n") % fn)
1861 self.ui.note(_("resolving %s\n") % fn)
1860 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1862 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1861 (fn, short(my), short(other), short(base)))
1863 (fn, short(my), short(other), short(base)))
1862
1864
1863 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1865 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1864 or "hgmerge")
1866 or "hgmerge")
1865 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1867 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1866 environ={'HG_FILE': fn,
1868 environ={'HG_FILE': fn,
1867 'HG_MY_NODE': p1,
1869 'HG_MY_NODE': p1,
1868 'HG_OTHER_NODE': p2,
1870 'HG_OTHER_NODE': p2,
1869 'HG_FILE_MY_NODE': hex(my),
1871 'HG_FILE_MY_NODE': hex(my),
1870 'HG_FILE_OTHER_NODE': hex(other),
1872 'HG_FILE_OTHER_NODE': hex(other),
1871 'HG_FILE_BASE_NODE': hex(base)})
1873 'HG_FILE_BASE_NODE': hex(base)})
1872 if r:
1874 if r:
1873 self.ui.warn(_("merging %s failed!\n") % fn)
1875 self.ui.warn(_("merging %s failed!\n") % fn)
1874
1876
1875 os.unlink(b)
1877 os.unlink(b)
1876 os.unlink(c)
1878 os.unlink(c)
1877 return r
1879 return r
1878
1880
1879 def verify(self):
1881 def verify(self):
1880 filelinkrevs = {}
1882 filelinkrevs = {}
1881 filenodes = {}
1883 filenodes = {}
1882 changesets = revisions = files = 0
1884 changesets = revisions = files = 0
1883 errors = [0]
1885 errors = [0]
1884 warnings = [0]
1886 warnings = [0]
1885 neededmanifests = {}
1887 neededmanifests = {}
1886
1888
1887 def err(msg):
1889 def err(msg):
1888 self.ui.warn(msg + "\n")
1890 self.ui.warn(msg + "\n")
1889 errors[0] += 1
1891 errors[0] += 1
1890
1892
1891 def warn(msg):
1893 def warn(msg):
1892 self.ui.warn(msg + "\n")
1894 self.ui.warn(msg + "\n")
1893 warnings[0] += 1
1895 warnings[0] += 1
1894
1896
1895 def checksize(obj, name):
1897 def checksize(obj, name):
1896 d = obj.checksize()
1898 d = obj.checksize()
1897 if d[0]:
1899 if d[0]:
1898 err(_("%s data length off by %d bytes") % (name, d[0]))
1900 err(_("%s data length off by %d bytes") % (name, d[0]))
1899 if d[1]:
1901 if d[1]:
1900 err(_("%s index contains %d extra bytes") % (name, d[1]))
1902 err(_("%s index contains %d extra bytes") % (name, d[1]))
1901
1903
1902 def checkversion(obj, name):
1904 def checkversion(obj, name):
1903 if obj.version != revlog.REVLOGV0:
1905 if obj.version != revlog.REVLOGV0:
1904 if not revlogv1:
1906 if not revlogv1:
1905 warn(_("warning: `%s' uses revlog format 1") % name)
1907 warn(_("warning: `%s' uses revlog format 1") % name)
1906 elif revlogv1:
1908 elif revlogv1:
1907 warn(_("warning: `%s' uses revlog format 0") % name)
1909 warn(_("warning: `%s' uses revlog format 0") % name)
1908
1910
1909 revlogv1 = self.revlogversion != revlog.REVLOGV0
1911 revlogv1 = self.revlogversion != revlog.REVLOGV0
1910 if self.ui.verbose or revlogv1 != self.revlogv1:
1912 if self.ui.verbose or revlogv1 != self.revlogv1:
1911 self.ui.status(_("repository uses revlog format %d\n") %
1913 self.ui.status(_("repository uses revlog format %d\n") %
1912 (revlogv1 and 1 or 0))
1914 (revlogv1 and 1 or 0))
1913
1915
1914 seen = {}
1916 seen = {}
1915 self.ui.status(_("checking changesets\n"))
1917 self.ui.status(_("checking changesets\n"))
1916 checksize(self.changelog, "changelog")
1918 checksize(self.changelog, "changelog")
1917
1919
1918 for i in range(self.changelog.count()):
1920 for i in range(self.changelog.count()):
1919 changesets += 1
1921 changesets += 1
1920 n = self.changelog.node(i)
1922 n = self.changelog.node(i)
1921 l = self.changelog.linkrev(n)
1923 l = self.changelog.linkrev(n)
1922 if l != i:
1924 if l != i:
1923 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1925 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1924 if n in seen:
1926 if n in seen:
1925 err(_("duplicate changeset at revision %d") % i)
1927 err(_("duplicate changeset at revision %d") % i)
1926 seen[n] = 1
1928 seen[n] = 1
1927
1929
1928 for p in self.changelog.parents(n):
1930 for p in self.changelog.parents(n):
1929 if p not in self.changelog.nodemap:
1931 if p not in self.changelog.nodemap:
1930 err(_("changeset %s has unknown parent %s") %
1932 err(_("changeset %s has unknown parent %s") %
1931 (short(n), short(p)))
1933 (short(n), short(p)))
1932 try:
1934 try:
1933 changes = self.changelog.read(n)
1935 changes = self.changelog.read(n)
1934 except KeyboardInterrupt:
1936 except KeyboardInterrupt:
1935 self.ui.warn(_("interrupted"))
1937 self.ui.warn(_("interrupted"))
1936 raise
1938 raise
1937 except Exception, inst:
1939 except Exception, inst:
1938 err(_("unpacking changeset %s: %s") % (short(n), inst))
1940 err(_("unpacking changeset %s: %s") % (short(n), inst))
1939 continue
1941 continue
1940
1942
1941 neededmanifests[changes[0]] = n
1943 neededmanifests[changes[0]] = n
1942
1944
1943 for f in changes[3]:
1945 for f in changes[3]:
1944 filelinkrevs.setdefault(f, []).append(i)
1946 filelinkrevs.setdefault(f, []).append(i)
1945
1947
1946 seen = {}
1948 seen = {}
1947 self.ui.status(_("checking manifests\n"))
1949 self.ui.status(_("checking manifests\n"))
1948 checkversion(self.manifest, "manifest")
1950 checkversion(self.manifest, "manifest")
1949 checksize(self.manifest, "manifest")
1951 checksize(self.manifest, "manifest")
1950
1952
1951 for i in range(self.manifest.count()):
1953 for i in range(self.manifest.count()):
1952 n = self.manifest.node(i)
1954 n = self.manifest.node(i)
1953 l = self.manifest.linkrev(n)
1955 l = self.manifest.linkrev(n)
1954
1956
1955 if l < 0 or l >= self.changelog.count():
1957 if l < 0 or l >= self.changelog.count():
1956 err(_("bad manifest link (%d) at revision %d") % (l, i))
1958 err(_("bad manifest link (%d) at revision %d") % (l, i))
1957
1959
1958 if n in neededmanifests:
1960 if n in neededmanifests:
1959 del neededmanifests[n]
1961 del neededmanifests[n]
1960
1962
1961 if n in seen:
1963 if n in seen:
1962 err(_("duplicate manifest at revision %d") % i)
1964 err(_("duplicate manifest at revision %d") % i)
1963
1965
1964 seen[n] = 1
1966 seen[n] = 1
1965
1967
1966 for p in self.manifest.parents(n):
1968 for p in self.manifest.parents(n):
1967 if p not in self.manifest.nodemap:
1969 if p not in self.manifest.nodemap:
1968 err(_("manifest %s has unknown parent %s") %
1970 err(_("manifest %s has unknown parent %s") %
1969 (short(n), short(p)))
1971 (short(n), short(p)))
1970
1972
1971 try:
1973 try:
1972 delta = mdiff.patchtext(self.manifest.delta(n))
1974 delta = mdiff.patchtext(self.manifest.delta(n))
1973 except KeyboardInterrupt:
1975 except KeyboardInterrupt:
1974 self.ui.warn(_("interrupted"))
1976 self.ui.warn(_("interrupted"))
1975 raise
1977 raise
1976 except Exception, inst:
1978 except Exception, inst:
1977 err(_("unpacking manifest %s: %s") % (short(n), inst))
1979 err(_("unpacking manifest %s: %s") % (short(n), inst))
1978 continue
1980 continue
1979
1981
1980 try:
1982 try:
1981 ff = [ l.split('\0') for l in delta.splitlines() ]
1983 ff = [ l.split('\0') for l in delta.splitlines() ]
1982 for f, fn in ff:
1984 for f, fn in ff:
1983 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1985 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1984 except (ValueError, TypeError), inst:
1986 except (ValueError, TypeError), inst:
1985 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1987 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1986
1988
1987 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1989 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1988
1990
1989 for m, c in neededmanifests.items():
1991 for m, c in neededmanifests.items():
1990 err(_("Changeset %s refers to unknown manifest %s") %
1992 err(_("Changeset %s refers to unknown manifest %s") %
1991 (short(m), short(c)))
1993 (short(m), short(c)))
1992 del neededmanifests
1994 del neededmanifests
1993
1995
1994 for f in filenodes:
1996 for f in filenodes:
1995 if f not in filelinkrevs:
1997 if f not in filelinkrevs:
1996 err(_("file %s in manifest but not in changesets") % f)
1998 err(_("file %s in manifest but not in changesets") % f)
1997
1999
1998 for f in filelinkrevs:
2000 for f in filelinkrevs:
1999 if f not in filenodes:
2001 if f not in filenodes:
2000 err(_("file %s in changeset but not in manifest") % f)
2002 err(_("file %s in changeset but not in manifest") % f)
2001
2003
2002 self.ui.status(_("checking files\n"))
2004 self.ui.status(_("checking files\n"))
2003 ff = filenodes.keys()
2005 ff = filenodes.keys()
2004 ff.sort()
2006 ff.sort()
2005 for f in ff:
2007 for f in ff:
2006 if f == "/dev/null":
2008 if f == "/dev/null":
2007 continue
2009 continue
2008 files += 1
2010 files += 1
2009 if not f:
2011 if not f:
2010 err(_("file without name in manifest %s") % short(n))
2012 err(_("file without name in manifest %s") % short(n))
2011 continue
2013 continue
2012 fl = self.file(f)
2014 fl = self.file(f)
2013 checkversion(fl, f)
2015 checkversion(fl, f)
2014 checksize(fl, f)
2016 checksize(fl, f)
2015
2017
2016 nodes = {nullid: 1}
2018 nodes = {nullid: 1}
2017 seen = {}
2019 seen = {}
2018 for i in range(fl.count()):
2020 for i in range(fl.count()):
2019 revisions += 1
2021 revisions += 1
2020 n = fl.node(i)
2022 n = fl.node(i)
2021
2023
2022 if n in seen:
2024 if n in seen:
2023 err(_("%s: duplicate revision %d") % (f, i))
2025 err(_("%s: duplicate revision %d") % (f, i))
2024 if n not in filenodes[f]:
2026 if n not in filenodes[f]:
2025 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2027 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2026 else:
2028 else:
2027 del filenodes[f][n]
2029 del filenodes[f][n]
2028
2030
2029 flr = fl.linkrev(n)
2031 flr = fl.linkrev(n)
2030 if flr not in filelinkrevs.get(f, []):
2032 if flr not in filelinkrevs.get(f, []):
2031 err(_("%s:%s points to unexpected changeset %d")
2033 err(_("%s:%s points to unexpected changeset %d")
2032 % (f, short(n), flr))
2034 % (f, short(n), flr))
2033 else:
2035 else:
2034 filelinkrevs[f].remove(flr)
2036 filelinkrevs[f].remove(flr)
2035
2037
2036 # verify contents
2038 # verify contents
2037 try:
2039 try:
2038 t = fl.read(n)
2040 t = fl.read(n)
2039 except KeyboardInterrupt:
2041 except KeyboardInterrupt:
2040 self.ui.warn(_("interrupted"))
2042 self.ui.warn(_("interrupted"))
2041 raise
2043 raise
2042 except Exception, inst:
2044 except Exception, inst:
2043 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2045 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2044
2046
2045 # verify parents
2047 # verify parents
2046 (p1, p2) = fl.parents(n)
2048 (p1, p2) = fl.parents(n)
2047 if p1 not in nodes:
2049 if p1 not in nodes:
2048 err(_("file %s:%s unknown parent 1 %s") %
2050 err(_("file %s:%s unknown parent 1 %s") %
2049 (f, short(n), short(p1)))
2051 (f, short(n), short(p1)))
2050 if p2 not in nodes:
2052 if p2 not in nodes:
2051 err(_("file %s:%s unknown parent 2 %s") %
2053 err(_("file %s:%s unknown parent 2 %s") %
2052 (f, short(n), short(p1)))
2054 (f, short(n), short(p1)))
2053 nodes[n] = 1
2055 nodes[n] = 1
2054
2056
2055 # cross-check
2057 # cross-check
2056 for node in filenodes[f]:
2058 for node in filenodes[f]:
2057 err(_("node %s in manifests not in %s") % (hex(node), f))
2059 err(_("node %s in manifests not in %s") % (hex(node), f))
2058
2060
2059 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2061 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2060 (files, changesets, revisions))
2062 (files, changesets, revisions))
2061
2063
2062 if warnings[0]:
2064 if warnings[0]:
2063 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2065 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2064 if errors[0]:
2066 if errors[0]:
2065 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2067 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2066 return 1
2068 return 1
2067
2069
2068 # used to avoid circular references so destructors work
2070 # used to avoid circular references so destructors work
2069 def aftertrans(base):
2071 def aftertrans(base):
2070 p = base
2072 p = base
2071 def a():
2073 def a():
2072 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2074 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2073 util.rename(os.path.join(p, "journal.dirstate"),
2075 util.rename(os.path.join(p, "journal.dirstate"),
2074 os.path.join(p, "undo.dirstate"))
2076 os.path.join(p, "undo.dirstate"))
2075 return a
2077 return a
2076
2078
General Comments 0
You need to be logged in to leave comments. Login now