##// END OF EJS Templates
document and fix findincoming...
Benoit Boissinot -
r2339:11422943 default
parent child Browse files
Show More
@@ -0,0 +1,49 b''
1 #!/bin/sh
2 #
3 # A B
4 #
5 # 3 4 3
6 # |\/| |\
7 # |/\| | \
8 # 1 2 1 2
9 # \ / \ /
10 # 0 0
11 #
12 # if the result of the merge of 1 and 2
13 # is the same in 3 and 4, no new manifest
14 # will be created and the manifest group
15 # will be empty during the pull
16 #
17 # (plus we test a failure where outgoing
18 # wrongly reported the number of csets)
19 #
20
21 hg init a
22 cd a
23 touch init
24 hg ci -A -m 0 -d "1000000 0"
25 touch x y
26 hg ci -A -m 1 -d "1000000 0"
27 hg update 0
28 touch x y
29 hg ci -A -m 2 -d "1000000 0"
30 hg merge 1
31 hg ci -A -m m1 -d "1000000 0"
32 #hg log
33 #hg debugindex .hg/00manifest.i
34 hg update -C 1
35 hg merge 2
36 hg ci -A -m m2 -d "1000000 0"
37 #hg log
38 #hg debugindex .hg/00manifest.i
39
40 cd ..
41 hg clone -r 3 a b
42 hg clone -r 4 a c
43 hg -R a outgoing b
44 hg -R a outgoing c
45 hg -R b outgoing c
46 hg -R c outgoing b
47
48 hg -R b pull a
49 hg -R c pull a
@@ -0,0 +1,72 b''
1 adding init
2 adding x
3 adding y
4 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
5 adding x
6 adding y
7 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
8 (branch merge, don't forget to commit)
9 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
10 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
11 (branch merge, don't forget to commit)
12 requesting all changes
13 adding changesets
14 adding manifests
15 adding file changes
16 added 4 changesets with 3 changes to 3 files
17 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
18 requesting all changes
19 adding changesets
20 adding manifests
21 adding file changes
22 added 4 changesets with 3 changes to 3 files
23 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
24 searching for changes
25 changeset: 4:fdb3c546e859
26 tag: tip
27 parent: 1:1f703b3fcbc6
28 parent: 2:de997049e034
29 user: test
30 date: Mon Jan 12 13:46:40 1970 +0000
31 summary: m2
32
33 searching for changes
34 changeset: 3:f40f830c0024
35 parent: 2:de997049e034
36 parent: 1:1f703b3fcbc6
37 user: test
38 date: Mon Jan 12 13:46:40 1970 +0000
39 summary: m1
40
41 searching for changes
42 changeset: 3:f40f830c0024
43 tag: tip
44 parent: 2:de997049e034
45 parent: 1:1f703b3fcbc6
46 user: test
47 date: Mon Jan 12 13:46:40 1970 +0000
48 summary: m1
49
50 searching for changes
51 changeset: 3:fdb3c546e859
52 tag: tip
53 parent: 1:1f703b3fcbc6
54 parent: 2:de997049e034
55 user: test
56 date: Mon Jan 12 13:46:40 1970 +0000
57 summary: m2
58
59 pulling from a
60 searching for changes
61 adding changesets
62 adding manifests
63 adding file changes
64 added 1 changesets with 0 changes to 0 files (+1 heads)
65 (run 'hg heads' to see heads, 'hg merge' to merge)
66 pulling from a
67 searching for changes
68 adding changesets
69 adding manifests
70 adding file changes
71 added 1 changesets with 0 changes to 0 files (+1 heads)
72 (run 'hg heads' to see heads, 'hg merge' to merge)
@@ -1,2108 +1,2119 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog")
15 demandload(globals(), "revlog")
16
16
17 class localrepository(object):
17 class localrepository(object):
18 def __del__(self):
18 def __del__(self):
19 self.transhandle = None
19 self.transhandle = None
20 def __init__(self, parentui, path=None, create=0):
20 def __init__(self, parentui, path=None, create=0):
21 if not path:
21 if not path:
22 p = os.getcwd()
22 p = os.getcwd()
23 while not os.path.isdir(os.path.join(p, ".hg")):
23 while not os.path.isdir(os.path.join(p, ".hg")):
24 oldp = p
24 oldp = p
25 p = os.path.dirname(p)
25 p = os.path.dirname(p)
26 if p == oldp:
26 if p == oldp:
27 raise repo.RepoError(_("no repo found"))
27 raise repo.RepoError(_("no repo found"))
28 path = p
28 path = p
29 self.path = os.path.join(path, ".hg")
29 self.path = os.path.join(path, ".hg")
30
30
31 if not create and not os.path.isdir(self.path):
31 if not create and not os.path.isdir(self.path):
32 raise repo.RepoError(_("repository %s not found") % path)
32 raise repo.RepoError(_("repository %s not found") % path)
33
33
34 self.root = os.path.abspath(path)
34 self.root = os.path.abspath(path)
35 self.origroot = path
35 self.origroot = path
36 self.ui = ui.ui(parentui=parentui)
36 self.ui = ui.ui(parentui=parentui)
37 self.opener = util.opener(self.path)
37 self.opener = util.opener(self.path)
38 self.wopener = util.opener(self.root)
38 self.wopener = util.opener(self.root)
39
39
40 try:
40 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
41 self.ui.readconfig(self.join("hgrc"), self.root)
42 except IOError:
42 except IOError:
43 pass
43 pass
44
44
45 v = self.ui.revlogopts
45 v = self.ui.revlogopts
46 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
46 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 fl = v.get('flags', None)
48 fl = v.get('flags', None)
49 flags = 0
49 flags = 0
50 if fl != None:
50 if fl != None:
51 for x in fl.split():
51 for x in fl.split():
52 flags |= revlog.flagstr(x)
52 flags |= revlog.flagstr(x)
53 elif self.revlogv1:
53 elif self.revlogv1:
54 flags = revlog.REVLOG_DEFAULT_FLAGS
54 flags = revlog.REVLOG_DEFAULT_FLAGS
55
55
56 v = self.revlogversion | flags
56 v = self.revlogversion | flags
57 self.manifest = manifest.manifest(self.opener, v)
57 self.manifest = manifest.manifest(self.opener, v)
58 self.changelog = changelog.changelog(self.opener, v)
58 self.changelog = changelog.changelog(self.opener, v)
59
59
60 # the changelog might not have the inline index flag
60 # the changelog might not have the inline index flag
61 # on. If the format of the changelog is the same as found in
61 # on. If the format of the changelog is the same as found in
62 # .hgrc, apply any flags found in the .hgrc as well.
62 # .hgrc, apply any flags found in the .hgrc as well.
63 # Otherwise, just version from the changelog
63 # Otherwise, just version from the changelog
64 v = self.changelog.version
64 v = self.changelog.version
65 if v == self.revlogversion:
65 if v == self.revlogversion:
66 v |= flags
66 v |= flags
67 self.revlogversion = v
67 self.revlogversion = v
68
68
69 self.tagscache = None
69 self.tagscache = None
70 self.nodetagscache = None
70 self.nodetagscache = None
71 self.encodepats = None
71 self.encodepats = None
72 self.decodepats = None
72 self.decodepats = None
73 self.transhandle = None
73 self.transhandle = None
74
74
75 if create:
75 if create:
76 os.mkdir(self.path)
76 os.mkdir(self.path)
77 os.mkdir(self.join("data"))
77 os.mkdir(self.join("data"))
78
78
79 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
79 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
80
80
81 def hook(self, name, throw=False, **args):
81 def hook(self, name, throw=False, **args):
82 def callhook(hname, funcname):
82 def callhook(hname, funcname):
83 '''call python hook. hook is callable object, looked up as
83 '''call python hook. hook is callable object, looked up as
84 name in python module. if callable returns "true", hook
84 name in python module. if callable returns "true", hook
85 fails, else passes. if hook raises exception, treated as
85 fails, else passes. if hook raises exception, treated as
86 hook failure. exception propagates if throw is "true".
86 hook failure. exception propagates if throw is "true".
87
87
88 reason for "true" meaning "hook failed" is so that
88 reason for "true" meaning "hook failed" is so that
89 unmodified commands (e.g. mercurial.commands.update) can
89 unmodified commands (e.g. mercurial.commands.update) can
90 be run as hooks without wrappers to convert return values.'''
90 be run as hooks without wrappers to convert return values.'''
91
91
92 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
92 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
93 d = funcname.rfind('.')
93 d = funcname.rfind('.')
94 if d == -1:
94 if d == -1:
95 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
95 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
96 % (hname, funcname))
96 % (hname, funcname))
97 modname = funcname[:d]
97 modname = funcname[:d]
98 try:
98 try:
99 obj = __import__(modname)
99 obj = __import__(modname)
100 except ImportError:
100 except ImportError:
101 raise util.Abort(_('%s hook is invalid '
101 raise util.Abort(_('%s hook is invalid '
102 '(import of "%s" failed)') %
102 '(import of "%s" failed)') %
103 (hname, modname))
103 (hname, modname))
104 try:
104 try:
105 for p in funcname.split('.')[1:]:
105 for p in funcname.split('.')[1:]:
106 obj = getattr(obj, p)
106 obj = getattr(obj, p)
107 except AttributeError, err:
107 except AttributeError, err:
108 raise util.Abort(_('%s hook is invalid '
108 raise util.Abort(_('%s hook is invalid '
109 '("%s" is not defined)') %
109 '("%s" is not defined)') %
110 (hname, funcname))
110 (hname, funcname))
111 if not callable(obj):
111 if not callable(obj):
112 raise util.Abort(_('%s hook is invalid '
112 raise util.Abort(_('%s hook is invalid '
113 '("%s" is not callable)') %
113 '("%s" is not callable)') %
114 (hname, funcname))
114 (hname, funcname))
115 try:
115 try:
116 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
116 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
117 except (KeyboardInterrupt, util.SignalInterrupt):
117 except (KeyboardInterrupt, util.SignalInterrupt):
118 raise
118 raise
119 except Exception, exc:
119 except Exception, exc:
120 if isinstance(exc, util.Abort):
120 if isinstance(exc, util.Abort):
121 self.ui.warn(_('error: %s hook failed: %s\n') %
121 self.ui.warn(_('error: %s hook failed: %s\n') %
122 (hname, exc.args[0] % exc.args[1:]))
122 (hname, exc.args[0] % exc.args[1:]))
123 else:
123 else:
124 self.ui.warn(_('error: %s hook raised an exception: '
124 self.ui.warn(_('error: %s hook raised an exception: '
125 '%s\n') % (hname, exc))
125 '%s\n') % (hname, exc))
126 if throw:
126 if throw:
127 raise
127 raise
128 self.ui.print_exc()
128 self.ui.print_exc()
129 return True
129 return True
130 if r:
130 if r:
131 if throw:
131 if throw:
132 raise util.Abort(_('%s hook failed') % hname)
132 raise util.Abort(_('%s hook failed') % hname)
133 self.ui.warn(_('warning: %s hook failed\n') % hname)
133 self.ui.warn(_('warning: %s hook failed\n') % hname)
134 return r
134 return r
135
135
136 def runhook(name, cmd):
136 def runhook(name, cmd):
137 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
137 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
138 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
138 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
139 r = util.system(cmd, environ=env, cwd=self.root)
139 r = util.system(cmd, environ=env, cwd=self.root)
140 if r:
140 if r:
141 desc, r = util.explain_exit(r)
141 desc, r = util.explain_exit(r)
142 if throw:
142 if throw:
143 raise util.Abort(_('%s hook %s') % (name, desc))
143 raise util.Abort(_('%s hook %s') % (name, desc))
144 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
144 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
145 return r
145 return r
146
146
147 r = False
147 r = False
148 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
148 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
149 if hname.split(".", 1)[0] == name and cmd]
149 if hname.split(".", 1)[0] == name and cmd]
150 hooks.sort()
150 hooks.sort()
151 for hname, cmd in hooks:
151 for hname, cmd in hooks:
152 if cmd.startswith('python:'):
152 if cmd.startswith('python:'):
153 r = callhook(hname, cmd[7:].strip()) or r
153 r = callhook(hname, cmd[7:].strip()) or r
154 else:
154 else:
155 r = runhook(hname, cmd) or r
155 r = runhook(hname, cmd) or r
156 return r
156 return r
157
157
158 def tags(self):
158 def tags(self):
159 '''return a mapping of tag to node'''
159 '''return a mapping of tag to node'''
160 if not self.tagscache:
160 if not self.tagscache:
161 self.tagscache = {}
161 self.tagscache = {}
162
162
163 def parsetag(line, context):
163 def parsetag(line, context):
164 if not line:
164 if not line:
165 return
165 return
166 s = l.split(" ", 1)
166 s = l.split(" ", 1)
167 if len(s) != 2:
167 if len(s) != 2:
168 self.ui.warn(_("%s: cannot parse entry\n") % context)
168 self.ui.warn(_("%s: cannot parse entry\n") % context)
169 return
169 return
170 node, key = s
170 node, key = s
171 key = key.strip()
171 key = key.strip()
172 try:
172 try:
173 bin_n = bin(node)
173 bin_n = bin(node)
174 except TypeError:
174 except TypeError:
175 self.ui.warn(_("%s: node '%s' is not well formed\n") %
175 self.ui.warn(_("%s: node '%s' is not well formed\n") %
176 (context, node))
176 (context, node))
177 return
177 return
178 if bin_n not in self.changelog.nodemap:
178 if bin_n not in self.changelog.nodemap:
179 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
179 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
180 (context, key))
180 (context, key))
181 return
181 return
182 self.tagscache[key] = bin_n
182 self.tagscache[key] = bin_n
183
183
184 # read the tags file from each head, ending with the tip,
184 # read the tags file from each head, ending with the tip,
185 # and add each tag found to the map, with "newer" ones
185 # and add each tag found to the map, with "newer" ones
186 # taking precedence
186 # taking precedence
187 heads = self.heads()
187 heads = self.heads()
188 heads.reverse()
188 heads.reverse()
189 fl = self.file(".hgtags")
189 fl = self.file(".hgtags")
190 for node in heads:
190 for node in heads:
191 change = self.changelog.read(node)
191 change = self.changelog.read(node)
192 rev = self.changelog.rev(node)
192 rev = self.changelog.rev(node)
193 fn, ff = self.manifest.find(change[0], '.hgtags')
193 fn, ff = self.manifest.find(change[0], '.hgtags')
194 if fn is None: continue
194 if fn is None: continue
195 count = 0
195 count = 0
196 for l in fl.read(fn).splitlines():
196 for l in fl.read(fn).splitlines():
197 count += 1
197 count += 1
198 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
198 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
199 (rev, short(node), count))
199 (rev, short(node), count))
200 try:
200 try:
201 f = self.opener("localtags")
201 f = self.opener("localtags")
202 count = 0
202 count = 0
203 for l in f:
203 for l in f:
204 count += 1
204 count += 1
205 parsetag(l, _("localtags, line %d") % count)
205 parsetag(l, _("localtags, line %d") % count)
206 except IOError:
206 except IOError:
207 pass
207 pass
208
208
209 self.tagscache['tip'] = self.changelog.tip()
209 self.tagscache['tip'] = self.changelog.tip()
210
210
211 return self.tagscache
211 return self.tagscache
212
212
213 def tagslist(self):
213 def tagslist(self):
214 '''return a list of tags ordered by revision'''
214 '''return a list of tags ordered by revision'''
215 l = []
215 l = []
216 for t, n in self.tags().items():
216 for t, n in self.tags().items():
217 try:
217 try:
218 r = self.changelog.rev(n)
218 r = self.changelog.rev(n)
219 except:
219 except:
220 r = -2 # sort to the beginning of the list if unknown
220 r = -2 # sort to the beginning of the list if unknown
221 l.append((r, t, n))
221 l.append((r, t, n))
222 l.sort()
222 l.sort()
223 return [(t, n) for r, t, n in l]
223 return [(t, n) for r, t, n in l]
224
224
225 def nodetags(self, node):
225 def nodetags(self, node):
226 '''return the tags associated with a node'''
226 '''return the tags associated with a node'''
227 if not self.nodetagscache:
227 if not self.nodetagscache:
228 self.nodetagscache = {}
228 self.nodetagscache = {}
229 for t, n in self.tags().items():
229 for t, n in self.tags().items():
230 self.nodetagscache.setdefault(n, []).append(t)
230 self.nodetagscache.setdefault(n, []).append(t)
231 return self.nodetagscache.get(node, [])
231 return self.nodetagscache.get(node, [])
232
232
233 def lookup(self, key):
233 def lookup(self, key):
234 try:
234 try:
235 return self.tags()[key]
235 return self.tags()[key]
236 except KeyError:
236 except KeyError:
237 try:
237 try:
238 return self.changelog.lookup(key)
238 return self.changelog.lookup(key)
239 except:
239 except:
240 raise repo.RepoError(_("unknown revision '%s'") % key)
240 raise repo.RepoError(_("unknown revision '%s'") % key)
241
241
242 def dev(self):
242 def dev(self):
243 return os.stat(self.path).st_dev
243 return os.stat(self.path).st_dev
244
244
245 def local(self):
245 def local(self):
246 return True
246 return True
247
247
248 def join(self, f):
248 def join(self, f):
249 return os.path.join(self.path, f)
249 return os.path.join(self.path, f)
250
250
251 def wjoin(self, f):
251 def wjoin(self, f):
252 return os.path.join(self.root, f)
252 return os.path.join(self.root, f)
253
253
254 def file(self, f):
254 def file(self, f):
255 if f[0] == '/':
255 if f[0] == '/':
256 f = f[1:]
256 f = f[1:]
257 return filelog.filelog(self.opener, f, self.revlogversion)
257 return filelog.filelog(self.opener, f, self.revlogversion)
258
258
259 def getcwd(self):
259 def getcwd(self):
260 return self.dirstate.getcwd()
260 return self.dirstate.getcwd()
261
261
262 def wfile(self, f, mode='r'):
262 def wfile(self, f, mode='r'):
263 return self.wopener(f, mode)
263 return self.wopener(f, mode)
264
264
265 def wread(self, filename):
265 def wread(self, filename):
266 if self.encodepats == None:
266 if self.encodepats == None:
267 l = []
267 l = []
268 for pat, cmd in self.ui.configitems("encode"):
268 for pat, cmd in self.ui.configitems("encode"):
269 mf = util.matcher(self.root, "", [pat], [], [])[1]
269 mf = util.matcher(self.root, "", [pat], [], [])[1]
270 l.append((mf, cmd))
270 l.append((mf, cmd))
271 self.encodepats = l
271 self.encodepats = l
272
272
273 data = self.wopener(filename, 'r').read()
273 data = self.wopener(filename, 'r').read()
274
274
275 for mf, cmd in self.encodepats:
275 for mf, cmd in self.encodepats:
276 if mf(filename):
276 if mf(filename):
277 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
277 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
278 data = util.filter(data, cmd)
278 data = util.filter(data, cmd)
279 break
279 break
280
280
281 return data
281 return data
282
282
283 def wwrite(self, filename, data, fd=None):
283 def wwrite(self, filename, data, fd=None):
284 if self.decodepats == None:
284 if self.decodepats == None:
285 l = []
285 l = []
286 for pat, cmd in self.ui.configitems("decode"):
286 for pat, cmd in self.ui.configitems("decode"):
287 mf = util.matcher(self.root, "", [pat], [], [])[1]
287 mf = util.matcher(self.root, "", [pat], [], [])[1]
288 l.append((mf, cmd))
288 l.append((mf, cmd))
289 self.decodepats = l
289 self.decodepats = l
290
290
291 for mf, cmd in self.decodepats:
291 for mf, cmd in self.decodepats:
292 if mf(filename):
292 if mf(filename):
293 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
293 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
294 data = util.filter(data, cmd)
294 data = util.filter(data, cmd)
295 break
295 break
296
296
297 if fd:
297 if fd:
298 return fd.write(data)
298 return fd.write(data)
299 return self.wopener(filename, 'w').write(data)
299 return self.wopener(filename, 'w').write(data)
300
300
301 def transaction(self):
301 def transaction(self):
302 tr = self.transhandle
302 tr = self.transhandle
303 if tr != None and tr.running():
303 if tr != None and tr.running():
304 return tr.nest()
304 return tr.nest()
305
305
306 # save dirstate for undo
306 # save dirstate for undo
307 try:
307 try:
308 ds = self.opener("dirstate").read()
308 ds = self.opener("dirstate").read()
309 except IOError:
309 except IOError:
310 ds = ""
310 ds = ""
311 self.opener("journal.dirstate", "w").write(ds)
311 self.opener("journal.dirstate", "w").write(ds)
312
312
313 tr = transaction.transaction(self.ui.warn, self.opener,
313 tr = transaction.transaction(self.ui.warn, self.opener,
314 self.join("journal"),
314 self.join("journal"),
315 aftertrans(self.path))
315 aftertrans(self.path))
316 self.transhandle = tr
316 self.transhandle = tr
317 return tr
317 return tr
318
318
319 def recover(self):
319 def recover(self):
320 l = self.lock()
320 l = self.lock()
321 if os.path.exists(self.join("journal")):
321 if os.path.exists(self.join("journal")):
322 self.ui.status(_("rolling back interrupted transaction\n"))
322 self.ui.status(_("rolling back interrupted transaction\n"))
323 transaction.rollback(self.opener, self.join("journal"))
323 transaction.rollback(self.opener, self.join("journal"))
324 self.reload()
324 self.reload()
325 return True
325 return True
326 else:
326 else:
327 self.ui.warn(_("no interrupted transaction available\n"))
327 self.ui.warn(_("no interrupted transaction available\n"))
328 return False
328 return False
329
329
330 def undo(self, wlock=None):
330 def undo(self, wlock=None):
331 if not wlock:
331 if not wlock:
332 wlock = self.wlock()
332 wlock = self.wlock()
333 l = self.lock()
333 l = self.lock()
334 if os.path.exists(self.join("undo")):
334 if os.path.exists(self.join("undo")):
335 self.ui.status(_("rolling back last transaction\n"))
335 self.ui.status(_("rolling back last transaction\n"))
336 transaction.rollback(self.opener, self.join("undo"))
336 transaction.rollback(self.opener, self.join("undo"))
337 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
337 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
338 self.reload()
338 self.reload()
339 self.wreload()
339 self.wreload()
340 else:
340 else:
341 self.ui.warn(_("no undo information available\n"))
341 self.ui.warn(_("no undo information available\n"))
342
342
343 def wreload(self):
343 def wreload(self):
344 self.dirstate.read()
344 self.dirstate.read()
345
345
346 def reload(self):
346 def reload(self):
347 self.changelog.load()
347 self.changelog.load()
348 self.manifest.load()
348 self.manifest.load()
349 self.tagscache = None
349 self.tagscache = None
350 self.nodetagscache = None
350 self.nodetagscache = None
351
351
352 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
352 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
353 desc=None):
353 desc=None):
354 try:
354 try:
355 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
355 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
356 except lock.LockHeld, inst:
356 except lock.LockHeld, inst:
357 if not wait:
357 if not wait:
358 raise
358 raise
359 self.ui.warn(_("waiting for lock on %s held by %s\n") %
359 self.ui.warn(_("waiting for lock on %s held by %s\n") %
360 (desc, inst.args[0]))
360 (desc, inst.args[0]))
361 # default to 600 seconds timeout
361 # default to 600 seconds timeout
362 l = lock.lock(self.join(lockname),
362 l = lock.lock(self.join(lockname),
363 int(self.ui.config("ui", "timeout") or 600),
363 int(self.ui.config("ui", "timeout") or 600),
364 releasefn, desc=desc)
364 releasefn, desc=desc)
365 if acquirefn:
365 if acquirefn:
366 acquirefn()
366 acquirefn()
367 return l
367 return l
368
368
369 def lock(self, wait=1):
369 def lock(self, wait=1):
370 return self.do_lock("lock", wait, acquirefn=self.reload,
370 return self.do_lock("lock", wait, acquirefn=self.reload,
371 desc=_('repository %s') % self.origroot)
371 desc=_('repository %s') % self.origroot)
372
372
373 def wlock(self, wait=1):
373 def wlock(self, wait=1):
374 return self.do_lock("wlock", wait, self.dirstate.write,
374 return self.do_lock("wlock", wait, self.dirstate.write,
375 self.wreload,
375 self.wreload,
376 desc=_('working directory of %s') % self.origroot)
376 desc=_('working directory of %s') % self.origroot)
377
377
378 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
378 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
379 "determine whether a new filenode is needed"
379 "determine whether a new filenode is needed"
380 fp1 = manifest1.get(filename, nullid)
380 fp1 = manifest1.get(filename, nullid)
381 fp2 = manifest2.get(filename, nullid)
381 fp2 = manifest2.get(filename, nullid)
382
382
383 if fp2 != nullid:
383 if fp2 != nullid:
384 # is one parent an ancestor of the other?
384 # is one parent an ancestor of the other?
385 fpa = filelog.ancestor(fp1, fp2)
385 fpa = filelog.ancestor(fp1, fp2)
386 if fpa == fp1:
386 if fpa == fp1:
387 fp1, fp2 = fp2, nullid
387 fp1, fp2 = fp2, nullid
388 elif fpa == fp2:
388 elif fpa == fp2:
389 fp2 = nullid
389 fp2 = nullid
390
390
391 # is the file unmodified from the parent? report existing entry
391 # is the file unmodified from the parent? report existing entry
392 if fp2 == nullid and text == filelog.read(fp1):
392 if fp2 == nullid and text == filelog.read(fp1):
393 return (fp1, None, None)
393 return (fp1, None, None)
394
394
395 return (None, fp1, fp2)
395 return (None, fp1, fp2)
396
396
397 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
397 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
398 orig_parent = self.dirstate.parents()[0] or nullid
398 orig_parent = self.dirstate.parents()[0] or nullid
399 p1 = p1 or self.dirstate.parents()[0] or nullid
399 p1 = p1 or self.dirstate.parents()[0] or nullid
400 p2 = p2 or self.dirstate.parents()[1] or nullid
400 p2 = p2 or self.dirstate.parents()[1] or nullid
401 c1 = self.changelog.read(p1)
401 c1 = self.changelog.read(p1)
402 c2 = self.changelog.read(p2)
402 c2 = self.changelog.read(p2)
403 m1 = self.manifest.read(c1[0])
403 m1 = self.manifest.read(c1[0])
404 mf1 = self.manifest.readflags(c1[0])
404 mf1 = self.manifest.readflags(c1[0])
405 m2 = self.manifest.read(c2[0])
405 m2 = self.manifest.read(c2[0])
406 changed = []
406 changed = []
407
407
408 if orig_parent == p1:
408 if orig_parent == p1:
409 update_dirstate = 1
409 update_dirstate = 1
410 else:
410 else:
411 update_dirstate = 0
411 update_dirstate = 0
412
412
413 if not wlock:
413 if not wlock:
414 wlock = self.wlock()
414 wlock = self.wlock()
415 l = self.lock()
415 l = self.lock()
416 tr = self.transaction()
416 tr = self.transaction()
417 mm = m1.copy()
417 mm = m1.copy()
418 mfm = mf1.copy()
418 mfm = mf1.copy()
419 linkrev = self.changelog.count()
419 linkrev = self.changelog.count()
420 for f in files:
420 for f in files:
421 try:
421 try:
422 t = self.wread(f)
422 t = self.wread(f)
423 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
423 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
424 r = self.file(f)
424 r = self.file(f)
425 mfm[f] = tm
425 mfm[f] = tm
426
426
427 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
427 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
428 if entry:
428 if entry:
429 mm[f] = entry
429 mm[f] = entry
430 continue
430 continue
431
431
432 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
432 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
433 changed.append(f)
433 changed.append(f)
434 if update_dirstate:
434 if update_dirstate:
435 self.dirstate.update([f], "n")
435 self.dirstate.update([f], "n")
436 except IOError:
436 except IOError:
437 try:
437 try:
438 del mm[f]
438 del mm[f]
439 del mfm[f]
439 del mfm[f]
440 if update_dirstate:
440 if update_dirstate:
441 self.dirstate.forget([f])
441 self.dirstate.forget([f])
442 except:
442 except:
443 # deleted from p2?
443 # deleted from p2?
444 pass
444 pass
445
445
446 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
446 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
447 user = user or self.ui.username()
447 user = user or self.ui.username()
448 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
448 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
449 tr.close()
449 tr.close()
450 if update_dirstate:
450 if update_dirstate:
451 self.dirstate.setparents(n, nullid)
451 self.dirstate.setparents(n, nullid)
452
452
453 def commit(self, files=None, text="", user=None, date=None,
453 def commit(self, files=None, text="", user=None, date=None,
454 match=util.always, force=False, lock=None, wlock=None,
454 match=util.always, force=False, lock=None, wlock=None,
455 force_editor=False):
455 force_editor=False):
456 commit = []
456 commit = []
457 remove = []
457 remove = []
458 changed = []
458 changed = []
459
459
460 if files:
460 if files:
461 for f in files:
461 for f in files:
462 s = self.dirstate.state(f)
462 s = self.dirstate.state(f)
463 if s in 'nmai':
463 if s in 'nmai':
464 commit.append(f)
464 commit.append(f)
465 elif s == 'r':
465 elif s == 'r':
466 remove.append(f)
466 remove.append(f)
467 else:
467 else:
468 self.ui.warn(_("%s not tracked!\n") % f)
468 self.ui.warn(_("%s not tracked!\n") % f)
469 else:
469 else:
470 modified, added, removed, deleted, unknown = self.changes(match=match)
470 modified, added, removed, deleted, unknown = self.changes(match=match)
471 commit = modified + added
471 commit = modified + added
472 remove = removed
472 remove = removed
473
473
474 p1, p2 = self.dirstate.parents()
474 p1, p2 = self.dirstate.parents()
475 c1 = self.changelog.read(p1)
475 c1 = self.changelog.read(p1)
476 c2 = self.changelog.read(p2)
476 c2 = self.changelog.read(p2)
477 m1 = self.manifest.read(c1[0])
477 m1 = self.manifest.read(c1[0])
478 mf1 = self.manifest.readflags(c1[0])
478 mf1 = self.manifest.readflags(c1[0])
479 m2 = self.manifest.read(c2[0])
479 m2 = self.manifest.read(c2[0])
480
480
481 if not commit and not remove and not force and p2 == nullid:
481 if not commit and not remove and not force and p2 == nullid:
482 self.ui.status(_("nothing changed\n"))
482 self.ui.status(_("nothing changed\n"))
483 return None
483 return None
484
484
485 xp1 = hex(p1)
485 xp1 = hex(p1)
486 if p2 == nullid: xp2 = ''
486 if p2 == nullid: xp2 = ''
487 else: xp2 = hex(p2)
487 else: xp2 = hex(p2)
488
488
489 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
489 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
490
490
491 if not wlock:
491 if not wlock:
492 wlock = self.wlock()
492 wlock = self.wlock()
493 if not lock:
493 if not lock:
494 lock = self.lock()
494 lock = self.lock()
495 tr = self.transaction()
495 tr = self.transaction()
496
496
497 # check in files
497 # check in files
498 new = {}
498 new = {}
499 linkrev = self.changelog.count()
499 linkrev = self.changelog.count()
500 commit.sort()
500 commit.sort()
501 for f in commit:
501 for f in commit:
502 self.ui.note(f + "\n")
502 self.ui.note(f + "\n")
503 try:
503 try:
504 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
504 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
505 t = self.wread(f)
505 t = self.wread(f)
506 except IOError:
506 except IOError:
507 self.ui.warn(_("trouble committing %s!\n") % f)
507 self.ui.warn(_("trouble committing %s!\n") % f)
508 raise
508 raise
509
509
510 r = self.file(f)
510 r = self.file(f)
511
511
512 meta = {}
512 meta = {}
513 cp = self.dirstate.copied(f)
513 cp = self.dirstate.copied(f)
514 if cp:
514 if cp:
515 meta["copy"] = cp
515 meta["copy"] = cp
516 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
516 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
517 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
517 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
518 fp1, fp2 = nullid, nullid
518 fp1, fp2 = nullid, nullid
519 else:
519 else:
520 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
520 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
521 if entry:
521 if entry:
522 new[f] = entry
522 new[f] = entry
523 continue
523 continue
524
524
525 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
525 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
526 # remember what we've added so that we can later calculate
526 # remember what we've added so that we can later calculate
527 # the files to pull from a set of changesets
527 # the files to pull from a set of changesets
528 changed.append(f)
528 changed.append(f)
529
529
530 # update manifest
530 # update manifest
531 m1 = m1.copy()
531 m1 = m1.copy()
532 m1.update(new)
532 m1.update(new)
533 for f in remove:
533 for f in remove:
534 if f in m1:
534 if f in m1:
535 del m1[f]
535 del m1[f]
536 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
536 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
537 (new, remove))
537 (new, remove))
538
538
539 # add changeset
539 # add changeset
540 new = new.keys()
540 new = new.keys()
541 new.sort()
541 new.sort()
542
542
543 user = user or self.ui.username()
543 user = user or self.ui.username()
544 if not text or force_editor:
544 if not text or force_editor:
545 edittext = []
545 edittext = []
546 if text:
546 if text:
547 edittext.append(text)
547 edittext.append(text)
548 edittext.append("")
548 edittext.append("")
549 if p2 != nullid:
549 if p2 != nullid:
550 edittext.append("HG: branch merge")
550 edittext.append("HG: branch merge")
551 edittext.extend(["HG: changed %s" % f for f in changed])
551 edittext.extend(["HG: changed %s" % f for f in changed])
552 edittext.extend(["HG: removed %s" % f for f in remove])
552 edittext.extend(["HG: removed %s" % f for f in remove])
553 if not changed and not remove:
553 if not changed and not remove:
554 edittext.append("HG: no files changed")
554 edittext.append("HG: no files changed")
555 edittext.append("")
555 edittext.append("")
556 # run editor in the repository root
556 # run editor in the repository root
557 olddir = os.getcwd()
557 olddir = os.getcwd()
558 os.chdir(self.root)
558 os.chdir(self.root)
559 text = self.ui.edit("\n".join(edittext), user)
559 text = self.ui.edit("\n".join(edittext), user)
560 os.chdir(olddir)
560 os.chdir(olddir)
561
561
562 lines = [line.rstrip() for line in text.rstrip().splitlines()]
562 lines = [line.rstrip() for line in text.rstrip().splitlines()]
563 while lines and not lines[0]:
563 while lines and not lines[0]:
564 del lines[0]
564 del lines[0]
565 if not lines:
565 if not lines:
566 return None
566 return None
567 text = '\n'.join(lines)
567 text = '\n'.join(lines)
568 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
568 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
569 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
569 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
570 parent2=xp2)
570 parent2=xp2)
571 tr.close()
571 tr.close()
572
572
573 self.dirstate.setparents(n)
573 self.dirstate.setparents(n)
574 self.dirstate.update(new, "n")
574 self.dirstate.update(new, "n")
575 self.dirstate.forget(remove)
575 self.dirstate.forget(remove)
576
576
577 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
577 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
578 return n
578 return n
579
579
580 def walk(self, node=None, files=[], match=util.always, badmatch=None):
580 def walk(self, node=None, files=[], match=util.always, badmatch=None):
581 if node:
581 if node:
582 fdict = dict.fromkeys(files)
582 fdict = dict.fromkeys(files)
583 for fn in self.manifest.read(self.changelog.read(node)[0]):
583 for fn in self.manifest.read(self.changelog.read(node)[0]):
584 fdict.pop(fn, None)
584 fdict.pop(fn, None)
585 if match(fn):
585 if match(fn):
586 yield 'm', fn
586 yield 'm', fn
587 for fn in fdict:
587 for fn in fdict:
588 if badmatch and badmatch(fn):
588 if badmatch and badmatch(fn):
589 if match(fn):
589 if match(fn):
590 yield 'b', fn
590 yield 'b', fn
591 else:
591 else:
592 self.ui.warn(_('%s: No such file in rev %s\n') % (
592 self.ui.warn(_('%s: No such file in rev %s\n') % (
593 util.pathto(self.getcwd(), fn), short(node)))
593 util.pathto(self.getcwd(), fn), short(node)))
594 else:
594 else:
595 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
595 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
596 yield src, fn
596 yield src, fn
597
597
598 def changes(self, node1=None, node2=None, files=[], match=util.always,
598 def changes(self, node1=None, node2=None, files=[], match=util.always,
599 wlock=None, show_ignored=None):
599 wlock=None, show_ignored=None):
600 """return changes between two nodes or node and working directory
600 """return changes between two nodes or node and working directory
601
601
602 If node1 is None, use the first dirstate parent instead.
602 If node1 is None, use the first dirstate parent instead.
603 If node2 is None, compare node1 with working directory.
603 If node2 is None, compare node1 with working directory.
604 """
604 """
605
605
606 def fcmp(fn, mf):
606 def fcmp(fn, mf):
607 t1 = self.wread(fn)
607 t1 = self.wread(fn)
608 t2 = self.file(fn).read(mf.get(fn, nullid))
608 t2 = self.file(fn).read(mf.get(fn, nullid))
609 return cmp(t1, t2)
609 return cmp(t1, t2)
610
610
611 def mfmatches(node):
611 def mfmatches(node):
612 change = self.changelog.read(node)
612 change = self.changelog.read(node)
613 mf = dict(self.manifest.read(change[0]))
613 mf = dict(self.manifest.read(change[0]))
614 for fn in mf.keys():
614 for fn in mf.keys():
615 if not match(fn):
615 if not match(fn):
616 del mf[fn]
616 del mf[fn]
617 return mf
617 return mf
618
618
619 if node1:
619 if node1:
620 # read the manifest from node1 before the manifest from node2,
620 # read the manifest from node1 before the manifest from node2,
621 # so that we'll hit the manifest cache if we're going through
621 # so that we'll hit the manifest cache if we're going through
622 # all the revisions in parent->child order.
622 # all the revisions in parent->child order.
623 mf1 = mfmatches(node1)
623 mf1 = mfmatches(node1)
624
624
625 # are we comparing the working directory?
625 # are we comparing the working directory?
626 if not node2:
626 if not node2:
627 if not wlock:
627 if not wlock:
628 try:
628 try:
629 wlock = self.wlock(wait=0)
629 wlock = self.wlock(wait=0)
630 except lock.LockException:
630 except lock.LockException:
631 wlock = None
631 wlock = None
632 lookup, modified, added, removed, deleted, unknown, ignored = (
632 lookup, modified, added, removed, deleted, unknown, ignored = (
633 self.dirstate.changes(files, match, show_ignored))
633 self.dirstate.changes(files, match, show_ignored))
634
634
635 # are we comparing working dir against its parent?
635 # are we comparing working dir against its parent?
636 if not node1:
636 if not node1:
637 if lookup:
637 if lookup:
638 # do a full compare of any files that might have changed
638 # do a full compare of any files that might have changed
639 mf2 = mfmatches(self.dirstate.parents()[0])
639 mf2 = mfmatches(self.dirstate.parents()[0])
640 for f in lookup:
640 for f in lookup:
641 if fcmp(f, mf2):
641 if fcmp(f, mf2):
642 modified.append(f)
642 modified.append(f)
643 elif wlock is not None:
643 elif wlock is not None:
644 self.dirstate.update([f], "n")
644 self.dirstate.update([f], "n")
645 else:
645 else:
646 # we are comparing working dir against non-parent
646 # we are comparing working dir against non-parent
647 # generate a pseudo-manifest for the working dir
647 # generate a pseudo-manifest for the working dir
648 mf2 = mfmatches(self.dirstate.parents()[0])
648 mf2 = mfmatches(self.dirstate.parents()[0])
649 for f in lookup + modified + added:
649 for f in lookup + modified + added:
650 mf2[f] = ""
650 mf2[f] = ""
651 for f in removed:
651 for f in removed:
652 if f in mf2:
652 if f in mf2:
653 del mf2[f]
653 del mf2[f]
654 else:
654 else:
655 # we are comparing two revisions
655 # we are comparing two revisions
656 deleted, unknown, ignored = [], [], []
656 deleted, unknown, ignored = [], [], []
657 mf2 = mfmatches(node2)
657 mf2 = mfmatches(node2)
658
658
659 if node1:
659 if node1:
660 # flush lists from dirstate before comparing manifests
660 # flush lists from dirstate before comparing manifests
661 modified, added = [], []
661 modified, added = [], []
662
662
663 for fn in mf2:
663 for fn in mf2:
664 if mf1.has_key(fn):
664 if mf1.has_key(fn):
665 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
665 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
666 modified.append(fn)
666 modified.append(fn)
667 del mf1[fn]
667 del mf1[fn]
668 else:
668 else:
669 added.append(fn)
669 added.append(fn)
670
670
671 removed = mf1.keys()
671 removed = mf1.keys()
672
672
673 # sort and return results:
673 # sort and return results:
674 for l in modified, added, removed, deleted, unknown, ignored:
674 for l in modified, added, removed, deleted, unknown, ignored:
675 l.sort()
675 l.sort()
676 if show_ignored is None:
676 if show_ignored is None:
677 return (modified, added, removed, deleted, unknown)
677 return (modified, added, removed, deleted, unknown)
678 else:
678 else:
679 return (modified, added, removed, deleted, unknown, ignored)
679 return (modified, added, removed, deleted, unknown, ignored)
680
680
681 def add(self, list, wlock=None):
681 def add(self, list, wlock=None):
682 if not wlock:
682 if not wlock:
683 wlock = self.wlock()
683 wlock = self.wlock()
684 for f in list:
684 for f in list:
685 p = self.wjoin(f)
685 p = self.wjoin(f)
686 if not os.path.exists(p):
686 if not os.path.exists(p):
687 self.ui.warn(_("%s does not exist!\n") % f)
687 self.ui.warn(_("%s does not exist!\n") % f)
688 elif not os.path.isfile(p):
688 elif not os.path.isfile(p):
689 self.ui.warn(_("%s not added: only files supported currently\n")
689 self.ui.warn(_("%s not added: only files supported currently\n")
690 % f)
690 % f)
691 elif self.dirstate.state(f) in 'an':
691 elif self.dirstate.state(f) in 'an':
692 self.ui.warn(_("%s already tracked!\n") % f)
692 self.ui.warn(_("%s already tracked!\n") % f)
693 else:
693 else:
694 self.dirstate.update([f], "a")
694 self.dirstate.update([f], "a")
695
695
696 def forget(self, list, wlock=None):
696 def forget(self, list, wlock=None):
697 if not wlock:
697 if not wlock:
698 wlock = self.wlock()
698 wlock = self.wlock()
699 for f in list:
699 for f in list:
700 if self.dirstate.state(f) not in 'ai':
700 if self.dirstate.state(f) not in 'ai':
701 self.ui.warn(_("%s not added!\n") % f)
701 self.ui.warn(_("%s not added!\n") % f)
702 else:
702 else:
703 self.dirstate.forget([f])
703 self.dirstate.forget([f])
704
704
705 def remove(self, list, unlink=False, wlock=None):
705 def remove(self, list, unlink=False, wlock=None):
706 if unlink:
706 if unlink:
707 for f in list:
707 for f in list:
708 try:
708 try:
709 util.unlink(self.wjoin(f))
709 util.unlink(self.wjoin(f))
710 except OSError, inst:
710 except OSError, inst:
711 if inst.errno != errno.ENOENT:
711 if inst.errno != errno.ENOENT:
712 raise
712 raise
713 if not wlock:
713 if not wlock:
714 wlock = self.wlock()
714 wlock = self.wlock()
715 for f in list:
715 for f in list:
716 p = self.wjoin(f)
716 p = self.wjoin(f)
717 if os.path.exists(p):
717 if os.path.exists(p):
718 self.ui.warn(_("%s still exists!\n") % f)
718 self.ui.warn(_("%s still exists!\n") % f)
719 elif self.dirstate.state(f) == 'a':
719 elif self.dirstate.state(f) == 'a':
720 self.dirstate.forget([f])
720 self.dirstate.forget([f])
721 elif f not in self.dirstate:
721 elif f not in self.dirstate:
722 self.ui.warn(_("%s not tracked!\n") % f)
722 self.ui.warn(_("%s not tracked!\n") % f)
723 else:
723 else:
724 self.dirstate.update([f], "r")
724 self.dirstate.update([f], "r")
725
725
726 def undelete(self, list, wlock=None):
726 def undelete(self, list, wlock=None):
727 p = self.dirstate.parents()[0]
727 p = self.dirstate.parents()[0]
728 mn = self.changelog.read(p)[0]
728 mn = self.changelog.read(p)[0]
729 mf = self.manifest.readflags(mn)
729 mf = self.manifest.readflags(mn)
730 m = self.manifest.read(mn)
730 m = self.manifest.read(mn)
731 if not wlock:
731 if not wlock:
732 wlock = self.wlock()
732 wlock = self.wlock()
733 for f in list:
733 for f in list:
734 if self.dirstate.state(f) not in "r":
734 if self.dirstate.state(f) not in "r":
735 self.ui.warn("%s not removed!\n" % f)
735 self.ui.warn("%s not removed!\n" % f)
736 else:
736 else:
737 t = self.file(f).read(m[f])
737 t = self.file(f).read(m[f])
738 self.wwrite(f, t)
738 self.wwrite(f, t)
739 util.set_exec(self.wjoin(f), mf[f])
739 util.set_exec(self.wjoin(f), mf[f])
740 self.dirstate.update([f], "n")
740 self.dirstate.update([f], "n")
741
741
742 def copy(self, source, dest, wlock=None):
742 def copy(self, source, dest, wlock=None):
743 p = self.wjoin(dest)
743 p = self.wjoin(dest)
744 if not os.path.exists(p):
744 if not os.path.exists(p):
745 self.ui.warn(_("%s does not exist!\n") % dest)
745 self.ui.warn(_("%s does not exist!\n") % dest)
746 elif not os.path.isfile(p):
746 elif not os.path.isfile(p):
747 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
747 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
748 else:
748 else:
749 if not wlock:
749 if not wlock:
750 wlock = self.wlock()
750 wlock = self.wlock()
751 if self.dirstate.state(dest) == '?':
751 if self.dirstate.state(dest) == '?':
752 self.dirstate.update([dest], "a")
752 self.dirstate.update([dest], "a")
753 self.dirstate.copy(source, dest)
753 self.dirstate.copy(source, dest)
754
754
755 def heads(self, start=None):
755 def heads(self, start=None):
756 heads = self.changelog.heads(start)
756 heads = self.changelog.heads(start)
757 # sort the output in rev descending order
757 # sort the output in rev descending order
758 heads = [(-self.changelog.rev(h), h) for h in heads]
758 heads = [(-self.changelog.rev(h), h) for h in heads]
759 heads.sort()
759 heads.sort()
760 return [n for (r, n) in heads]
760 return [n for (r, n) in heads]
761
761
762 # branchlookup returns a dict giving a list of branches for
762 # branchlookup returns a dict giving a list of branches for
763 # each head. A branch is defined as the tag of a node or
763 # each head. A branch is defined as the tag of a node or
764 # the branch of the node's parents. If a node has multiple
764 # the branch of the node's parents. If a node has multiple
765 # branch tags, tags are eliminated if they are visible from other
765 # branch tags, tags are eliminated if they are visible from other
766 # branch tags.
766 # branch tags.
767 #
767 #
768 # So, for this graph: a->b->c->d->e
768 # So, for this graph: a->b->c->d->e
769 # \ /
769 # \ /
770 # aa -----/
770 # aa -----/
771 # a has tag 2.6.12
771 # a has tag 2.6.12
772 # d has tag 2.6.13
772 # d has tag 2.6.13
773 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
773 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
774 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
774 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
775 # from the list.
775 # from the list.
776 #
776 #
777 # It is possible that more than one head will have the same branch tag.
777 # It is possible that more than one head will have the same branch tag.
778 # callers need to check the result for multiple heads under the same
778 # callers need to check the result for multiple heads under the same
779 # branch tag if that is a problem for them (ie checkout of a specific
779 # branch tag if that is a problem for them (ie checkout of a specific
780 # branch).
780 # branch).
781 #
781 #
782 # passing in a specific branch will limit the depth of the search
782 # passing in a specific branch will limit the depth of the search
783 # through the parents. It won't limit the branches returned in the
783 # through the parents. It won't limit the branches returned in the
784 # result though.
784 # result though.
785 def branchlookup(self, heads=None, branch=None):
785 def branchlookup(self, heads=None, branch=None):
786 if not heads:
786 if not heads:
787 heads = self.heads()
787 heads = self.heads()
788 headt = [ h for h in heads ]
788 headt = [ h for h in heads ]
789 chlog = self.changelog
789 chlog = self.changelog
790 branches = {}
790 branches = {}
791 merges = []
791 merges = []
792 seenmerge = {}
792 seenmerge = {}
793
793
794 # traverse the tree once for each head, recording in the branches
794 # traverse the tree once for each head, recording in the branches
795 # dict which tags are visible from this head. The branches
795 # dict which tags are visible from this head. The branches
796 # dict also records which tags are visible from each tag
796 # dict also records which tags are visible from each tag
797 # while we traverse.
797 # while we traverse.
798 while headt or merges:
798 while headt or merges:
799 if merges:
799 if merges:
800 n, found = merges.pop()
800 n, found = merges.pop()
801 visit = [n]
801 visit = [n]
802 else:
802 else:
803 h = headt.pop()
803 h = headt.pop()
804 visit = [h]
804 visit = [h]
805 found = [h]
805 found = [h]
806 seen = {}
806 seen = {}
807 while visit:
807 while visit:
808 n = visit.pop()
808 n = visit.pop()
809 if n in seen:
809 if n in seen:
810 continue
810 continue
811 pp = chlog.parents(n)
811 pp = chlog.parents(n)
812 tags = self.nodetags(n)
812 tags = self.nodetags(n)
813 if tags:
813 if tags:
814 for x in tags:
814 for x in tags:
815 if x == 'tip':
815 if x == 'tip':
816 continue
816 continue
817 for f in found:
817 for f in found:
818 branches.setdefault(f, {})[n] = 1
818 branches.setdefault(f, {})[n] = 1
819 branches.setdefault(n, {})[n] = 1
819 branches.setdefault(n, {})[n] = 1
820 break
820 break
821 if n not in found:
821 if n not in found:
822 found.append(n)
822 found.append(n)
823 if branch in tags:
823 if branch in tags:
824 continue
824 continue
825 seen[n] = 1
825 seen[n] = 1
826 if pp[1] != nullid and n not in seenmerge:
826 if pp[1] != nullid and n not in seenmerge:
827 merges.append((pp[1], [x for x in found]))
827 merges.append((pp[1], [x for x in found]))
828 seenmerge[n] = 1
828 seenmerge[n] = 1
829 if pp[0] != nullid:
829 if pp[0] != nullid:
830 visit.append(pp[0])
830 visit.append(pp[0])
831 # traverse the branches dict, eliminating branch tags from each
831 # traverse the branches dict, eliminating branch tags from each
832 # head that are visible from another branch tag for that head.
832 # head that are visible from another branch tag for that head.
833 out = {}
833 out = {}
834 viscache = {}
834 viscache = {}
835 for h in heads:
835 for h in heads:
836 def visible(node):
836 def visible(node):
837 if node in viscache:
837 if node in viscache:
838 return viscache[node]
838 return viscache[node]
839 ret = {}
839 ret = {}
840 visit = [node]
840 visit = [node]
841 while visit:
841 while visit:
842 x = visit.pop()
842 x = visit.pop()
843 if x in viscache:
843 if x in viscache:
844 ret.update(viscache[x])
844 ret.update(viscache[x])
845 elif x not in ret:
845 elif x not in ret:
846 ret[x] = 1
846 ret[x] = 1
847 if x in branches:
847 if x in branches:
848 visit[len(visit):] = branches[x].keys()
848 visit[len(visit):] = branches[x].keys()
849 viscache[node] = ret
849 viscache[node] = ret
850 return ret
850 return ret
851 if h not in branches:
851 if h not in branches:
852 continue
852 continue
853 # O(n^2), but somewhat limited. This only searches the
853 # O(n^2), but somewhat limited. This only searches the
854 # tags visible from a specific head, not all the tags in the
854 # tags visible from a specific head, not all the tags in the
855 # whole repo.
855 # whole repo.
856 for b in branches[h]:
856 for b in branches[h]:
857 vis = False
857 vis = False
858 for bb in branches[h].keys():
858 for bb in branches[h].keys():
859 if b != bb:
859 if b != bb:
860 if b in visible(bb):
860 if b in visible(bb):
861 vis = True
861 vis = True
862 break
862 break
863 if not vis:
863 if not vis:
864 l = out.setdefault(h, [])
864 l = out.setdefault(h, [])
865 l[len(l):] = self.nodetags(b)
865 l[len(l):] = self.nodetags(b)
866 return out
866 return out
867
867
868 def branches(self, nodes):
868 def branches(self, nodes):
869 if not nodes:
869 if not nodes:
870 nodes = [self.changelog.tip()]
870 nodes = [self.changelog.tip()]
871 b = []
871 b = []
872 for n in nodes:
872 for n in nodes:
873 t = n
873 t = n
874 while n:
874 while n:
875 p = self.changelog.parents(n)
875 p = self.changelog.parents(n)
876 if p[1] != nullid or p[0] == nullid:
876 if p[1] != nullid or p[0] == nullid:
877 b.append((t, n, p[0], p[1]))
877 b.append((t, n, p[0], p[1]))
878 break
878 break
879 n = p[0]
879 n = p[0]
880 return b
880 return b
881
881
882 def between(self, pairs):
882 def between(self, pairs):
883 r = []
883 r = []
884
884
885 for top, bottom in pairs:
885 for top, bottom in pairs:
886 n, l, i = top, [], 0
886 n, l, i = top, [], 0
887 f = 1
887 f = 1
888
888
889 while n != bottom:
889 while n != bottom:
890 p = self.changelog.parents(n)[0]
890 p = self.changelog.parents(n)[0]
891 if i == f:
891 if i == f:
892 l.append(n)
892 l.append(n)
893 f = f * 2
893 f = f * 2
894 n = p
894 n = p
895 i += 1
895 i += 1
896
896
897 r.append(l)
897 r.append(l)
898
898
899 return r
899 return r
900
900
901 def findincoming(self, remote, base=None, heads=None, force=False):
901 def findincoming(self, remote, base=None, heads=None, force=False):
902 """Return list of roots of the subsets of missing nodes from remote
903
904 If base dict is specified, assume that these nodes and their parents
905 exist on the remote side and that no child of a node of base exists
906 in both remote and self.
907 Furthermore base will be updated to include the nodes that exists
908 in self and remote but no children exists in self and remote.
909 If a list of heads is specified, return only nodes which are heads
910 or ancestors of these heads.
911
912 All the ancestors of base are in self and in remote.
913 All the descendants of the list returned are missing in self.
914 (and so we know that the rest of the nodes are missing in remote, see
915 outgoing)
916 """
902 m = self.changelog.nodemap
917 m = self.changelog.nodemap
903 search = []
918 search = []
904 fetch = {}
919 fetch = {}
905 seen = {}
920 seen = {}
906 seenbranch = {}
921 seenbranch = {}
907 if base == None:
922 if base == None:
908 base = {}
923 base = {}
909
924
910 if not heads:
925 if not heads:
911 heads = remote.heads()
926 heads = remote.heads()
912
927
913 if self.changelog.tip() == nullid:
928 if self.changelog.tip() == nullid:
929 base[nullid] = 1
914 if heads != [nullid]:
930 if heads != [nullid]:
915 return [nullid]
931 return [nullid]
916 return []
932 return []
917
933
918 # assume we're closer to the tip than the root
934 # assume we're closer to the tip than the root
919 # and start by examining the heads
935 # and start by examining the heads
920 self.ui.status(_("searching for changes\n"))
936 self.ui.status(_("searching for changes\n"))
921
937
922 unknown = []
938 unknown = []
923 for h in heads:
939 for h in heads:
924 if h not in m:
940 if h not in m:
925 unknown.append(h)
941 unknown.append(h)
926 else:
942 else:
927 base[h] = 1
943 base[h] = 1
928
944
929 if not unknown:
945 if not unknown:
930 return []
946 return []
931
947
932 rep = {}
948 req = dict.fromkeys(unknown)
933 reqcnt = 0
949 reqcnt = 0
934
950
935 # search through remote branches
951 # search through remote branches
936 # a 'branch' here is a linear segment of history, with four parts:
952 # a 'branch' here is a linear segment of history, with four parts:
937 # head, root, first parent, second parent
953 # head, root, first parent, second parent
938 # (a branch always has two parents (or none) by definition)
954 # (a branch always has two parents (or none) by definition)
939 unknown = remote.branches(unknown)
955 unknown = remote.branches(unknown)
940 while unknown:
956 while unknown:
941 r = []
957 r = []
942 while unknown:
958 while unknown:
943 n = unknown.pop(0)
959 n = unknown.pop(0)
944 if n[0] in seen:
960 if n[0] in seen:
945 continue
961 continue
946
962
947 self.ui.debug(_("examining %s:%s\n")
963 self.ui.debug(_("examining %s:%s\n")
948 % (short(n[0]), short(n[1])))
964 % (short(n[0]), short(n[1])))
949 if n[0] == nullid:
965 if n[0] == nullid: # found the end of the branch
950 break
966 pass
951 if n in seenbranch:
967 elif n in seenbranch:
952 self.ui.debug(_("branch already found\n"))
968 self.ui.debug(_("branch already found\n"))
953 continue
969 continue
954 if n[1] and n[1] in m: # do we know the base?
970 elif n[1] and n[1] in m: # do we know the base?
955 self.ui.debug(_("found incomplete branch %s:%s\n")
971 self.ui.debug(_("found incomplete branch %s:%s\n")
956 % (short(n[0]), short(n[1])))
972 % (short(n[0]), short(n[1])))
957 search.append(n) # schedule branch range for scanning
973 search.append(n) # schedule branch range for scanning
958 seenbranch[n] = 1
974 seenbranch[n] = 1
959 else:
975 else:
960 if n[1] not in seen and n[1] not in fetch:
976 if n[1] not in seen and n[1] not in fetch:
961 if n[2] in m and n[3] in m:
977 if n[2] in m and n[3] in m:
962 self.ui.debug(_("found new changeset %s\n") %
978 self.ui.debug(_("found new changeset %s\n") %
963 short(n[1]))
979 short(n[1]))
964 fetch[n[1]] = 1 # earliest unknown
980 fetch[n[1]] = 1 # earliest unknown
965 base[n[2]] = 1 # latest known
981 for p in n[2:4]:
966 continue
982 if p in m:
983 base[p] = 1 # latest known
967
984
968 for a in n[2:4]:
985 for p in n[2:4]:
969 if a not in rep:
986 if p not in req and p not in m:
970 r.append(a)
987 r.append(p)
971 rep[a] = 1
988 req[p] = 1
972
973 seen[n[0]] = 1
989 seen[n[0]] = 1
974
990
975 if r:
991 if r:
976 reqcnt += 1
992 reqcnt += 1
977 self.ui.debug(_("request %d: %s\n") %
993 self.ui.debug(_("request %d: %s\n") %
978 (reqcnt, " ".join(map(short, r))))
994 (reqcnt, " ".join(map(short, r))))
979 for p in range(0, len(r), 10):
995 for p in range(0, len(r), 10):
980 for b in remote.branches(r[p:p+10]):
996 for b in remote.branches(r[p:p+10]):
981 self.ui.debug(_("received %s:%s\n") %
997 self.ui.debug(_("received %s:%s\n") %
982 (short(b[0]), short(b[1])))
998 (short(b[0]), short(b[1])))
983 if b[0] in m:
984 self.ui.debug(_("found base node %s\n")
985 % short(b[0]))
986 base[b[0]] = 1
987 elif b[0] not in seen:
988 unknown.append(b)
999 unknown.append(b)
989
1000
990 # do binary search on the branches we found
1001 # do binary search on the branches we found
991 while search:
1002 while search:
992 n = search.pop(0)
1003 n = search.pop(0)
993 reqcnt += 1
1004 reqcnt += 1
994 l = remote.between([(n[0], n[1])])[0]
1005 l = remote.between([(n[0], n[1])])[0]
995 l.append(n[1])
1006 l.append(n[1])
996 p = n[0]
1007 p = n[0]
997 f = 1
1008 f = 1
998 for i in l:
1009 for i in l:
999 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1010 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1000 if i in m:
1011 if i in m:
1001 if f <= 2:
1012 if f <= 2:
1002 self.ui.debug(_("found new branch changeset %s\n") %
1013 self.ui.debug(_("found new branch changeset %s\n") %
1003 short(p))
1014 short(p))
1004 fetch[p] = 1
1015 fetch[p] = 1
1005 base[i] = 1
1016 base[i] = 1
1006 else:
1017 else:
1007 self.ui.debug(_("narrowed branch search to %s:%s\n")
1018 self.ui.debug(_("narrowed branch search to %s:%s\n")
1008 % (short(p), short(i)))
1019 % (short(p), short(i)))
1009 search.append((p, i))
1020 search.append((p, i))
1010 break
1021 break
1011 p, f = i, f * 2
1022 p, f = i, f * 2
1012
1023
1013 # sanity check our fetch list
1024 # sanity check our fetch list
1014 for f in fetch.keys():
1025 for f in fetch.keys():
1015 if f in m:
1026 if f in m:
1016 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1027 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1017
1028
1018 if base.keys() == [nullid]:
1029 if base.keys() == [nullid]:
1019 if force:
1030 if force:
1020 self.ui.warn(_("warning: repository is unrelated\n"))
1031 self.ui.warn(_("warning: repository is unrelated\n"))
1021 else:
1032 else:
1022 raise util.Abort(_("repository is unrelated"))
1033 raise util.Abort(_("repository is unrelated"))
1023
1034
1024 self.ui.note(_("found new changesets starting at ") +
1035 self.ui.note(_("found new changesets starting at ") +
1025 " ".join([short(f) for f in fetch]) + "\n")
1036 " ".join([short(f) for f in fetch]) + "\n")
1026
1037
1027 self.ui.debug(_("%d total queries\n") % reqcnt)
1038 self.ui.debug(_("%d total queries\n") % reqcnt)
1028
1039
1029 return fetch.keys()
1040 return fetch.keys()
1030
1041
1031 def findoutgoing(self, remote, base=None, heads=None, force=False):
1042 def findoutgoing(self, remote, base=None, heads=None, force=False):
1032 """Return list of nodes that are roots of subsets not in remote
1043 """Return list of nodes that are roots of subsets not in remote
1033
1044
1034 If base dict is specified, assume that these nodes and their parents
1045 If base dict is specified, assume that these nodes and their parents
1035 exist on the remote side.
1046 exist on the remote side.
1036 If a list of heads is specified, return only nodes which are heads
1047 If a list of heads is specified, return only nodes which are heads
1037 or ancestors of these heads, and return a second element which
1048 or ancestors of these heads, and return a second element which
1038 contains all remote heads which get new children.
1049 contains all remote heads which get new children.
1039 """
1050 """
1040 if base == None:
1051 if base == None:
1041 base = {}
1052 base = {}
1042 self.findincoming(remote, base, heads, force=force)
1053 self.findincoming(remote, base, heads, force=force)
1043
1054
1044 self.ui.debug(_("common changesets up to ")
1055 self.ui.debug(_("common changesets up to ")
1045 + " ".join(map(short, base.keys())) + "\n")
1056 + " ".join(map(short, base.keys())) + "\n")
1046
1057
1047 remain = dict.fromkeys(self.changelog.nodemap)
1058 remain = dict.fromkeys(self.changelog.nodemap)
1048
1059
1049 # prune everything remote has from the tree
1060 # prune everything remote has from the tree
1050 del remain[nullid]
1061 del remain[nullid]
1051 remove = base.keys()
1062 remove = base.keys()
1052 while remove:
1063 while remove:
1053 n = remove.pop(0)
1064 n = remove.pop(0)
1054 if n in remain:
1065 if n in remain:
1055 del remain[n]
1066 del remain[n]
1056 for p in self.changelog.parents(n):
1067 for p in self.changelog.parents(n):
1057 remove.append(p)
1068 remove.append(p)
1058
1069
1059 # find every node whose parents have been pruned
1070 # find every node whose parents have been pruned
1060 subset = []
1071 subset = []
1061 # find every remote head that will get new children
1072 # find every remote head that will get new children
1062 updated_heads = {}
1073 updated_heads = {}
1063 for n in remain:
1074 for n in remain:
1064 p1, p2 = self.changelog.parents(n)
1075 p1, p2 = self.changelog.parents(n)
1065 if p1 not in remain and p2 not in remain:
1076 if p1 not in remain and p2 not in remain:
1066 subset.append(n)
1077 subset.append(n)
1067 if heads:
1078 if heads:
1068 if p1 in heads:
1079 if p1 in heads:
1069 updated_heads[p1] = True
1080 updated_heads[p1] = True
1070 if p2 in heads:
1081 if p2 in heads:
1071 updated_heads[p2] = True
1082 updated_heads[p2] = True
1072
1083
1073 # this is the set of all roots we have to push
1084 # this is the set of all roots we have to push
1074 if heads:
1085 if heads:
1075 return subset, updated_heads.keys()
1086 return subset, updated_heads.keys()
1076 else:
1087 else:
1077 return subset
1088 return subset
1078
1089
1079 def pull(self, remote, heads=None, force=False):
1090 def pull(self, remote, heads=None, force=False):
1080 l = self.lock()
1091 l = self.lock()
1081
1092
1082 fetch = self.findincoming(remote, force=force)
1093 fetch = self.findincoming(remote, force=force)
1083 if fetch == [nullid]:
1094 if fetch == [nullid]:
1084 self.ui.status(_("requesting all changes\n"))
1095 self.ui.status(_("requesting all changes\n"))
1085
1096
1086 if not fetch:
1097 if not fetch:
1087 self.ui.status(_("no changes found\n"))
1098 self.ui.status(_("no changes found\n"))
1088 return 0
1099 return 0
1089
1100
1090 if heads is None:
1101 if heads is None:
1091 cg = remote.changegroup(fetch, 'pull')
1102 cg = remote.changegroup(fetch, 'pull')
1092 else:
1103 else:
1093 cg = remote.changegroupsubset(fetch, heads, 'pull')
1104 cg = remote.changegroupsubset(fetch, heads, 'pull')
1094 return self.addchangegroup(cg, 'pull')
1105 return self.addchangegroup(cg, 'pull')
1095
1106
    def push(self, remote, force=False, revs=None):
        """Push local changesets to the repository `remote`.

        force - push even if remote has unsynced changes or the push
                would create new remote heads
        revs  - optional list of local revisions to push (push -r)

        Returns 1 when nothing was pushed (or the push was refused),
        otherwise the result of remote.addchangegroup.
        """
        # Hold the remote lock for the duration of the push; it is
        # released when the lock object goes away.
        lock = remote.lock()

        # findincoming fills `base` with nodes both sides have and
        # returns the roots of what remote has that we don't.
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)
        if not force and inc:
            # Refuse to push over unseen remote changes unless forced.
            self.ui.warn(_("abort: unsynced remote changes!\n"))
            self.ui.status(_("(did you forget to sync?"
                             " use push -f to force)\n"))
            return 1

        # Roots of the outgoing set, plus the remote heads that will
        # acquire new children.
        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            # Restrict the outgoing set to ancestors of `revs`.
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return 1
        elif not force:
            # FIXME we don't properly detect creation of new heads
            # in the push -r case, assume the user knows what he's doing
            if not revs and len(remote_heads) < len(heads) \
               and remote_heads != [nullid]:
                # More local heads than remote heads means the push
                # would create a new remote head; refuse unless forced.
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return 1

        # Build the changegroup bundle and hand it to the remote side.
        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return remote.addchangegroup(cg, 'push')
1132
1143
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        source - opaque tag passed through to the preoutgoing/outgoing hooks

        Returns a chunkbuffer wrapping the lazily-produced changegroup
        stream."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed
            # to know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents
            # the most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often
            # useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we
        # can also assume the recipient will have all the parents.  This
        # function prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            # Walk the known nodes in topological (revision) order so that
            # ancestors are marked before their descendants.
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an
        # environment for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            # Mutable cell so the closure can track the rev it expects next.
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.
            # It does this by assuming a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets
        # remove all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our
            # functions back to lookup the owning changenode and collect
            # information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1403
1414
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        basenodes - roots of the set of changesets to send (all their
                    descendants are included)
        source    - opaque tag passed to the preoutgoing/outgoing hooks

        Returns a chunkbuffer wrapping the lazily-produced changegroup
        stream."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # Every descendant of any base node goes into the group.
        nodes = cl.nodesbetween(basenodes, None)[0]
        # Ersatz set of the changelog revision numbers being sent; used to
        # select the matching manifest/file revisions by linkrev below.
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        # A changeset's owning changenode is itself.
        def identity(x):
            return x

        # Yield, in revision order, the nodes of `revlog` whose linked
        # changeset is part of the outgoing set.
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # Callback factory: records every file touched by each outgoing
        # changeset into `changedfileset`.
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # Callback factory: maps a node of `revlog` to the changelog node
        # that owns it (via linkrev).
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        # Produce the changegroup stream: changelog chunks, then manifest
        # chunks, then per-file chunks, then a closing chunk.
        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                # Materialize so we can test for emptiness before emitting
                # the filename chunk.
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1469
1480
1470 def addchangegroup(self, source, srctype):
1481 def addchangegroup(self, source, srctype):
1471 """add changegroup to repo.
1482 """add changegroup to repo.
1472 returns number of heads modified or added + 1."""
1483 returns number of heads modified or added + 1."""
1473
1484
1474 def csmap(x):
1485 def csmap(x):
1475 self.ui.debug(_("add changeset %s\n") % short(x))
1486 self.ui.debug(_("add changeset %s\n") % short(x))
1476 return cl.count()
1487 return cl.count()
1477
1488
1478 def revmap(x):
1489 def revmap(x):
1479 return cl.rev(x)
1490 return cl.rev(x)
1480
1491
1481 if not source:
1492 if not source:
1482 return 0
1493 return 0
1483
1494
1484 self.hook('prechangegroup', throw=True, source=srctype)
1495 self.hook('prechangegroup', throw=True, source=srctype)
1485
1496
1486 changesets = files = revisions = 0
1497 changesets = files = revisions = 0
1487
1498
1488 tr = self.transaction()
1499 tr = self.transaction()
1489
1500
1490 # write changelog and manifest data to temp files so
1501 # write changelog and manifest data to temp files so
1491 # concurrent readers will not see inconsistent view
1502 # concurrent readers will not see inconsistent view
1492 cl = None
1503 cl = None
1493 try:
1504 try:
1494 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1505 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1495
1506
1496 oldheads = len(cl.heads())
1507 oldheads = len(cl.heads())
1497
1508
1498 # pull off the changeset group
1509 # pull off the changeset group
1499 self.ui.status(_("adding changesets\n"))
1510 self.ui.status(_("adding changesets\n"))
1500 co = cl.tip()
1511 co = cl.tip()
1501 chunkiter = changegroup.chunkiter(source)
1512 chunkiter = changegroup.chunkiter(source)
1502 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1513 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1503 cnr, cor = map(cl.rev, (cn, co))
1514 cnr, cor = map(cl.rev, (cn, co))
1504 if cn == nullid:
1515 if cn == nullid:
1505 cnr = cor
1516 cnr = cor
1506 changesets = cnr - cor
1517 changesets = cnr - cor
1507
1518
1508 mf = None
1519 mf = None
1509 try:
1520 try:
1510 mf = appendfile.appendmanifest(self.opener,
1521 mf = appendfile.appendmanifest(self.opener,
1511 self.manifest.version)
1522 self.manifest.version)
1512
1523
1513 # pull off the manifest group
1524 # pull off the manifest group
1514 self.ui.status(_("adding manifests\n"))
1525 self.ui.status(_("adding manifests\n"))
1515 mm = mf.tip()
1526 mm = mf.tip()
1516 chunkiter = changegroup.chunkiter(source)
1527 chunkiter = changegroup.chunkiter(source)
1517 mo = mf.addgroup(chunkiter, revmap, tr)
1528 mo = mf.addgroup(chunkiter, revmap, tr)
1518
1529
1519 # process the files
1530 # process the files
1520 self.ui.status(_("adding file changes\n"))
1531 self.ui.status(_("adding file changes\n"))
1521 while 1:
1532 while 1:
1522 f = changegroup.getchunk(source)
1533 f = changegroup.getchunk(source)
1523 if not f:
1534 if not f:
1524 break
1535 break
1525 self.ui.debug(_("adding %s revisions\n") % f)
1536 self.ui.debug(_("adding %s revisions\n") % f)
1526 fl = self.file(f)
1537 fl = self.file(f)
1527 o = fl.count()
1538 o = fl.count()
1528 chunkiter = changegroup.chunkiter(source)
1539 chunkiter = changegroup.chunkiter(source)
1529 n = fl.addgroup(chunkiter, revmap, tr)
1540 n = fl.addgroup(chunkiter, revmap, tr)
1530 revisions += fl.count() - o
1541 revisions += fl.count() - o
1531 files += 1
1542 files += 1
1532
1543
1533 # write order here is important so concurrent readers will see
1544 # write order here is important so concurrent readers will see
1534 # consistent view of repo
1545 # consistent view of repo
1535 mf.writedata()
1546 mf.writedata()
1536 finally:
1547 finally:
1537 if mf:
1548 if mf:
1538 mf.cleanup()
1549 mf.cleanup()
1539 cl.writedata()
1550 cl.writedata()
1540 finally:
1551 finally:
1541 if cl:
1552 if cl:
1542 cl.cleanup()
1553 cl.cleanup()
1543
1554
1544 # make changelog and manifest see real files again
1555 # make changelog and manifest see real files again
1545 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1556 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1546 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1557 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1547 self.changelog.checkinlinesize(tr)
1558 self.changelog.checkinlinesize(tr)
1548 self.manifest.checkinlinesize(tr)
1559 self.manifest.checkinlinesize(tr)
1549
1560
1550 newheads = len(self.changelog.heads())
1561 newheads = len(self.changelog.heads())
1551 heads = ""
1562 heads = ""
1552 if oldheads and newheads > oldheads:
1563 if oldheads and newheads > oldheads:
1553 heads = _(" (+%d heads)") % (newheads - oldheads)
1564 heads = _(" (+%d heads)") % (newheads - oldheads)
1554
1565
1555 self.ui.status(_("added %d changesets"
1566 self.ui.status(_("added %d changesets"
1556 " with %d changes to %d files%s\n")
1567 " with %d changes to %d files%s\n")
1557 % (changesets, revisions, files, heads))
1568 % (changesets, revisions, files, heads))
1558
1569
1559 if changesets > 0:
1570 if changesets > 0:
1560 self.hook('pretxnchangegroup', throw=True,
1571 self.hook('pretxnchangegroup', throw=True,
1561 node=hex(self.changelog.node(cor+1)), source=srctype)
1572 node=hex(self.changelog.node(cor+1)), source=srctype)
1562
1573
1563 tr.close()
1574 tr.close()
1564
1575
1565 if changesets > 0:
1576 if changesets > 0:
1566 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1577 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1567 source=srctype)
1578 source=srctype)
1568
1579
1569 for i in range(cor + 1, cnr + 1):
1580 for i in range(cor + 1, cnr + 1):
1570 self.hook("incoming", node=hex(self.changelog.node(i)),
1581 self.hook("incoming", node=hex(self.changelog.node(i)),
1571 source=srctype)
1582 source=srctype)
1572
1583
1573 return newheads - oldheads + 1
1584 return newheads - oldheads + 1
1574
1585
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
        """Update the working directory to changeset `node`, merging if needed.

        allow      - permit a branch merge (otherwise a non-linear update aborts)
        force      - clobber local changes / ignore safety checks
        choose     - optional predicate limiting which files are touched
        moddirstate - if False, do not record the result in the dirstate
        forcemerge - merge even with outstanding uncommitted changes
        wlock      - pre-acquired working-dir lock, or None to take one here
        show_stats - print the updated/merged/removed/unresolved summary

        Returns 1 when aborting a branch-spanning update without `allow`,
        otherwise a truthiness flag: True if any file merge failed.
        Raises util.Abort on uncommitted merges/changes or conflicting
        unknown files.
        """
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            raise util.Abort(_("outstanding uncommitted merges"))

        err = False

        # p1: first working-dir parent; p2: update target; pa: their ancestor
        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        # copy m2: entries are deleted from it as files are handled below
        m2 = self.manifest.read(m2n).copy()
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        modified, added, removed, deleted, unknown = self.changes()

        # is this a jump, or a merge?  i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        if allow and linear_path:
            raise util.Abort(_("there is nothing to merge, "
                               "just use 'hg update'"))
        if allow and not forcemerge:
            if modified or added or removed:
                raise util.Abort(_("outstanding uncommitted changes"))

        # refuse to overwrite an unknown working-dir file whose content
        # differs from the target revision's copy
        if not forcemerge and not force:
            for f in unknown:
                if f in m2:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) != 0:
                        raise util.Abort(_("'%s' already exists in the working"
                                           " dir and differs from remote") % f)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note(_("resolving manifests\n"))
        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
                      (short(man), short(m1n), short(m2n)))

        merge = {}   # files needing a 3-way merge: f -> (my node, other node, exec flag)
        get = {}     # files to fetch from the target revision: f -> node
        remove = []  # files to delete from the working directory

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(unknown)

        for f in added + modified + unknown:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        if moddirstate and not wlock:
            wlock = self.wlock()

        for f in deleted + removed:
            if f in mw:
                del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f):
                continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(_(" remote %s is newer, get\n") % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap or f in added:
                    # this unknown file is the same as the checkout
                    # we need to reset the dirstate if the file was added
                    get[f] = m2[f]

                # no content action taken: still reconcile the exec bit
                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(_(" updating permissions for %s\n") % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(_(" updating permissions for %s\n")
                                          % f)
                            util.set_exec(self.wjoin(f), mode)
                # handled: remove so the m2-only loop below skips it
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = _("d")
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (_(" local changed %s which remote deleted\n") % f) +
                            _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                    if r == _("d"):
                        remove.append(f)
                else:
                    self.ui.debug(_("other deleted %s\n") % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug(_("remote deleted %s\n") % f)
                        remove.append(f)
                    else:
                        self.ui.debug(_("local modified %s, keeping\n") % f)
                else:
                    self.ui.debug(_("working dir created %s, keeping\n") % f)

        # files that exist only in the target revision (m2) at this point
        for f, n in m2.iteritems():
            if choose and not choose(f):
                continue
            if f[0] == "/":
                continue
            if f in ma and n != ma[f]:
                r = _("k")
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        (_("remote changed %s which local deleted\n") % f) +
                        _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                if r == _("k"):
                    get[f] = n
            elif f not in ma:
                self.ui.debug(_("remote created %s\n") % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug(_("local deleted %s, recreating\n") % f)
                    get[f] = n
                else:
                    self.ui.debug(_("local deleted %s\n") % f)

        del mw, m1, m2, ma

        if force:
            # forced update: take the remote side of every would-be merge
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status(_("this update spans a branch"
                                 " affecting the following files:\n"))
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge:
                        cf = _(" (resolve)")
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn(_("aborting update spanning branches!\n"))
                self.ui.status(_("(use 'hg merge' to merge across branches"
                                 " or 'hg update -C' to lose changes)\n"))
                return 1
            branch_merge = True

        xp1 = hex(p1)
        xp2 = hex(p2)
        # hooks get an empty string, not the null hex, for a missing p2
        if p2 == nullid: xxp2 = ''
        else: xxp2 = xp2

        self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/":
                continue
            self.ui.note(_("getting %s\n") % f)
            t = self.file(f).read(get[f])
            self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    # st_mtime=-1 forces a later content comparison
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        failedmerge = []
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status(_("merging %s\n") % f)
            my, other, flag = merge[f]
            ret = self.merge3(f, my, other, xp1, xp2)
            if ret:
                err = True
                failedmerge.append(f)
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note(_("removing %s\n") % f)
            util.audit_path(f)
            try:
                util.unlink(self.wjoin(f))
            except OSError, inst:
                # already-gone files are fine; warn on anything else
                if inst.errno != errno.ENOENT:
                    self.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

        if moddirstate:
            self.dirstate.setparents(p1, p2)

        if show_stats:
            stats = ((len(get), _("updated")),
                     (len(merge) - len(failedmerge), _("merged")),
                     (len(remove), _("removed")),
                     (len(failedmerge), _("unresolved")))
            note = ", ".join([_("%d files %s") % s for s in stats])
            self.ui.status("%s\n" % note)
        if moddirstate:
            if branch_merge:
                if failedmerge:
                    self.ui.status(_("There are unresolved merges,"
                                    " you can redo the full merge using:\n"
                                    "  hg update -C %s\n"
                                    "  hg merge %s\n"
                                    % (self.changelog.rev(p1),
                                       self.changelog.rev(p2))))
                else:
                    self.ui.status(_("(branch merge, don't forget to commit)\n"))
            elif failedmerge:
                self.ui.status(_("There are unresolved merges with"
                                 " locally modified files.\n"))

        self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
        return err
1873
1884
1874 def merge3(self, fn, my, other, p1, p2):
1885 def merge3(self, fn, my, other, p1, p2):
1875 """perform a 3-way merge in the working directory"""
1886 """perform a 3-way merge in the working directory"""
1876
1887
1877 def temp(prefix, node):
1888 def temp(prefix, node):
1878 pre = "%s~%s." % (os.path.basename(fn), prefix)
1889 pre = "%s~%s." % (os.path.basename(fn), prefix)
1879 (fd, name) = tempfile.mkstemp(prefix=pre)
1890 (fd, name) = tempfile.mkstemp(prefix=pre)
1880 f = os.fdopen(fd, "wb")
1891 f = os.fdopen(fd, "wb")
1881 self.wwrite(fn, fl.read(node), f)
1892 self.wwrite(fn, fl.read(node), f)
1882 f.close()
1893 f.close()
1883 return name
1894 return name
1884
1895
1885 fl = self.file(fn)
1896 fl = self.file(fn)
1886 base = fl.ancestor(my, other)
1897 base = fl.ancestor(my, other)
1887 a = self.wjoin(fn)
1898 a = self.wjoin(fn)
1888 b = temp("base", base)
1899 b = temp("base", base)
1889 c = temp("other", other)
1900 c = temp("other", other)
1890
1901
1891 self.ui.note(_("resolving %s\n") % fn)
1902 self.ui.note(_("resolving %s\n") % fn)
1892 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1903 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1893 (fn, short(my), short(other), short(base)))
1904 (fn, short(my), short(other), short(base)))
1894
1905
1895 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1906 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1896 or "hgmerge")
1907 or "hgmerge")
1897 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1908 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1898 environ={'HG_FILE': fn,
1909 environ={'HG_FILE': fn,
1899 'HG_MY_NODE': p1,
1910 'HG_MY_NODE': p1,
1900 'HG_OTHER_NODE': p2,
1911 'HG_OTHER_NODE': p2,
1901 'HG_FILE_MY_NODE': hex(my),
1912 'HG_FILE_MY_NODE': hex(my),
1902 'HG_FILE_OTHER_NODE': hex(other),
1913 'HG_FILE_OTHER_NODE': hex(other),
1903 'HG_FILE_BASE_NODE': hex(base)})
1914 'HG_FILE_BASE_NODE': hex(base)})
1904 if r:
1915 if r:
1905 self.ui.warn(_("merging %s failed!\n") % fn)
1916 self.ui.warn(_("merging %s failed!\n") % fn)
1906
1917
1907 os.unlink(b)
1918 os.unlink(b)
1908 os.unlink(c)
1919 os.unlink(c)
1909 return r
1920 return r
1910
1921
1911 def verify(self):
1922 def verify(self):
1912 filelinkrevs = {}
1923 filelinkrevs = {}
1913 filenodes = {}
1924 filenodes = {}
1914 changesets = revisions = files = 0
1925 changesets = revisions = files = 0
1915 errors = [0]
1926 errors = [0]
1916 warnings = [0]
1927 warnings = [0]
1917 neededmanifests = {}
1928 neededmanifests = {}
1918
1929
1919 def err(msg):
1930 def err(msg):
1920 self.ui.warn(msg + "\n")
1931 self.ui.warn(msg + "\n")
1921 errors[0] += 1
1932 errors[0] += 1
1922
1933
1923 def warn(msg):
1934 def warn(msg):
1924 self.ui.warn(msg + "\n")
1935 self.ui.warn(msg + "\n")
1925 warnings[0] += 1
1936 warnings[0] += 1
1926
1937
1927 def checksize(obj, name):
1938 def checksize(obj, name):
1928 d = obj.checksize()
1939 d = obj.checksize()
1929 if d[0]:
1940 if d[0]:
1930 err(_("%s data length off by %d bytes") % (name, d[0]))
1941 err(_("%s data length off by %d bytes") % (name, d[0]))
1931 if d[1]:
1942 if d[1]:
1932 err(_("%s index contains %d extra bytes") % (name, d[1]))
1943 err(_("%s index contains %d extra bytes") % (name, d[1]))
1933
1944
1934 def checkversion(obj, name):
1945 def checkversion(obj, name):
1935 if obj.version != revlog.REVLOGV0:
1946 if obj.version != revlog.REVLOGV0:
1936 if not revlogv1:
1947 if not revlogv1:
1937 warn(_("warning: `%s' uses revlog format 1") % name)
1948 warn(_("warning: `%s' uses revlog format 1") % name)
1938 elif revlogv1:
1949 elif revlogv1:
1939 warn(_("warning: `%s' uses revlog format 0") % name)
1950 warn(_("warning: `%s' uses revlog format 0") % name)
1940
1951
1941 revlogv1 = self.revlogversion != revlog.REVLOGV0
1952 revlogv1 = self.revlogversion != revlog.REVLOGV0
1942 if self.ui.verbose or revlogv1 != self.revlogv1:
1953 if self.ui.verbose or revlogv1 != self.revlogv1:
1943 self.ui.status(_("repository uses revlog format %d\n") %
1954 self.ui.status(_("repository uses revlog format %d\n") %
1944 (revlogv1 and 1 or 0))
1955 (revlogv1 and 1 or 0))
1945
1956
1946 seen = {}
1957 seen = {}
1947 self.ui.status(_("checking changesets\n"))
1958 self.ui.status(_("checking changesets\n"))
1948 checksize(self.changelog, "changelog")
1959 checksize(self.changelog, "changelog")
1949
1960
1950 for i in range(self.changelog.count()):
1961 for i in range(self.changelog.count()):
1951 changesets += 1
1962 changesets += 1
1952 n = self.changelog.node(i)
1963 n = self.changelog.node(i)
1953 l = self.changelog.linkrev(n)
1964 l = self.changelog.linkrev(n)
1954 if l != i:
1965 if l != i:
1955 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1966 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1956 if n in seen:
1967 if n in seen:
1957 err(_("duplicate changeset at revision %d") % i)
1968 err(_("duplicate changeset at revision %d") % i)
1958 seen[n] = 1
1969 seen[n] = 1
1959
1970
1960 for p in self.changelog.parents(n):
1971 for p in self.changelog.parents(n):
1961 if p not in self.changelog.nodemap:
1972 if p not in self.changelog.nodemap:
1962 err(_("changeset %s has unknown parent %s") %
1973 err(_("changeset %s has unknown parent %s") %
1963 (short(n), short(p)))
1974 (short(n), short(p)))
1964 try:
1975 try:
1965 changes = self.changelog.read(n)
1976 changes = self.changelog.read(n)
1966 except KeyboardInterrupt:
1977 except KeyboardInterrupt:
1967 self.ui.warn(_("interrupted"))
1978 self.ui.warn(_("interrupted"))
1968 raise
1979 raise
1969 except Exception, inst:
1980 except Exception, inst:
1970 err(_("unpacking changeset %s: %s") % (short(n), inst))
1981 err(_("unpacking changeset %s: %s") % (short(n), inst))
1971 continue
1982 continue
1972
1983
1973 neededmanifests[changes[0]] = n
1984 neededmanifests[changes[0]] = n
1974
1985
1975 for f in changes[3]:
1986 for f in changes[3]:
1976 filelinkrevs.setdefault(f, []).append(i)
1987 filelinkrevs.setdefault(f, []).append(i)
1977
1988
1978 seen = {}
1989 seen = {}
1979 self.ui.status(_("checking manifests\n"))
1990 self.ui.status(_("checking manifests\n"))
1980 checkversion(self.manifest, "manifest")
1991 checkversion(self.manifest, "manifest")
1981 checksize(self.manifest, "manifest")
1992 checksize(self.manifest, "manifest")
1982
1993
1983 for i in range(self.manifest.count()):
1994 for i in range(self.manifest.count()):
1984 n = self.manifest.node(i)
1995 n = self.manifest.node(i)
1985 l = self.manifest.linkrev(n)
1996 l = self.manifest.linkrev(n)
1986
1997
1987 if l < 0 or l >= self.changelog.count():
1998 if l < 0 or l >= self.changelog.count():
1988 err(_("bad manifest link (%d) at revision %d") % (l, i))
1999 err(_("bad manifest link (%d) at revision %d") % (l, i))
1989
2000
1990 if n in neededmanifests:
2001 if n in neededmanifests:
1991 del neededmanifests[n]
2002 del neededmanifests[n]
1992
2003
1993 if n in seen:
2004 if n in seen:
1994 err(_("duplicate manifest at revision %d") % i)
2005 err(_("duplicate manifest at revision %d") % i)
1995
2006
1996 seen[n] = 1
2007 seen[n] = 1
1997
2008
1998 for p in self.manifest.parents(n):
2009 for p in self.manifest.parents(n):
1999 if p not in self.manifest.nodemap:
2010 if p not in self.manifest.nodemap:
2000 err(_("manifest %s has unknown parent %s") %
2011 err(_("manifest %s has unknown parent %s") %
2001 (short(n), short(p)))
2012 (short(n), short(p)))
2002
2013
2003 try:
2014 try:
2004 delta = mdiff.patchtext(self.manifest.delta(n))
2015 delta = mdiff.patchtext(self.manifest.delta(n))
2005 except KeyboardInterrupt:
2016 except KeyboardInterrupt:
2006 self.ui.warn(_("interrupted"))
2017 self.ui.warn(_("interrupted"))
2007 raise
2018 raise
2008 except Exception, inst:
2019 except Exception, inst:
2009 err(_("unpacking manifest %s: %s") % (short(n), inst))
2020 err(_("unpacking manifest %s: %s") % (short(n), inst))
2010 continue
2021 continue
2011
2022
2012 try:
2023 try:
2013 ff = [ l.split('\0') for l in delta.splitlines() ]
2024 ff = [ l.split('\0') for l in delta.splitlines() ]
2014 for f, fn in ff:
2025 for f, fn in ff:
2015 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2026 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2016 except (ValueError, TypeError), inst:
2027 except (ValueError, TypeError), inst:
2017 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2028 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2018
2029
2019 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2030 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2020
2031
2021 for m, c in neededmanifests.items():
2032 for m, c in neededmanifests.items():
2022 err(_("Changeset %s refers to unknown manifest %s") %
2033 err(_("Changeset %s refers to unknown manifest %s") %
2023 (short(m), short(c)))
2034 (short(m), short(c)))
2024 del neededmanifests
2035 del neededmanifests
2025
2036
2026 for f in filenodes:
2037 for f in filenodes:
2027 if f not in filelinkrevs:
2038 if f not in filelinkrevs:
2028 err(_("file %s in manifest but not in changesets") % f)
2039 err(_("file %s in manifest but not in changesets") % f)
2029
2040
2030 for f in filelinkrevs:
2041 for f in filelinkrevs:
2031 if f not in filenodes:
2042 if f not in filenodes:
2032 err(_("file %s in changeset but not in manifest") % f)
2043 err(_("file %s in changeset but not in manifest") % f)
2033
2044
2034 self.ui.status(_("checking files\n"))
2045 self.ui.status(_("checking files\n"))
2035 ff = filenodes.keys()
2046 ff = filenodes.keys()
2036 ff.sort()
2047 ff.sort()
2037 for f in ff:
2048 for f in ff:
2038 if f == "/dev/null":
2049 if f == "/dev/null":
2039 continue
2050 continue
2040 files += 1
2051 files += 1
2041 if not f:
2052 if not f:
2042 err(_("file without name in manifest %s") % short(n))
2053 err(_("file without name in manifest %s") % short(n))
2043 continue
2054 continue
2044 fl = self.file(f)
2055 fl = self.file(f)
2045 checkversion(fl, f)
2056 checkversion(fl, f)
2046 checksize(fl, f)
2057 checksize(fl, f)
2047
2058
2048 nodes = {nullid: 1}
2059 nodes = {nullid: 1}
2049 seen = {}
2060 seen = {}
2050 for i in range(fl.count()):
2061 for i in range(fl.count()):
2051 revisions += 1
2062 revisions += 1
2052 n = fl.node(i)
2063 n = fl.node(i)
2053
2064
2054 if n in seen:
2065 if n in seen:
2055 err(_("%s: duplicate revision %d") % (f, i))
2066 err(_("%s: duplicate revision %d") % (f, i))
2056 if n not in filenodes[f]:
2067 if n not in filenodes[f]:
2057 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2068 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2058 else:
2069 else:
2059 del filenodes[f][n]
2070 del filenodes[f][n]
2060
2071
2061 flr = fl.linkrev(n)
2072 flr = fl.linkrev(n)
2062 if flr not in filelinkrevs.get(f, []):
2073 if flr not in filelinkrevs.get(f, []):
2063 err(_("%s:%s points to unexpected changeset %d")
2074 err(_("%s:%s points to unexpected changeset %d")
2064 % (f, short(n), flr))
2075 % (f, short(n), flr))
2065 else:
2076 else:
2066 filelinkrevs[f].remove(flr)
2077 filelinkrevs[f].remove(flr)
2067
2078
2068 # verify contents
2079 # verify contents
2069 try:
2080 try:
2070 t = fl.read(n)
2081 t = fl.read(n)
2071 except KeyboardInterrupt:
2082 except KeyboardInterrupt:
2072 self.ui.warn(_("interrupted"))
2083 self.ui.warn(_("interrupted"))
2073 raise
2084 raise
2074 except Exception, inst:
2085 except Exception, inst:
2075 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2086 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2076
2087
2077 # verify parents
2088 # verify parents
2078 (p1, p2) = fl.parents(n)
2089 (p1, p2) = fl.parents(n)
2079 if p1 not in nodes:
2090 if p1 not in nodes:
2080 err(_("file %s:%s unknown parent 1 %s") %
2091 err(_("file %s:%s unknown parent 1 %s") %
2081 (f, short(n), short(p1)))
2092 (f, short(n), short(p1)))
2082 if p2 not in nodes:
2093 if p2 not in nodes:
2083 err(_("file %s:%s unknown parent 2 %s") %
2094 err(_("file %s:%s unknown parent 2 %s") %
2084 (f, short(n), short(p1)))
2095 (f, short(n), short(p1)))
2085 nodes[n] = 1
2096 nodes[n] = 1
2086
2097
2087 # cross-check
2098 # cross-check
2088 for node in filenodes[f]:
2099 for node in filenodes[f]:
2089 err(_("node %s in manifests not in %s") % (hex(node), f))
2100 err(_("node %s in manifests not in %s") % (hex(node), f))
2090
2101
2091 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2102 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2092 (files, changesets, revisions))
2103 (files, changesets, revisions))
2093
2104
2094 if warnings[0]:
2105 if warnings[0]:
2095 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2106 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2096 if errors[0]:
2107 if errors[0]:
2097 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2108 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2098 return 1
2109 return 1
2099
2110
# used to avoid circular references so destructors work
def aftertrans(base):
    """Return a callback that promotes journal files to undo files.

    The returned closure (invoked with no arguments, typically after a
    transaction completes) renames ``journal`` -> ``undo`` and
    ``journal.dirstate`` -> ``undo.dirstate`` inside the *base* directory.
    Capturing *base* in a plain local keeps the closure free of references
    back to the repository object.
    """
    journal_dir = base
    def promote_journal():
        util.rename(os.path.join(journal_dir, "journal"),
                    os.path.join(journal_dir, "undo"))
        util.rename(os.path.join(journal_dir, "journal.dirstate"),
                    os.path.join(journal_dir, "undo.dirstate"))
    return promote_journal
2108
2119
General Comments 0
You need to be logged in to leave comments. Login now