merge with upstream
Benoit Boissinot
r3320:fa59d676 merge default
@@ -1,1768 +1,1782 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("There is no Mercurial repository"
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 " here (.hg not found)"))
32 path = p
32 path = p
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34
34
35 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
36 if create:
36 if create:
37 if not os.path.exists(path):
37 if not os.path.exists(path):
38 os.mkdir(path)
38 os.mkdir(path)
39 os.mkdir(self.path)
39 os.mkdir(self.path)
40 os.mkdir(self.join("data"))
40 os.mkdir(self.join("data"))
41 else:
41 else:
42 raise repo.RepoError(_("repository %s not found") % path)
42 raise repo.RepoError(_("repository %s not found") % path)
43 elif create:
43 elif create:
44 raise repo.RepoError(_("repository %s already exists") % path)
44 raise repo.RepoError(_("repository %s already exists") % path)
45
45
46 self.root = os.path.abspath(path)
46 self.root = os.path.abspath(path)
47 self.origroot = path
47 self.origroot = path
48 self.ui = ui.ui(parentui=parentui)
48 self.ui = ui.ui(parentui=parentui)
49 self.opener = util.opener(self.path)
49 self.opener = util.opener(self.path)
50 self.wopener = util.opener(self.root)
50 self.wopener = util.opener(self.root)
51
51
52 try:
52 try:
53 self.ui.readconfig(self.join("hgrc"), self.root)
53 self.ui.readconfig(self.join("hgrc"), self.root)
54 except IOError:
54 except IOError:
55 pass
55 pass
56
56
57 v = self.ui.revlogopts
57 v = self.ui.revlogopts
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
60 fl = v.get('flags', None)
60 fl = v.get('flags', None)
61 flags = 0
61 flags = 0
62 if fl != None:
62 if fl != None:
63 for x in fl.split():
63 for x in fl.split():
64 flags |= revlog.flagstr(x)
64 flags |= revlog.flagstr(x)
65 elif self.revlogv1:
65 elif self.revlogv1:
66 flags = revlog.REVLOG_DEFAULT_FLAGS
66 flags = revlog.REVLOG_DEFAULT_FLAGS
67
67
68 v = self.revlogversion | flags
68 v = self.revlogversion | flags
69 self.manifest = manifest.manifest(self.opener, v)
69 self.manifest = manifest.manifest(self.opener, v)
70 self.changelog = changelog.changelog(self.opener, v)
70 self.changelog = changelog.changelog(self.opener, v)
71
71
72 # the changelog might not have the inline index flag
72 # the changelog might not have the inline index flag
73 # on. If the format of the changelog is the same as found in
73 # on. If the format of the changelog is the same as found in
74 # .hgrc, apply any flags found in the .hgrc as well.
74 # .hgrc, apply any flags found in the .hgrc as well.
75 # Otherwise, just version from the changelog
75 # Otherwise, just version from the changelog
76 v = self.changelog.version
76 v = self.changelog.version
77 if v == self.revlogversion:
77 if v == self.revlogversion:
78 v |= flags
78 v |= flags
79 self.revlogversion = v
79 self.revlogversion = v
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self.nodetagscache = None
82 self.nodetagscache = None
83 self.encodepats = None
83 self.encodepats = None
84 self.decodepats = None
84 self.decodepats = None
85 self.transhandle = None
85 self.transhandle = None
86
86
87 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
87 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
88
88
89 def url(self):
89 def url(self):
90 return 'file:' + self.root
90 return 'file:' + self.root
91
91
92 def hook(self, name, throw=False, **args):
92 def hook(self, name, throw=False, **args):
93 def callhook(hname, funcname):
93 def callhook(hname, funcname):
94 '''call python hook. hook is callable object, looked up as
94 '''call python hook. hook is callable object, looked up as
95 name in python module. if callable returns "true", hook
95 name in python module. if callable returns "true", hook
96 fails, else passes. if hook raises exception, treated as
96 fails, else passes. if hook raises exception, treated as
97 hook failure. exception propagates if throw is "true".
97 hook failure. exception propagates if throw is "true".
98
98
99 reason for "true" meaning "hook failed" is so that
99 reason for "true" meaning "hook failed" is so that
100 unmodified commands (e.g. mercurial.commands.update) can
100 unmodified commands (e.g. mercurial.commands.update) can
101 be run as hooks without wrappers to convert return values.'''
101 be run as hooks without wrappers to convert return values.'''
102
102
103 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
103 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
104 d = funcname.rfind('.')
104 d = funcname.rfind('.')
105 if d == -1:
105 if d == -1:
106 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
106 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
107 % (hname, funcname))
107 % (hname, funcname))
108 modname = funcname[:d]
108 modname = funcname[:d]
109 try:
109 try:
110 obj = __import__(modname)
110 obj = __import__(modname)
111 except ImportError:
111 except ImportError:
112 try:
112 try:
113 # extensions are loaded with hgext_ prefix
113 # extensions are loaded with hgext_ prefix
114 obj = __import__("hgext_%s" % modname)
114 obj = __import__("hgext_%s" % modname)
115 except ImportError:
115 except ImportError:
116 raise util.Abort(_('%s hook is invalid '
116 raise util.Abort(_('%s hook is invalid '
117 '(import of "%s" failed)') %
117 '(import of "%s" failed)') %
118 (hname, modname))
118 (hname, modname))
119 try:
119 try:
120 for p in funcname.split('.')[1:]:
120 for p in funcname.split('.')[1:]:
121 obj = getattr(obj, p)
121 obj = getattr(obj, p)
122 except AttributeError, err:
122 except AttributeError, err:
123 raise util.Abort(_('%s hook is invalid '
123 raise util.Abort(_('%s hook is invalid '
124 '("%s" is not defined)') %
124 '("%s" is not defined)') %
125 (hname, funcname))
125 (hname, funcname))
126 if not callable(obj):
126 if not callable(obj):
127 raise util.Abort(_('%s hook is invalid '
127 raise util.Abort(_('%s hook is invalid '
128 '("%s" is not callable)') %
128 '("%s" is not callable)') %
129 (hname, funcname))
129 (hname, funcname))
130 try:
130 try:
131 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
131 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
132 except (KeyboardInterrupt, util.SignalInterrupt):
132 except (KeyboardInterrupt, util.SignalInterrupt):
133 raise
133 raise
134 except Exception, exc:
134 except Exception, exc:
135 if isinstance(exc, util.Abort):
135 if isinstance(exc, util.Abort):
136 self.ui.warn(_('error: %s hook failed: %s\n') %
136 self.ui.warn(_('error: %s hook failed: %s\n') %
137 (hname, exc.args[0]))
137 (hname, exc.args[0]))
138 else:
138 else:
139 self.ui.warn(_('error: %s hook raised an exception: '
139 self.ui.warn(_('error: %s hook raised an exception: '
140 '%s\n') % (hname, exc))
140 '%s\n') % (hname, exc))
141 if throw:
141 if throw:
142 raise
142 raise
143 self.ui.print_exc()
143 self.ui.print_exc()
144 return True
144 return True
145 if r:
145 if r:
146 if throw:
146 if throw:
147 raise util.Abort(_('%s hook failed') % hname)
147 raise util.Abort(_('%s hook failed') % hname)
148 self.ui.warn(_('warning: %s hook failed\n') % hname)
148 self.ui.warn(_('warning: %s hook failed\n') % hname)
149 return r
149 return r
150
150
151 def runhook(name, cmd):
151 def runhook(name, cmd):
152 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
152 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
153 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
153 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
154 r = util.system(cmd, environ=env, cwd=self.root)
154 r = util.system(cmd, environ=env, cwd=self.root)
155 if r:
155 if r:
156 desc, r = util.explain_exit(r)
156 desc, r = util.explain_exit(r)
157 if throw:
157 if throw:
158 raise util.Abort(_('%s hook %s') % (name, desc))
158 raise util.Abort(_('%s hook %s') % (name, desc))
159 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
159 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
160 return r
160 return r
161
161
162 r = False
162 r = False
163 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
163 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
164 if hname.split(".", 1)[0] == name and cmd]
164 if hname.split(".", 1)[0] == name and cmd]
165 hooks.sort()
165 hooks.sort()
166 for hname, cmd in hooks:
166 for hname, cmd in hooks:
167 if cmd.startswith('python:'):
167 if cmd.startswith('python:'):
168 r = callhook(hname, cmd[7:].strip()) or r
168 r = callhook(hname, cmd[7:].strip()) or r
169 else:
169 else:
170 r = runhook(hname, cmd) or r
170 r = runhook(hname, cmd) or r
171 return r
171 return r
172
172
173 tag_disallowed = ':\r\n'
173 tag_disallowed = ':\r\n'
174
174
175 def tag(self, name, node, message, local, user, date):
175 def tag(self, name, node, message, local, user, date):
176 '''tag a revision with a symbolic name.
176 '''tag a revision with a symbolic name.
177
177
178 if local is True, the tag is stored in a per-repository file.
178 if local is True, the tag is stored in a per-repository file.
179 otherwise, it is stored in the .hgtags file, and a new
179 otherwise, it is stored in the .hgtags file, and a new
180 changeset is committed with the change.
180 changeset is committed with the change.
181
181
182 keyword arguments:
182 keyword arguments:
183
183
184 local: whether to store tag in non-version-controlled file
184 local: whether to store tag in non-version-controlled file
185 (default False)
185 (default False)
186
186
187 message: commit message to use if committing
187 message: commit message to use if committing
188
188
189 user: name of user to use if committing
189 user: name of user to use if committing
190
190
191 date: date tuple to use if committing'''
191 date: date tuple to use if committing'''
192
192
193 for c in self.tag_disallowed:
193 for c in self.tag_disallowed:
194 if c in name:
194 if c in name:
195 raise util.Abort(_('%r cannot be used in a tag name') % c)
195 raise util.Abort(_('%r cannot be used in a tag name') % c)
196
196
197 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
197 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
198
198
199 if local:
199 if local:
200 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
200 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
201 self.hook('tag', node=hex(node), tag=name, local=local)
201 self.hook('tag', node=hex(node), tag=name, local=local)
202 return
202 return
203
203
204 for x in self.status()[:5]:
204 for x in self.status()[:5]:
205 if '.hgtags' in x:
205 if '.hgtags' in x:
206 raise util.Abort(_('working copy of .hgtags is changed '
206 raise util.Abort(_('working copy of .hgtags is changed '
207 '(please commit .hgtags manually)'))
207 '(please commit .hgtags manually)'))
208
208
209 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
209 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
210 if self.dirstate.state('.hgtags') == '?':
210 if self.dirstate.state('.hgtags') == '?':
211 self.add(['.hgtags'])
211 self.add(['.hgtags'])
212
212
213 self.commit(['.hgtags'], message, user, date)
213 self.commit(['.hgtags'], message, user, date)
214 self.hook('tag', node=hex(node), tag=name, local=local)
214 self.hook('tag', node=hex(node), tag=name, local=local)
215
215
216 def tags(self):
216 def tags(self):
217 '''return a mapping of tag to node'''
217 '''return a mapping of tag to node'''
218 if not self.tagscache:
218 if not self.tagscache:
219 self.tagscache = {}
219 self.tagscache = {}
220
220
221 def parsetag(line, context):
221 def parsetag(line, context):
222 if not line:
222 if not line:
223 return
223 return
224 s = l.split(" ", 1)
224 s = l.split(" ", 1)
225 if len(s) != 2:
225 if len(s) != 2:
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 return
227 return
228 node, key = s
228 node, key = s
229 key = key.strip()
229 key = key.strip()
230 try:
230 try:
231 bin_n = bin(node)
231 bin_n = bin(node)
232 except TypeError:
232 except TypeError:
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 (context, node))
234 (context, node))
235 return
235 return
236 if bin_n not in self.changelog.nodemap:
236 if bin_n not in self.changelog.nodemap:
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 (context, key))
238 (context, key))
239 return
239 return
240 self.tagscache[key] = bin_n
240 self.tagscache[key] = bin_n
241
241
242 # read the tags file from each head, ending with the tip,
242 # read the tags file from each head, ending with the tip,
243 # and add each tag found to the map, with "newer" ones
243 # and add each tag found to the map, with "newer" ones
244 # taking precedence
244 # taking precedence
245 heads = self.heads()
245 heads = self.heads()
246 heads.reverse()
246 heads.reverse()
247 fl = self.file(".hgtags")
247 fl = self.file(".hgtags")
248 for node in heads:
248 for node in heads:
249 change = self.changelog.read(node)
249 change = self.changelog.read(node)
250 rev = self.changelog.rev(node)
250 rev = self.changelog.rev(node)
251 fn, ff = self.manifest.find(change[0], '.hgtags')
251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 if fn is None: continue
252 if fn is None: continue
253 count = 0
253 count = 0
254 for l in fl.read(fn).splitlines():
254 for l in fl.read(fn).splitlines():
255 count += 1
255 count += 1
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 (rev, short(node), count))
257 (rev, short(node), count))
258 try:
258 try:
259 f = self.opener("localtags")
259 f = self.opener("localtags")
260 count = 0
260 count = 0
261 for l in f:
261 for l in f:
262 count += 1
262 count += 1
263 parsetag(l, _("localtags, line %d") % count)
263 parsetag(l, _("localtags, line %d") % count)
264 except IOError:
264 except IOError:
265 pass
265 pass
266
266
267 self.tagscache['tip'] = self.changelog.tip()
267 self.tagscache['tip'] = self.changelog.tip()
268
268
269 return self.tagscache
269 return self.tagscache
270
270
271 def tagslist(self):
271 def tagslist(self):
272 '''return a list of tags ordered by revision'''
272 '''return a list of tags ordered by revision'''
273 l = []
273 l = []
274 for t, n in self.tags().items():
274 for t, n in self.tags().items():
275 try:
275 try:
276 r = self.changelog.rev(n)
276 r = self.changelog.rev(n)
277 except:
277 except:
278 r = -2 # sort to the beginning of the list if unknown
278 r = -2 # sort to the beginning of the list if unknown
279 l.append((r, t, n))
279 l.append((r, t, n))
280 l.sort()
280 l.sort()
281 return [(t, n) for r, t, n in l]
281 return [(t, n) for r, t, n in l]
282
282
283 def nodetags(self, node):
283 def nodetags(self, node):
284 '''return the tags associated with a node'''
284 '''return the tags associated with a node'''
285 if not self.nodetagscache:
285 if not self.nodetagscache:
286 self.nodetagscache = {}
286 self.nodetagscache = {}
287 for t, n in self.tags().items():
287 for t, n in self.tags().items():
288 self.nodetagscache.setdefault(n, []).append(t)
288 self.nodetagscache.setdefault(n, []).append(t)
289 return self.nodetagscache.get(node, [])
289 return self.nodetagscache.get(node, [])
290
290
291 def lookup(self, key):
291 def lookup(self, key):
292 try:
292 try:
293 return self.tags()[key]
293 return self.tags()[key]
294 except KeyError:
294 except KeyError:
295 if key == '.':
295 if key == '.':
296 key = self.dirstate.parents()[0]
296 key = self.dirstate.parents()[0]
297 if key == nullid:
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
298 raise repo.RepoError(_("no revision checked out"))
299 try:
299 try:
300 return self.changelog.lookup(key)
300 return self.changelog.lookup(key)
301 except:
301 except:
302 raise repo.RepoError(_("unknown revision '%s'") % key)
302 raise repo.RepoError(_("unknown revision '%s'") % key)
303
303
304 def dev(self):
304 def dev(self):
305 return os.lstat(self.path).st_dev
305 return os.lstat(self.path).st_dev
306
306
307 def local(self):
307 def local(self):
308 return True
308 return True
309
309
310 def join(self, f):
310 def join(self, f):
311 return os.path.join(self.path, f)
311 return os.path.join(self.path, f)
312
312
313 def wjoin(self, f):
313 def wjoin(self, f):
314 return os.path.join(self.root, f)
314 return os.path.join(self.root, f)
315
315
316 def file(self, f):
316 def file(self, f):
317 if f[0] == '/':
317 if f[0] == '/':
318 f = f[1:]
318 f = f[1:]
319 return filelog.filelog(self.opener, f, self.revlogversion)
319 return filelog.filelog(self.opener, f, self.revlogversion)
320
320
321 def changectx(self, changeid=None):
321 def changectx(self, changeid=None):
322 return context.changectx(self, changeid)
322 return context.changectx(self, changeid)
323
323
324 def workingctx(self):
324 def workingctx(self):
325 return context.workingctx(self)
325 return context.workingctx(self)
326
326
327 def parents(self, changeid=None):
327 def parents(self, changeid=None):
328 '''
328 '''
329 get list of changectxs for parents of changeid or working directory
329 get list of changectxs for parents of changeid or working directory
330 '''
330 '''
331 if changeid is None:
331 if changeid is None:
332 pl = self.dirstate.parents()
332 pl = self.dirstate.parents()
333 else:
333 else:
334 n = self.changelog.lookup(changeid)
334 n = self.changelog.lookup(changeid)
335 pl = self.changelog.parents(n)
335 pl = self.changelog.parents(n)
336 if pl[1] == nullid:
336 if pl[1] == nullid:
337 return [self.changectx(pl[0])]
337 return [self.changectx(pl[0])]
338 return [self.changectx(pl[0]), self.changectx(pl[1])]
338 return [self.changectx(pl[0]), self.changectx(pl[1])]
339
339
340 def filectx(self, path, changeid=None, fileid=None):
340 def filectx(self, path, changeid=None, fileid=None):
341 """changeid can be a changeset revision, node, or tag.
341 """changeid can be a changeset revision, node, or tag.
342 fileid can be a file revision or node."""
342 fileid can be a file revision or node."""
343 return context.filectx(self, path, changeid, fileid)
343 return context.filectx(self, path, changeid, fileid)
344
344
345 def getcwd(self):
345 def getcwd(self):
346 return self.dirstate.getcwd()
346 return self.dirstate.getcwd()
347
347
348 def wfile(self, f, mode='r'):
348 def wfile(self, f, mode='r'):
349 return self.wopener(f, mode)
349 return self.wopener(f, mode)
350
350
351 def wread(self, filename):
351 def wread(self, filename):
352 if self.encodepats == None:
352 if self.encodepats == None:
353 l = []
353 l = []
354 for pat, cmd in self.ui.configitems("encode"):
354 for pat, cmd in self.ui.configitems("encode"):
355 mf = util.matcher(self.root, "", [pat], [], [])[1]
355 mf = util.matcher(self.root, "", [pat], [], [])[1]
356 l.append((mf, cmd))
356 l.append((mf, cmd))
357 self.encodepats = l
357 self.encodepats = l
358
358
359 data = self.wopener(filename, 'r').read()
359 data = self.wopener(filename, 'r').read()
360
360
361 for mf, cmd in self.encodepats:
361 for mf, cmd in self.encodepats:
362 if mf(filename):
362 if mf(filename):
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 data = util.filter(data, cmd)
364 data = util.filter(data, cmd)
365 break
365 break
366
366
367 return data
367 return data
368
368
369 def wwrite(self, filename, data, fd=None):
369 def wwrite(self, filename, data, fd=None):
370 if self.decodepats == None:
370 if self.decodepats == None:
371 l = []
371 l = []
372 for pat, cmd in self.ui.configitems("decode"):
372 for pat, cmd in self.ui.configitems("decode"):
373 mf = util.matcher(self.root, "", [pat], [], [])[1]
373 mf = util.matcher(self.root, "", [pat], [], [])[1]
374 l.append((mf, cmd))
374 l.append((mf, cmd))
375 self.decodepats = l
375 self.decodepats = l
376
376
377 for mf, cmd in self.decodepats:
377 for mf, cmd in self.decodepats:
378 if mf(filename):
378 if mf(filename):
379 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
379 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
380 data = util.filter(data, cmd)
380 data = util.filter(data, cmd)
381 break
381 break
382
382
383 if fd:
383 if fd:
384 return fd.write(data)
384 return fd.write(data)
385 return self.wopener(filename, 'w').write(data)
385 return self.wopener(filename, 'w').write(data)
386
386
387 def transaction(self):
387 def transaction(self):
388 tr = self.transhandle
388 tr = self.transhandle
389 if tr != None and tr.running():
389 if tr != None and tr.running():
390 return tr.nest()
390 return tr.nest()
391
391
392 # save dirstate for rollback
392 # save dirstate for rollback
393 try:
393 try:
394 ds = self.opener("dirstate").read()
394 ds = self.opener("dirstate").read()
395 except IOError:
395 except IOError:
396 ds = ""
396 ds = ""
397 self.opener("journal.dirstate", "w").write(ds)
397 self.opener("journal.dirstate", "w").write(ds)
398
398
399 tr = transaction.transaction(self.ui.warn, self.opener,
399 tr = transaction.transaction(self.ui.warn, self.opener,
400 self.join("journal"),
400 self.join("journal"),
401 aftertrans(self.path))
401 aftertrans(self.path))
402 self.transhandle = tr
402 self.transhandle = tr
403 return tr
403 return tr
404
404
405 def recover(self):
405 def recover(self):
406 l = self.lock()
406 l = self.lock()
407 if os.path.exists(self.join("journal")):
407 if os.path.exists(self.join("journal")):
408 self.ui.status(_("rolling back interrupted transaction\n"))
408 self.ui.status(_("rolling back interrupted transaction\n"))
409 transaction.rollback(self.opener, self.join("journal"))
409 transaction.rollback(self.opener, self.join("journal"))
410 self.reload()
410 self.reload()
411 return True
411 return True
412 else:
412 else:
413 self.ui.warn(_("no interrupted transaction available\n"))
413 self.ui.warn(_("no interrupted transaction available\n"))
414 return False
414 return False
415
415
416 def rollback(self, wlock=None):
416 def rollback(self, wlock=None):
417 if not wlock:
417 if not wlock:
418 wlock = self.wlock()
418 wlock = self.wlock()
419 l = self.lock()
419 l = self.lock()
420 if os.path.exists(self.join("undo")):
420 if os.path.exists(self.join("undo")):
421 self.ui.status(_("rolling back last transaction\n"))
421 self.ui.status(_("rolling back last transaction\n"))
422 transaction.rollback(self.opener, self.join("undo"))
422 transaction.rollback(self.opener, self.join("undo"))
423 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
423 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
424 self.reload()
424 self.reload()
425 self.wreload()
425 self.wreload()
426 else:
426 else:
427 self.ui.warn(_("no rollback information available\n"))
427 self.ui.warn(_("no rollback information available\n"))
428
428
429 def wreload(self):
429 def wreload(self):
430 self.dirstate.read()
430 self.dirstate.read()
431
431
432 def reload(self):
432 def reload(self):
433 self.changelog.load()
433 self.changelog.load()
434 self.manifest.load()
434 self.manifest.load()
435 self.tagscache = None
435 self.tagscache = None
436 self.nodetagscache = None
436 self.nodetagscache = None
437
437
438 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
438 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
439 desc=None):
439 desc=None):
440 try:
440 try:
441 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
441 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
442 except lock.LockHeld, inst:
442 except lock.LockHeld, inst:
443 if not wait:
443 if not wait:
444 raise
444 raise
445 self.ui.warn(_("waiting for lock on %s held by %s\n") %
445 self.ui.warn(_("waiting for lock on %s held by %s\n") %
446 (desc, inst.args[0]))
446 (desc, inst.args[0]))
447 # default to 600 seconds timeout
447 # default to 600 seconds timeout
448 l = lock.lock(self.join(lockname),
448 l = lock.lock(self.join(lockname),
449 int(self.ui.config("ui", "timeout") or 600),
449 int(self.ui.config("ui", "timeout") or 600),
450 releasefn, desc=desc)
450 releasefn, desc=desc)
451 if acquirefn:
451 if acquirefn:
452 acquirefn()
452 acquirefn()
453 return l
453 return l
454
454
455 def lock(self, wait=1):
455 def lock(self, wait=1):
456 return self.do_lock("lock", wait, acquirefn=self.reload,
456 return self.do_lock("lock", wait, acquirefn=self.reload,
457 desc=_('repository %s') % self.origroot)
457 desc=_('repository %s') % self.origroot)
458
458
459 def wlock(self, wait=1):
459 def wlock(self, wait=1):
460 return self.do_lock("wlock", wait, self.dirstate.write,
460 return self.do_lock("wlock", wait, self.dirstate.write,
461 self.wreload,
461 self.wreload,
462 desc=_('working directory of %s') % self.origroot)
462 desc=_('working directory of %s') % self.origroot)
463
463
     def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
-        "determine whether a new filenode is needed"
+        """
+        Determine whether a new filenode is needed and what parent
+        and rename information is needed for a file commit.
+
+        Returns (old entry, file parent 1, file parent 2, metadata)
+
+        If old entry is not None, a commit is not needed.
+        """
         fp1 = manifest1.get(filename, nullid)
         fp2 = manifest2.get(filename, nullid)

-        if fp2 != nullid:
+        meta = {}
+        cp = self.dirstate.copied(filename)
+        if cp:
+            meta["copy"] = cp
+            if not manifest2: # not a branch merge
+                meta["copyrev"] = hex(manifest1.get(cp, nullid))
+                fp2 = nullid
+            elif fp2 != nullid: # copied on remote side
+                meta["copyrev"] = hex(manifest1.get(cp, nullid))
+            else: # copied on local side, reversed
+                meta["copyrev"] = hex(manifest2.get(cp))
+                fp2 = nullid
+            self.ui.debug(_(" %s: copy %s:%s\n") %
+                          (filename, cp, meta["copyrev"]))
+            fp1 = nullid
+        elif fp2 != nullid:
             # is one parent an ancestor of the other?
             fpa = filelog.ancestor(fp1, fp2)
             if fpa == fp1:
                 fp1, fp2 = fp2, nullid
             elif fpa == fp2:
                 fp2 = nullid

         # is the file unmodified from the parent? report existing entry
-        if fp2 == nullid and text == filelog.read(fp1):
-            return (fp1, None, None)
+        if fp2 == nullid and not filelog.cmp(fp1, text):
+            return (fp1, None, None, {})

-        return (None, fp1, fp2)
+        return (None, fp1, fp2, meta)
482
504
483 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
505 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
484 orig_parent = self.dirstate.parents()[0] or nullid
506 orig_parent = self.dirstate.parents()[0] or nullid
485 p1 = p1 or self.dirstate.parents()[0] or nullid
507 p1 = p1 or self.dirstate.parents()[0] or nullid
486 p2 = p2 or self.dirstate.parents()[1] or nullid
508 p2 = p2 or self.dirstate.parents()[1] or nullid
487 c1 = self.changelog.read(p1)
509 c1 = self.changelog.read(p1)
488 c2 = self.changelog.read(p2)
510 c2 = self.changelog.read(p2)
489 m1 = self.manifest.read(c1[0]).copy()
511 m1 = self.manifest.read(c1[0]).copy()
490 m2 = self.manifest.read(c2[0])
512 m2 = self.manifest.read(c2[0])
491 changed = []
513 changed = []
492
514
493 if orig_parent == p1:
515 if orig_parent == p1:
494 update_dirstate = 1
516 update_dirstate = 1
495 else:
517 else:
496 update_dirstate = 0
518 update_dirstate = 0
497
519
498 if not wlock:
520 if not wlock:
499 wlock = self.wlock()
521 wlock = self.wlock()
500 l = self.lock()
522 l = self.lock()
501 tr = self.transaction()
523 tr = self.transaction()
502 linkrev = self.changelog.count()
524 linkrev = self.changelog.count()
503 for f in files:
525 for f in files:
504 try:
526 try:
505 t = self.wread(f)
527 t = self.wread(f)
506 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
528 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
507 r = self.file(f)
529 r = self.file(f)
508
530
-                (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
+                entry, fp1, fp2, meta = self.checkfilemerge(f, t, r, m1, m2)
                 if entry:
                     m1[f] = entry
                     continue

-                m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
+                m1[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
515 changed.append(f)
537 changed.append(f)
516 if update_dirstate:
538 if update_dirstate:
517 self.dirstate.update([f], "n")
539 self.dirstate.update([f], "n")
518 except IOError:
540 except IOError:
519 try:
541 try:
520 del m1[f]
542 del m1[f]
521 if update_dirstate:
543 if update_dirstate:
522 self.dirstate.forget([f])
544 self.dirstate.forget([f])
523 except:
545 except:
524 # deleted from p2?
546 # deleted from p2?
525 pass
547 pass
526
548
527 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
549 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
528 user = user or self.ui.username()
550 user = user or self.ui.username()
529 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
551 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
530 tr.close()
552 tr.close()
531 if update_dirstate:
553 if update_dirstate:
532 self.dirstate.setparents(n, nullid)
554 self.dirstate.setparents(n, nullid)
533
555
534 def commit(self, files=None, text="", user=None, date=None,
556 def commit(self, files=None, text="", user=None, date=None,
535 match=util.always, force=False, lock=None, wlock=None,
557 match=util.always, force=False, lock=None, wlock=None,
536 force_editor=False):
558 force_editor=False):
537 commit = []
559 commit = []
538 remove = []
560 remove = []
539 changed = []
561 changed = []
540
562
541 if files:
563 if files:
542 for f in files:
564 for f in files:
543 s = self.dirstate.state(f)
565 s = self.dirstate.state(f)
544 if s in 'nmai':
566 if s in 'nmai':
545 commit.append(f)
567 commit.append(f)
546 elif s == 'r':
568 elif s == 'r':
547 remove.append(f)
569 remove.append(f)
548 else:
570 else:
549 self.ui.warn(_("%s not tracked!\n") % f)
571 self.ui.warn(_("%s not tracked!\n") % f)
550 else:
572 else:
551 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
573 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
552 commit = modified + added
574 commit = modified + added
553 remove = removed
575 remove = removed
554
576
555 p1, p2 = self.dirstate.parents()
577 p1, p2 = self.dirstate.parents()
556 c1 = self.changelog.read(p1)
578 c1 = self.changelog.read(p1)
557 c2 = self.changelog.read(p2)
579 c2 = self.changelog.read(p2)
558 m1 = self.manifest.read(c1[0]).copy()
580 m1 = self.manifest.read(c1[0]).copy()
559 m2 = self.manifest.read(c2[0])
581 m2 = self.manifest.read(c2[0])
560
582
561 if not commit and not remove and not force and p2 == nullid:
583 if not commit and not remove and not force and p2 == nullid:
562 self.ui.status(_("nothing changed\n"))
584 self.ui.status(_("nothing changed\n"))
563 return None
585 return None
564
586
565 xp1 = hex(p1)
587 xp1 = hex(p1)
566 if p2 == nullid: xp2 = ''
588 if p2 == nullid: xp2 = ''
567 else: xp2 = hex(p2)
589 else: xp2 = hex(p2)
568
590
569 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
591 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
570
592
571 if not wlock:
593 if not wlock:
572 wlock = self.wlock()
594 wlock = self.wlock()
573 if not lock:
595 if not lock:
574 lock = self.lock()
596 lock = self.lock()
575 tr = self.transaction()
597 tr = self.transaction()
576
598
577 # check in files
599 # check in files
578 new = {}
600 new = {}
579 linkrev = self.changelog.count()
601 linkrev = self.changelog.count()
580 commit.sort()
602 commit.sort()
581 for f in commit:
603 for f in commit:
582 self.ui.note(f + "\n")
604 self.ui.note(f + "\n")
583 try:
605 try:
584 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
606 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
585 t = self.wread(f)
607 t = self.wread(f)
586 except IOError:
608 except IOError:
587 self.ui.warn(_("trouble committing %s!\n") % f)
609 self.ui.warn(_("trouble committing %s!\n") % f)
588 raise
610 raise
589
611
590 r = self.file(f)
612 r = self.file(f)
591
613
-            meta = {}
-            cp = self.dirstate.copied(f)
-            if cp:
-                meta["copy"] = cp
-                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
-                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
-                fp1, fp2 = nullid, nullid
-            else:
-                entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
+            entry, fp1, fp2, meta = self.checkfilemerge(f, t, r, m1, m2)
             if entry:
602 new[f] = entry
616 new[f] = entry
603 continue
617 continue
604
618
605 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
619 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
606 # remember what we've added so that we can later calculate
620 # remember what we've added so that we can later calculate
607 # the files to pull from a set of changesets
621 # the files to pull from a set of changesets
608 changed.append(f)
622 changed.append(f)
609
623
610 # update manifest
624 # update manifest
611 m1.update(new)
625 m1.update(new)
612 for f in remove:
626 for f in remove:
613 if f in m1:
627 if f in m1:
614 del m1[f]
628 del m1[f]
615 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
629 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
616 (new, remove))
630 (new, remove))
617
631
618 # add changeset
632 # add changeset
619 new = new.keys()
633 new = new.keys()
620 new.sort()
634 new.sort()
621
635
622 user = user or self.ui.username()
636 user = user or self.ui.username()
623 if not text or force_editor:
637 if not text or force_editor:
624 edittext = []
638 edittext = []
625 if text:
639 if text:
626 edittext.append(text)
640 edittext.append(text)
627 edittext.append("")
641 edittext.append("")
628 if p2 != nullid:
642 if p2 != nullid:
629 edittext.append("HG: branch merge")
643 edittext.append("HG: branch merge")
630 edittext.extend(["HG: changed %s" % f for f in changed])
644 edittext.extend(["HG: changed %s" % f for f in changed])
631 edittext.extend(["HG: removed %s" % f for f in remove])
645 edittext.extend(["HG: removed %s" % f for f in remove])
632 if not changed and not remove:
646 if not changed and not remove:
633 edittext.append("HG: no files changed")
647 edittext.append("HG: no files changed")
634 edittext.append("")
648 edittext.append("")
635 # run editor in the repository root
649 # run editor in the repository root
636 olddir = os.getcwd()
650 olddir = os.getcwd()
637 os.chdir(self.root)
651 os.chdir(self.root)
638 text = self.ui.edit("\n".join(edittext), user)
652 text = self.ui.edit("\n".join(edittext), user)
639 os.chdir(olddir)
653 os.chdir(olddir)
640
654
641 lines = [line.rstrip() for line in text.rstrip().splitlines()]
655 lines = [line.rstrip() for line in text.rstrip().splitlines()]
642 while lines and not lines[0]:
656 while lines and not lines[0]:
643 del lines[0]
657 del lines[0]
644 if not lines:
658 if not lines:
645 return None
659 return None
646 text = '\n'.join(lines)
660 text = '\n'.join(lines)
647 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
661 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
648 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
662 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
649 parent2=xp2)
663 parent2=xp2)
650 tr.close()
664 tr.close()
651
665
652 self.dirstate.setparents(n)
666 self.dirstate.setparents(n)
653 self.dirstate.update(new, "n")
667 self.dirstate.update(new, "n")
654 self.dirstate.forget(remove)
668 self.dirstate.forget(remove)
655
669
656 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
670 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
657 return n
671 return n
658
672
659 def walk(self, node=None, files=[], match=util.always, badmatch=None):
673 def walk(self, node=None, files=[], match=util.always, badmatch=None):
660 if node:
674 if node:
661 fdict = dict.fromkeys(files)
675 fdict = dict.fromkeys(files)
662 for fn in self.manifest.read(self.changelog.read(node)[0]):
676 for fn in self.manifest.read(self.changelog.read(node)[0]):
663 for ffn in fdict:
677 for ffn in fdict:
664 # match if the file is the exact name or a directory
678 # match if the file is the exact name or a directory
665 if ffn == fn or fn.startswith("%s/" % ffn):
679 if ffn == fn or fn.startswith("%s/" % ffn):
666 del fdict[ffn]
680 del fdict[ffn]
667 break
681 break
668 if match(fn):
682 if match(fn):
669 yield 'm', fn
683 yield 'm', fn
670 for fn in fdict:
684 for fn in fdict:
671 if badmatch and badmatch(fn):
685 if badmatch and badmatch(fn):
672 if match(fn):
686 if match(fn):
673 yield 'b', fn
687 yield 'b', fn
674 else:
688 else:
675 self.ui.warn(_('%s: No such file in rev %s\n') % (
689 self.ui.warn(_('%s: No such file in rev %s\n') % (
676 util.pathto(self.getcwd(), fn), short(node)))
690 util.pathto(self.getcwd(), fn), short(node)))
677 else:
691 else:
678 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
692 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
679 yield src, fn
693 yield src, fn
680
694
681 def status(self, node1=None, node2=None, files=[], match=util.always,
695 def status(self, node1=None, node2=None, files=[], match=util.always,
682 wlock=None, list_ignored=False, list_clean=False):
696 wlock=None, list_ignored=False, list_clean=False):
683 """return status of files between two nodes or node and working directory
697 """return status of files between two nodes or node and working directory
684
698
685 If node1 is None, use the first dirstate parent instead.
699 If node1 is None, use the first dirstate parent instead.
686 If node2 is None, compare node1 with working directory.
700 If node2 is None, compare node1 with working directory.
687 """
701 """
688
702
689 def fcmp(fn, mf):
703 def fcmp(fn, mf):
690 t1 = self.wread(fn)
704 t1 = self.wread(fn)
691 return self.file(fn).cmp(mf.get(fn, nullid), t1)
705 return self.file(fn).cmp(mf.get(fn, nullid), t1)
692
706
693 def mfmatches(node):
707 def mfmatches(node):
694 change = self.changelog.read(node)
708 change = self.changelog.read(node)
695 mf = dict(self.manifest.read(change[0]))
709 mf = dict(self.manifest.read(change[0]))
696 for fn in mf.keys():
710 for fn in mf.keys():
697 if not match(fn):
711 if not match(fn):
698 del mf[fn]
712 del mf[fn]
699 return mf
713 return mf
700
714
701 modified, added, removed, deleted, unknown = [], [], [], [], []
715 modified, added, removed, deleted, unknown = [], [], [], [], []
702 ignored, clean = [], []
716 ignored, clean = [], []
703
717
704 compareworking = False
718 compareworking = False
705 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
719 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
706 compareworking = True
720 compareworking = True
707
721
708 if not compareworking:
722 if not compareworking:
709 # read the manifest from node1 before the manifest from node2,
723 # read the manifest from node1 before the manifest from node2,
710 # so that we'll hit the manifest cache if we're going through
724 # so that we'll hit the manifest cache if we're going through
711 # all the revisions in parent->child order.
725 # all the revisions in parent->child order.
712 mf1 = mfmatches(node1)
726 mf1 = mfmatches(node1)
713
727
714 # are we comparing the working directory?
728 # are we comparing the working directory?
715 if not node2:
729 if not node2:
716 if not wlock:
730 if not wlock:
717 try:
731 try:
718 wlock = self.wlock(wait=0)
732 wlock = self.wlock(wait=0)
719 except lock.LockException:
733 except lock.LockException:
720 wlock = None
734 wlock = None
721 (lookup, modified, added, removed, deleted, unknown,
735 (lookup, modified, added, removed, deleted, unknown,
722 ignored, clean) = self.dirstate.status(files, match,
736 ignored, clean) = self.dirstate.status(files, match,
723 list_ignored, list_clean)
737 list_ignored, list_clean)
724
738
725 # are we comparing working dir against its parent?
739 # are we comparing working dir against its parent?
726 if compareworking:
740 if compareworking:
727 if lookup:
741 if lookup:
728 # do a full compare of any files that might have changed
742 # do a full compare of any files that might have changed
729 mf2 = mfmatches(self.dirstate.parents()[0])
743 mf2 = mfmatches(self.dirstate.parents()[0])
730 for f in lookup:
744 for f in lookup:
731 if fcmp(f, mf2):
745 if fcmp(f, mf2):
732 modified.append(f)
746 modified.append(f)
733 else:
747 else:
734 clean.append(f)
748 clean.append(f)
735 if wlock is not None:
749 if wlock is not None:
736 self.dirstate.update([f], "n")
750 self.dirstate.update([f], "n")
737 else:
751 else:
738 # we are comparing working dir against non-parent
752 # we are comparing working dir against non-parent
739 # generate a pseudo-manifest for the working dir
753 # generate a pseudo-manifest for the working dir
740 mf2 = mfmatches(self.dirstate.parents()[0])
754 mf2 = mfmatches(self.dirstate.parents()[0])
741 for f in lookup + modified + added:
755 for f in lookup + modified + added:
742 mf2[f] = ""
756 mf2[f] = ""
743 for f in removed:
757 for f in removed:
744 if f in mf2:
758 if f in mf2:
745 del mf2[f]
759 del mf2[f]
746 else:
760 else:
747 # we are comparing two revisions
761 # we are comparing two revisions
748 mf2 = mfmatches(node2)
762 mf2 = mfmatches(node2)
749
763
750 if not compareworking:
764 if not compareworking:
751 # flush lists from dirstate before comparing manifests
765 # flush lists from dirstate before comparing manifests
752 modified, added, clean = [], [], []
766 modified, added, clean = [], [], []
753
767
754 # make sure to sort the files so we talk to the disk in a
768 # make sure to sort the files so we talk to the disk in a
755 # reasonable order
769 # reasonable order
756 mf2keys = mf2.keys()
770 mf2keys = mf2.keys()
757 mf2keys.sort()
771 mf2keys.sort()
758 for fn in mf2keys:
772 for fn in mf2keys:
759 if mf1.has_key(fn):
773 if mf1.has_key(fn):
760 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
774 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
761 modified.append(fn)
775 modified.append(fn)
762 elif list_clean:
776 elif list_clean:
763 clean.append(fn)
777 clean.append(fn)
764 del mf1[fn]
778 del mf1[fn]
765 else:
779 else:
766 added.append(fn)
780 added.append(fn)
767
781
768 removed = mf1.keys()
782 removed = mf1.keys()
769
783
770 # sort and return results:
784 # sort and return results:
771 for l in modified, added, removed, deleted, unknown, ignored, clean:
785 for l in modified, added, removed, deleted, unknown, ignored, clean:
772 l.sort()
786 l.sort()
773 return (modified, added, removed, deleted, unknown, ignored, clean)
787 return (modified, added, removed, deleted, unknown, ignored, clean)
774
788
775 def add(self, list, wlock=None):
789 def add(self, list, wlock=None):
776 if not wlock:
790 if not wlock:
777 wlock = self.wlock()
791 wlock = self.wlock()
778 for f in list:
792 for f in list:
779 p = self.wjoin(f)
793 p = self.wjoin(f)
780 if not os.path.exists(p):
794 if not os.path.exists(p):
781 self.ui.warn(_("%s does not exist!\n") % f)
795 self.ui.warn(_("%s does not exist!\n") % f)
782 elif not os.path.isfile(p):
796 elif not os.path.isfile(p):
783 self.ui.warn(_("%s not added: only files supported currently\n")
797 self.ui.warn(_("%s not added: only files supported currently\n")
784 % f)
798 % f)
785 elif self.dirstate.state(f) in 'an':
799 elif self.dirstate.state(f) in 'an':
786 self.ui.warn(_("%s already tracked!\n") % f)
800 self.ui.warn(_("%s already tracked!\n") % f)
787 else:
801 else:
788 self.dirstate.update([f], "a")
802 self.dirstate.update([f], "a")
789
803
790 def forget(self, list, wlock=None):
804 def forget(self, list, wlock=None):
791 if not wlock:
805 if not wlock:
792 wlock = self.wlock()
806 wlock = self.wlock()
793 for f in list:
807 for f in list:
794 if self.dirstate.state(f) not in 'ai':
808 if self.dirstate.state(f) not in 'ai':
795 self.ui.warn(_("%s not added!\n") % f)
809 self.ui.warn(_("%s not added!\n") % f)
796 else:
810 else:
797 self.dirstate.forget([f])
811 self.dirstate.forget([f])
798
812
799 def remove(self, list, unlink=False, wlock=None):
813 def remove(self, list, unlink=False, wlock=None):
800 if unlink:
814 if unlink:
801 for f in list:
815 for f in list:
802 try:
816 try:
803 util.unlink(self.wjoin(f))
817 util.unlink(self.wjoin(f))
804 except OSError, inst:
818 except OSError, inst:
805 if inst.errno != errno.ENOENT:
819 if inst.errno != errno.ENOENT:
806 raise
820 raise
807 if not wlock:
821 if not wlock:
808 wlock = self.wlock()
822 wlock = self.wlock()
809 for f in list:
823 for f in list:
810 p = self.wjoin(f)
824 p = self.wjoin(f)
811 if os.path.exists(p):
825 if os.path.exists(p):
812 self.ui.warn(_("%s still exists!\n") % f)
826 self.ui.warn(_("%s still exists!\n") % f)
813 elif self.dirstate.state(f) == 'a':
827 elif self.dirstate.state(f) == 'a':
814 self.dirstate.forget([f])
828 self.dirstate.forget([f])
815 elif f not in self.dirstate:
829 elif f not in self.dirstate:
816 self.ui.warn(_("%s not tracked!\n") % f)
830 self.ui.warn(_("%s not tracked!\n") % f)
817 else:
831 else:
818 self.dirstate.update([f], "r")
832 self.dirstate.update([f], "r")
819
833
820 def undelete(self, list, wlock=None):
834 def undelete(self, list, wlock=None):
821 p = self.dirstate.parents()[0]
835 p = self.dirstate.parents()[0]
822 mn = self.changelog.read(p)[0]
836 mn = self.changelog.read(p)[0]
823 m = self.manifest.read(mn)
837 m = self.manifest.read(mn)
824 if not wlock:
838 if not wlock:
825 wlock = self.wlock()
839 wlock = self.wlock()
826 for f in list:
840 for f in list:
827 if self.dirstate.state(f) not in "r":
841 if self.dirstate.state(f) not in "r":
828 self.ui.warn("%s not removed!\n" % f)
842 self.ui.warn("%s not removed!\n" % f)
829 else:
843 else:
830 t = self.file(f).read(m[f])
844 t = self.file(f).read(m[f])
831 self.wwrite(f, t)
845 self.wwrite(f, t)
832 util.set_exec(self.wjoin(f), m.execf(f))
846 util.set_exec(self.wjoin(f), m.execf(f))
833 self.dirstate.update([f], "n")
847 self.dirstate.update([f], "n")
834
848
835 def copy(self, source, dest, wlock=None):
849 def copy(self, source, dest, wlock=None):
836 p = self.wjoin(dest)
850 p = self.wjoin(dest)
837 if not os.path.exists(p):
851 if not os.path.exists(p):
838 self.ui.warn(_("%s does not exist!\n") % dest)
852 self.ui.warn(_("%s does not exist!\n") % dest)
839 elif not os.path.isfile(p):
853 elif not os.path.isfile(p):
840 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
854 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
841 else:
855 else:
842 if not wlock:
856 if not wlock:
843 wlock = self.wlock()
857 wlock = self.wlock()
844 if self.dirstate.state(dest) == '?':
858 if self.dirstate.state(dest) == '?':
845 self.dirstate.update([dest], "a")
859 self.dirstate.update([dest], "a")
846 self.dirstate.copy(source, dest)
860 self.dirstate.copy(source, dest)
847
861
848 def heads(self, start=None):
862 def heads(self, start=None):
849 heads = self.changelog.heads(start)
863 heads = self.changelog.heads(start)
850 # sort the output in rev descending order
864 # sort the output in rev descending order
851 heads = [(-self.changelog.rev(h), h) for h in heads]
865 heads = [(-self.changelog.rev(h), h) for h in heads]
852 heads.sort()
866 heads.sort()
853 return [n for (r, n) in heads]
867 return [n for (r, n) in heads]
854
868
855 # branchlookup returns a dict giving a list of branches for
869 # branchlookup returns a dict giving a list of branches for
856 # each head. A branch is defined as the tag of a node or
870 # each head. A branch is defined as the tag of a node or
857 # the branch of the node's parents. If a node has multiple
871 # the branch of the node's parents. If a node has multiple
858 # branch tags, tags are eliminated if they are visible from other
872 # branch tags, tags are eliminated if they are visible from other
859 # branch tags.
873 # branch tags.
860 #
874 #
861 # So, for this graph: a->b->c->d->e
875 # So, for this graph: a->b->c->d->e
862 # \ /
876 # \ /
863 # aa -----/
877 # aa -----/
864 # a has tag 2.6.12
878 # a has tag 2.6.12
865 # d has tag 2.6.13
879 # d has tag 2.6.13
866 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
880 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
867 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
881 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
868 # from the list.
882 # from the list.
869 #
883 #
870 # It is possible that more than one head will have the same branch tag.
884 # It is possible that more than one head will have the same branch tag.
871 # callers need to check the result for multiple heads under the same
885 # callers need to check the result for multiple heads under the same
872 # branch tag if that is a problem for them (ie checkout of a specific
886 # branch tag if that is a problem for them (ie checkout of a specific
873 # branch).
887 # branch).
874 #
888 #
875 # passing in a specific branch will limit the depth of the search
889 # passing in a specific branch will limit the depth of the search
876 # through the parents. It won't limit the branches returned in the
890 # through the parents. It won't limit the branches returned in the
877 # result though.
891 # result though.
878 def branchlookup(self, heads=None, branch=None):
892 def branchlookup(self, heads=None, branch=None):
879 if not heads:
893 if not heads:
880 heads = self.heads()
894 heads = self.heads()
881 headt = [ h for h in heads ]
895 headt = [ h for h in heads ]
882 chlog = self.changelog
896 chlog = self.changelog
883 branches = {}
897 branches = {}
884 merges = []
898 merges = []
885 seenmerge = {}
899 seenmerge = {}
886
900
887 # traverse the tree once for each head, recording in the branches
901 # traverse the tree once for each head, recording in the branches
888 # dict which tags are visible from this head. The branches
902 # dict which tags are visible from this head. The branches
889 # dict also records which tags are visible from each tag
903 # dict also records which tags are visible from each tag
890 # while we traverse.
904 # while we traverse.
891 while headt or merges:
905 while headt or merges:
892 if merges:
906 if merges:
893 n, found = merges.pop()
907 n, found = merges.pop()
894 visit = [n]
908 visit = [n]
895 else:
909 else:
896 h = headt.pop()
910 h = headt.pop()
897 visit = [h]
911 visit = [h]
898 found = [h]
912 found = [h]
899 seen = {}
913 seen = {}
900 while visit:
914 while visit:
901 n = visit.pop()
915 n = visit.pop()
902 if n in seen:
916 if n in seen:
903 continue
917 continue
904 pp = chlog.parents(n)
918 pp = chlog.parents(n)
905 tags = self.nodetags(n)
919 tags = self.nodetags(n)
906 if tags:
920 if tags:
907 for x in tags:
921 for x in tags:
908 if x == 'tip':
922 if x == 'tip':
909 continue
923 continue
910 for f in found:
924 for f in found:
911 branches.setdefault(f, {})[n] = 1
925 branches.setdefault(f, {})[n] = 1
912 branches.setdefault(n, {})[n] = 1
926 branches.setdefault(n, {})[n] = 1
913 break
927 break
914 if n not in found:
928 if n not in found:
915 found.append(n)
929 found.append(n)
916 if branch in tags:
930 if branch in tags:
917 continue
931 continue
918 seen[n] = 1
932 seen[n] = 1
919 if pp[1] != nullid and n not in seenmerge:
933 if pp[1] != nullid and n not in seenmerge:
920 merges.append((pp[1], [x for x in found]))
934 merges.append((pp[1], [x for x in found]))
921 seenmerge[n] = 1
935 seenmerge[n] = 1
922 if pp[0] != nullid:
936 if pp[0] != nullid:
923 visit.append(pp[0])
937 visit.append(pp[0])
924 # traverse the branches dict, eliminating branch tags from each
938 # traverse the branches dict, eliminating branch tags from each
925 # head that are visible from another branch tag for that head.
939 # head that are visible from another branch tag for that head.
926 out = {}
940 out = {}
927 viscache = {}
941 viscache = {}
928 for h in heads:
942 for h in heads:
929 def visible(node):
943 def visible(node):
930 if node in viscache:
944 if node in viscache:
931 return viscache[node]
945 return viscache[node]
932 ret = {}
946 ret = {}
933 visit = [node]
947 visit = [node]
934 while visit:
948 while visit:
935 x = visit.pop()
949 x = visit.pop()
936 if x in viscache:
950 if x in viscache:
937 ret.update(viscache[x])
951 ret.update(viscache[x])
938 elif x not in ret:
952 elif x not in ret:
939 ret[x] = 1
953 ret[x] = 1
940 if x in branches:
954 if x in branches:
941 visit[len(visit):] = branches[x].keys()
955 visit[len(visit):] = branches[x].keys()
942 viscache[node] = ret
956 viscache[node] = ret
943 return ret
957 return ret
944 if h not in branches:
958 if h not in branches:
945 continue
959 continue
946 # O(n^2), but somewhat limited. This only searches the
960 # O(n^2), but somewhat limited. This only searches the
947 # tags visible from a specific head, not all the tags in the
961 # tags visible from a specific head, not all the tags in the
948 # whole repo.
962 # whole repo.
949 for b in branches[h]:
963 for b in branches[h]:
950 vis = False
964 vis = False
951 for bb in branches[h].keys():
965 for bb in branches[h].keys():
952 if b != bb:
966 if b != bb:
953 if b in visible(bb):
967 if b in visible(bb):
954 vis = True
968 vis = True
955 break
969 break
956 if not vis:
970 if not vis:
957 l = out.setdefault(h, [])
971 l = out.setdefault(h, [])
958 l[len(l):] = self.nodetags(b)
972 l[len(l):] = self.nodetags(b)
959 return out
973 return out
960
974
961 def branches(self, nodes):
975 def branches(self, nodes):
962 if not nodes:
976 if not nodes:
963 nodes = [self.changelog.tip()]
977 nodes = [self.changelog.tip()]
964 b = []
978 b = []
965 for n in nodes:
979 for n in nodes:
966 t = n
980 t = n
967 while 1:
981 while 1:
968 p = self.changelog.parents(n)
982 p = self.changelog.parents(n)
969 if p[1] != nullid or p[0] == nullid:
983 if p[1] != nullid or p[0] == nullid:
970 b.append((t, n, p[0], p[1]))
984 b.append((t, n, p[0], p[1]))
971 break
985 break
972 n = p[0]
986 n = p[0]
973 return b
987 return b
974
988
975 def between(self, pairs):
989 def between(self, pairs):
976 r = []
990 r = []
977
991
978 for top, bottom in pairs:
992 for top, bottom in pairs:
979 n, l, i = top, [], 0
993 n, l, i = top, [], 0
980 f = 1
994 f = 1
981
995
982 while n != bottom:
996 while n != bottom:
983 p = self.changelog.parents(n)[0]
997 p = self.changelog.parents(n)[0]
984 if i == f:
998 if i == f:
985 l.append(n)
999 l.append(n)
986 f = f * 2
1000 f = f * 2
987 n = p
1001 n = p
988 i += 1
1002 i += 1
989
1003
990 r.append(l)
1004 r.append(l)
991
1005
992 return r
1006 return r
993
1007
994 def findincoming(self, remote, base=None, heads=None, force=False):
1008 def findincoming(self, remote, base=None, heads=None, force=False):
995 """Return list of roots of the subsets of missing nodes from remote
1009 """Return list of roots of the subsets of missing nodes from remote
996
1010
997 If base dict is specified, assume that these nodes and their parents
1011 If base dict is specified, assume that these nodes and their parents
998 exist on the remote side and that no child of a node of base exists
1012 exist on the remote side and that no child of a node of base exists
999 in both remote and self.
1013 in both remote and self.
1000 Furthermore, base will be updated to include the nodes that exist in
1014 Furthermore, base will be updated to include the nodes that exist in
1001 both self and remote but none of whose children exist in both.
1015 both self and remote but none of whose children exist in both.
1002 If a list of heads is specified, return only nodes which are heads
1016 If a list of heads is specified, return only nodes which are heads
1003 or ancestors of these heads.
1017 or ancestors of these heads.
1004
1018
1005 All the ancestors of base are in self and in remote.
1019 All the ancestors of base are in self and in remote.
1006 All the descendants of the list returned are missing in self.
1020 All the descendants of the list returned are missing in self.
1007 (and so we know that the rest of the nodes are missing in remote, see
1021 (and so we know that the rest of the nodes are missing in remote, see
1008 outgoing)
1022 outgoing)
1009 """
1023 """
1010 m = self.changelog.nodemap
1024 m = self.changelog.nodemap
1011 search = []
1025 search = []
1012 fetch = {}
1026 fetch = {}
1013 seen = {}
1027 seen = {}
1014 seenbranch = {}
1028 seenbranch = {}
1015 if base is None:
1029 if base is None:
1016 base = {}
1030 base = {}
1017
1031
1018 if not heads:
1032 if not heads:
1019 heads = remote.heads()
1033 heads = remote.heads()
1020
1034
1021 if self.changelog.tip() == nullid:
1035 if self.changelog.tip() == nullid:
1022 base[nullid] = 1
1036 base[nullid] = 1
1023 if heads != [nullid]:
1037 if heads != [nullid]:
1024 return [nullid]
1038 return [nullid]
1025 return []
1039 return []
1026
1040
1027 # assume we're closer to the tip than the root
1041 # assume we're closer to the tip than the root
1028 # and start by examining the heads
1042 # and start by examining the heads
1029 self.ui.status(_("searching for changes\n"))
1043 self.ui.status(_("searching for changes\n"))
1030
1044
1031 unknown = []
1045 unknown = []
1032 for h in heads:
1046 for h in heads:
1033 if h not in m:
1047 if h not in m:
1034 unknown.append(h)
1048 unknown.append(h)
1035 else:
1049 else:
1036 base[h] = 1
1050 base[h] = 1
1037
1051
1038 if not unknown:
1052 if not unknown:
1039 return []
1053 return []
1040
1054
1041 req = dict.fromkeys(unknown)
1055 req = dict.fromkeys(unknown)
1042 reqcnt = 0
1056 reqcnt = 0
1043
1057
1044 # search through remote branches
1058 # search through remote branches
1045 # a 'branch' here is a linear segment of history, with four parts:
1059 # a 'branch' here is a linear segment of history, with four parts:
1046 # head, root, first parent, second parent
1060 # head, root, first parent, second parent
1047 # (a branch always has two parents (or none) by definition)
1061 # (a branch always has two parents (or none) by definition)
1048 unknown = remote.branches(unknown)
1062 unknown = remote.branches(unknown)
1049 while unknown:
1063 while unknown:
1050 r = []
1064 r = []
1051 while unknown:
1065 while unknown:
1052 n = unknown.pop(0)
1066 n = unknown.pop(0)
1053 if n[0] in seen:
1067 if n[0] in seen:
1054 continue
1068 continue
1055
1069
1056 self.ui.debug(_("examining %s:%s\n")
1070 self.ui.debug(_("examining %s:%s\n")
1057 % (short(n[0]), short(n[1])))
1071 % (short(n[0]), short(n[1])))
1058 if n[0] == nullid: # found the end of the branch
1072 if n[0] == nullid: # found the end of the branch
1059 pass
1073 pass
1060 elif n in seenbranch:
1074 elif n in seenbranch:
1061 self.ui.debug(_("branch already found\n"))
1075 self.ui.debug(_("branch already found\n"))
1062 continue
1076 continue
1063 elif n[1] and n[1] in m: # do we know the base?
1077 elif n[1] and n[1] in m: # do we know the base?
1064 self.ui.debug(_("found incomplete branch %s:%s\n")
1078 self.ui.debug(_("found incomplete branch %s:%s\n")
1065 % (short(n[0]), short(n[1])))
1079 % (short(n[0]), short(n[1])))
1066 search.append(n) # schedule branch range for scanning
1080 search.append(n) # schedule branch range for scanning
1067 seenbranch[n] = 1
1081 seenbranch[n] = 1
1068 else:
1082 else:
1069 if n[1] not in seen and n[1] not in fetch:
1083 if n[1] not in seen and n[1] not in fetch:
1070 if n[2] in m and n[3] in m:
1084 if n[2] in m and n[3] in m:
1071 self.ui.debug(_("found new changeset %s\n") %
1085 self.ui.debug(_("found new changeset %s\n") %
1072 short(n[1]))
1086 short(n[1]))
1073 fetch[n[1]] = 1 # earliest unknown
1087 fetch[n[1]] = 1 # earliest unknown
1074 for p in n[2:4]:
1088 for p in n[2:4]:
1075 if p in m:
1089 if p in m:
1076 base[p] = 1 # latest known
1090 base[p] = 1 # latest known
1077
1091
1078 for p in n[2:4]:
1092 for p in n[2:4]:
1079 if p not in req and p not in m:
1093 if p not in req and p not in m:
1080 r.append(p)
1094 r.append(p)
1081 req[p] = 1
1095 req[p] = 1
1082 seen[n[0]] = 1
1096 seen[n[0]] = 1
1083
1097
1084 if r:
1098 if r:
1085 reqcnt += 1
1099 reqcnt += 1
1086 self.ui.debug(_("request %d: %s\n") %
1100 self.ui.debug(_("request %d: %s\n") %
1087 (reqcnt, " ".join(map(short, r))))
1101 (reqcnt, " ".join(map(short, r))))
1088 for p in range(0, len(r), 10):
1102 for p in range(0, len(r), 10):
1089 for b in remote.branches(r[p:p+10]):
1103 for b in remote.branches(r[p:p+10]):
1090 self.ui.debug(_("received %s:%s\n") %
1104 self.ui.debug(_("received %s:%s\n") %
1091 (short(b[0]), short(b[1])))
1105 (short(b[0]), short(b[1])))
1092 unknown.append(b)
1106 unknown.append(b)
1093
1107
1094 # do binary search on the branches we found
1108 # do binary search on the branches we found
1095 while search:
1109 while search:
1096 n = search.pop(0)
1110 n = search.pop(0)
1097 reqcnt += 1
1111 reqcnt += 1
1098 l = remote.between([(n[0], n[1])])[0]
1112 l = remote.between([(n[0], n[1])])[0]
1099 l.append(n[1])
1113 l.append(n[1])
1100 p = n[0]
1114 p = n[0]
1101 f = 1
1115 f = 1
1102 for i in l:
1116 for i in l:
1103 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1117 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1104 if i in m:
1118 if i in m:
1105 if f <= 2:
1119 if f <= 2:
1106 self.ui.debug(_("found new branch changeset %s\n") %
1120 self.ui.debug(_("found new branch changeset %s\n") %
1107 short(p))
1121 short(p))
1108 fetch[p] = 1
1122 fetch[p] = 1
1109 base[i] = 1
1123 base[i] = 1
1110 else:
1124 else:
1111 self.ui.debug(_("narrowed branch search to %s:%s\n")
1125 self.ui.debug(_("narrowed branch search to %s:%s\n")
1112 % (short(p), short(i)))
1126 % (short(p), short(i)))
1113 search.append((p, i))
1127 search.append((p, i))
1114 break
1128 break
1115 p, f = i, f * 2
1129 p, f = i, f * 2
1116
1130
1117 # sanity check our fetch list
1131 # sanity check our fetch list
1118 for f in fetch.keys():
1132 for f in fetch.keys():
1119 if f in m:
1133 if f in m:
1120 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1134 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1121
1135
1122 if base.keys() == [nullid]:
1136 if base.keys() == [nullid]:
1123 if force:
1137 if force:
1124 self.ui.warn(_("warning: repository is unrelated\n"))
1138 self.ui.warn(_("warning: repository is unrelated\n"))
1125 else:
1139 else:
1126 raise util.Abort(_("repository is unrelated"))
1140 raise util.Abort(_("repository is unrelated"))
1127
1141
1128 self.ui.debug(_("found new changesets starting at ") +
1142 self.ui.debug(_("found new changesets starting at ") +
1129 " ".join([short(f) for f in fetch]) + "\n")
1143 " ".join([short(f) for f in fetch]) + "\n")
1130
1144
1131 self.ui.debug(_("%d total queries\n") % reqcnt)
1145 self.ui.debug(_("%d total queries\n") % reqcnt)
1132
1146
1133 return fetch.keys()
1147 return fetch.keys()
1134
1148
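# Illustrative sketch (not part of the original changeset): a minimal
# caller of findincoming(), roughly the way a pull preview would use it.
# 'repo' and 'other' are assumed to be a localrepository and a remote peer
# obtained elsewhere; the helper name is hypothetical.
def example_preview_incoming(repo, other):
    common = {}                      # filled in with nodes known to both sides
    fetch = repo.findincoming(other, base=common)
    if not fetch:
        repo.ui.status(_("no changes found\n"))
        return [], common.keys()
    # 'fetch' holds the roots of the missing subsets described above
    return fetch, common.keys()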
1135 def findoutgoing(self, remote, base=None, heads=None, force=False):
1149 def findoutgoing(self, remote, base=None, heads=None, force=False):
1136 """Return list of nodes that are roots of subsets not in remote
1150 """Return list of nodes that are roots of subsets not in remote
1137
1151
1138 If base dict is specified, assume that these nodes and their parents
1152 If base dict is specified, assume that these nodes and their parents
1139 exist on the remote side.
1153 exist on the remote side.
1140 If a list of heads is specified, return only nodes which are heads
1154 If a list of heads is specified, return only nodes which are heads
1141 or ancestors of these heads, and return a second element which
1155 or ancestors of these heads, and return a second element which
1142 contains all remote heads which get new children.
1156 contains all remote heads which get new children.
1143 """
1157 """
1144 if base is None:
1158 if base is None:
1145 base = {}
1159 base = {}
1146 self.findincoming(remote, base, heads, force=force)
1160 self.findincoming(remote, base, heads, force=force)
1147
1161
1148 self.ui.debug(_("common changesets up to ")
1162 self.ui.debug(_("common changesets up to ")
1149 + " ".join(map(short, base.keys())) + "\n")
1163 + " ".join(map(short, base.keys())) + "\n")
1150
1164
1151 remain = dict.fromkeys(self.changelog.nodemap)
1165 remain = dict.fromkeys(self.changelog.nodemap)
1152
1166
1153 # prune everything remote has from the tree
1167 # prune everything remote has from the tree
1154 del remain[nullid]
1168 del remain[nullid]
1155 remove = base.keys()
1169 remove = base.keys()
1156 while remove:
1170 while remove:
1157 n = remove.pop(0)
1171 n = remove.pop(0)
1158 if n in remain:
1172 if n in remain:
1159 del remain[n]
1173 del remain[n]
1160 for p in self.changelog.parents(n):
1174 for p in self.changelog.parents(n):
1161 remove.append(p)
1175 remove.append(p)
1162
1176
1163 # find every node whose parents have been pruned
1177 # find every node whose parents have been pruned
1164 subset = []
1178 subset = []
1165 # find every remote head that will get new children
1179 # find every remote head that will get new children
1166 updated_heads = {}
1180 updated_heads = {}
1167 for n in remain:
1181 for n in remain:
1168 p1, p2 = self.changelog.parents(n)
1182 p1, p2 = self.changelog.parents(n)
1169 if p1 not in remain and p2 not in remain:
1183 if p1 not in remain and p2 not in remain:
1170 subset.append(n)
1184 subset.append(n)
1171 if heads:
1185 if heads:
1172 if p1 in heads:
1186 if p1 in heads:
1173 updated_heads[p1] = True
1187 updated_heads[p1] = True
1174 if p2 in heads:
1188 if p2 in heads:
1175 updated_heads[p2] = True
1189 updated_heads[p2] = True
1176
1190
1177 # this is the set of all roots we have to push
1191 # this is the set of all roots we have to push
1178 if heads:
1192 if heads:
1179 return subset, updated_heads.keys()
1193 return subset, updated_heads.keys()
1180 else:
1194 else:
1181 return subset
1195 return subset
1182
1196
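# Illustrative sketch (not part of the original changeset): using
# findoutgoing() to list the changesets a push would send, roughly what
# 'hg outgoing' does.  'repo' and 'other' are assumed to be a
# localrepository and a remote peer; nodesbetween() is used the same way
# prepush() uses it below.
def example_preview_outgoing(repo, other):
    roots = repo.findoutgoing(other)
    if not roots:
        return []
    # expand the missing roots into the full list of outgoing changesets
    return repo.changelog.nodesbetween(roots, None)[0]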
1183 def pull(self, remote, heads=None, force=False, lock=None):
1197 def pull(self, remote, heads=None, force=False, lock=None):
1184 mylock = False
1198 mylock = False
1185 if not lock:
1199 if not lock:
1186 lock = self.lock()
1200 lock = self.lock()
1187 mylock = True
1201 mylock = True
1188
1202
1189 try:
1203 try:
1190 fetch = self.findincoming(remote, force=force)
1204 fetch = self.findincoming(remote, force=force)
1191 if fetch == [nullid]:
1205 if fetch == [nullid]:
1192 self.ui.status(_("requesting all changes\n"))
1206 self.ui.status(_("requesting all changes\n"))
1193
1207
1194 if not fetch:
1208 if not fetch:
1195 self.ui.status(_("no changes found\n"))
1209 self.ui.status(_("no changes found\n"))
1196 return 0
1210 return 0
1197
1211
1198 if heads is None:
1212 if heads is None:
1199 cg = remote.changegroup(fetch, 'pull')
1213 cg = remote.changegroup(fetch, 'pull')
1200 else:
1214 else:
1201 cg = remote.changegroupsubset(fetch, heads, 'pull')
1215 cg = remote.changegroupsubset(fetch, heads, 'pull')
1202 return self.addchangegroup(cg, 'pull', remote.url())
1216 return self.addchangegroup(cg, 'pull', remote.url())
1203 finally:
1217 finally:
1204 if mylock:
1218 if mylock:
1205 lock.release()
1219 lock.release()
1206
1220
1207 def push(self, remote, force=False, revs=None):
1221 def push(self, remote, force=False, revs=None):
1208 # there are two ways to push to remote repo:
1222 # there are two ways to push to remote repo:
1209 #
1223 #
1210 # addchangegroup assumes local user can lock remote
1224 # addchangegroup assumes local user can lock remote
1211 # repo (local filesystem, old ssh servers).
1225 # repo (local filesystem, old ssh servers).
1212 #
1226 #
1213 # unbundle assumes local user cannot lock remote repo (new ssh
1227 # unbundle assumes local user cannot lock remote repo (new ssh
1214 # servers, http servers).
1228 # servers, http servers).
1215
1229
1216 if remote.capable('unbundle'):
1230 if remote.capable('unbundle'):
1217 return self.push_unbundle(remote, force, revs)
1231 return self.push_unbundle(remote, force, revs)
1218 return self.push_addchangegroup(remote, force, revs)
1232 return self.push_addchangegroup(remote, force, revs)
1219
1233
1220 def prepush(self, remote, force, revs):
1234 def prepush(self, remote, force, revs):
1221 base = {}
1235 base = {}
1222 remote_heads = remote.heads()
1236 remote_heads = remote.heads()
1223 inc = self.findincoming(remote, base, remote_heads, force=force)
1237 inc = self.findincoming(remote, base, remote_heads, force=force)
1224 if not force and inc:
1238 if not force and inc:
1225 self.ui.warn(_("abort: unsynced remote changes!\n"))
1239 self.ui.warn(_("abort: unsynced remote changes!\n"))
1226 self.ui.status(_("(did you forget to sync?"
1240 self.ui.status(_("(did you forget to sync?"
1227 " use push -f to force)\n"))
1241 " use push -f to force)\n"))
1228 return None, 1
1242 return None, 1
1229
1243
1230 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1244 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1231 if revs is not None:
1245 if revs is not None:
1232 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1246 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1233 else:
1247 else:
1234 bases, heads = update, self.changelog.heads()
1248 bases, heads = update, self.changelog.heads()
1235
1249
1236 if not bases:
1250 if not bases:
1237 self.ui.status(_("no changes found\n"))
1251 self.ui.status(_("no changes found\n"))
1238 return None, 1
1252 return None, 1
1239 elif not force:
1253 elif not force:
1240 # FIXME we don't properly detect creation of new heads
1254 # FIXME we don't properly detect creation of new heads
1241 # in the push -r case, assume the user knows what he's doing
1255 # in the push -r case, assume the user knows what he's doing
1242 if not revs and len(remote_heads) < len(heads) \
1256 if not revs and len(remote_heads) < len(heads) \
1243 and remote_heads != [nullid]:
1257 and remote_heads != [nullid]:
1244 self.ui.warn(_("abort: push creates new remote branches!\n"))
1258 self.ui.warn(_("abort: push creates new remote branches!\n"))
1245 self.ui.status(_("(did you forget to merge?"
1259 self.ui.status(_("(did you forget to merge?"
1246 " use push -f to force)\n"))
1260 " use push -f to force)\n"))
1247 return None, 1
1261 return None, 1
1248
1262
1249 if revs is None:
1263 if revs is None:
1250 cg = self.changegroup(update, 'push')
1264 cg = self.changegroup(update, 'push')
1251 else:
1265 else:
1252 cg = self.changegroupsubset(update, revs, 'push')
1266 cg = self.changegroupsubset(update, revs, 'push')
1253 return cg, remote_heads
1267 return cg, remote_heads
1254
1268
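# Illustrative sketch (not part of the original changeset): the
# head-counting heuristic prepush() applies above, isolated as a helper.
# As the FIXME notes, this only approximates "push creates new remote
# heads"; the helper name is hypothetical.
def example_would_create_new_heads(local_heads, remote_heads, revs):
    if revs:                         # push -r skips the check entirely
        return False
    if remote_heads == [nullid]:     # empty remote repository
        return False
    return len(remote_heads) < len(local_heads)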
1255 def push_addchangegroup(self, remote, force, revs):
1269 def push_addchangegroup(self, remote, force, revs):
1256 lock = remote.lock()
1270 lock = remote.lock()
1257
1271
1258 ret = self.prepush(remote, force, revs)
1272 ret = self.prepush(remote, force, revs)
1259 if ret[0] is not None:
1273 if ret[0] is not None:
1260 cg, remote_heads = ret
1274 cg, remote_heads = ret
1261 return remote.addchangegroup(cg, 'push', self.url())
1275 return remote.addchangegroup(cg, 'push', self.url())
1262 return ret[1]
1276 return ret[1]
1263
1277
1264 def push_unbundle(self, remote, force, revs):
1278 def push_unbundle(self, remote, force, revs):
1265 # local repo finds heads on server, finds out what revs it
1279 # local repo finds heads on server, finds out what revs it
1266 # must push. once revs transferred, if server finds it has
1280 # must push. once revs transferred, if server finds it has
1267 # different heads (someone else won commit/push race), server
1281 # different heads (someone else won commit/push race), server
1268 # aborts.
1282 # aborts.
1269
1283
1270 ret = self.prepush(remote, force, revs)
1284 ret = self.prepush(remote, force, revs)
1271 if ret[0] is not None:
1285 if ret[0] is not None:
1272 cg, remote_heads = ret
1286 cg, remote_heads = ret
1273 if force: remote_heads = ['force']
1287 if force: remote_heads = ['force']
1274 return remote.unbundle(cg, remote_heads, 'push')
1288 return remote.unbundle(cg, remote_heads, 'push')
1275 return ret[1]
1289 return ret[1]
1276
1290
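# Illustrative sketch (not part of the original changeset): the commit/push
# race check push_unbundle() relies on, as described in its comment above.
# The real check lives in the server-side unbundle implementation, not in
# this file; this only shows the idea of comparing the heads the client saw
# against the heads the server has when the bundle arrives.
def example_check_push_race(expected_heads, current_heads):
    if expected_heads == ['force']:
        return                       # client explicitly bypassed the check
    if expected_heads != current_heads:
        raise util.Abort(_("unsynced changes"))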
1277 def changegroupsubset(self, bases, heads, source):
1291 def changegroupsubset(self, bases, heads, source):
1278 """This function generates a changegroup consisting of all the nodes
1292 """This function generates a changegroup consisting of all the nodes
1279 that are descendants of any of the bases, and ancestors of any of
1293 that are descendants of any of the bases, and ancestors of any of
1280 the heads.
1294 the heads.
1281
1295
1282 It is fairly complex as determining which filenodes and which
1296 It is fairly complex as determining which filenodes and which
1283 manifest nodes need to be included for the changeset to be complete
1297 manifest nodes need to be included for the changeset to be complete
1284 is non-trivial.
1298 is non-trivial.
1285
1299
1286 Another wrinkle is doing the reverse, figuring out which changeset in
1300 Another wrinkle is doing the reverse, figuring out which changeset in
1287 the changegroup a particular filenode or manifestnode belongs to."""
1301 the changegroup a particular filenode or manifestnode belongs to."""
1288
1302
1289 self.hook('preoutgoing', throw=True, source=source)
1303 self.hook('preoutgoing', throw=True, source=source)
1290
1304
1291 # Set up some initial variables
1305 # Set up some initial variables
1292 # Make it easy to refer to self.changelog
1306 # Make it easy to refer to self.changelog
1293 cl = self.changelog
1307 cl = self.changelog
1294 # msng is short for missing - compute the list of changesets in this
1308 # msng is short for missing - compute the list of changesets in this
1295 # changegroup.
1309 # changegroup.
1296 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1310 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1297 # Some bases may turn out to be superfluous, and some heads may be
1311 # Some bases may turn out to be superfluous, and some heads may be
1298 # too. nodesbetween will return the minimal set of bases and heads
1312 # too. nodesbetween will return the minimal set of bases and heads
1299 # necessary to re-create the changegroup.
1313 # necessary to re-create the changegroup.
1300
1314
1301 # Known heads are the list of heads that it is assumed the recipient
1315 # Known heads are the list of heads that it is assumed the recipient
1302 # of this changegroup will know about.
1316 # of this changegroup will know about.
1303 knownheads = {}
1317 knownheads = {}
1304 # We assume that all parents of bases are known heads.
1318 # We assume that all parents of bases are known heads.
1305 for n in bases:
1319 for n in bases:
1306 for p in cl.parents(n):
1320 for p in cl.parents(n):
1307 if p != nullid:
1321 if p != nullid:
1308 knownheads[p] = 1
1322 knownheads[p] = 1
1309 knownheads = knownheads.keys()
1323 knownheads = knownheads.keys()
1310 if knownheads:
1324 if knownheads:
1311 # Now that we know what heads are known, we can compute which
1325 # Now that we know what heads are known, we can compute which
1312 # changesets are known. The recipient must know about all
1326 # changesets are known. The recipient must know about all
1313 # changesets required to reach the known heads from the null
1327 # changesets required to reach the known heads from the null
1314 # changeset.
1328 # changeset.
1315 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1329 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1316 junk = None
1330 junk = None
1317 # Transform the list into an ersatz set.
1331 # Transform the list into an ersatz set.
1318 has_cl_set = dict.fromkeys(has_cl_set)
1332 has_cl_set = dict.fromkeys(has_cl_set)
1319 else:
1333 else:
1320 # If there were no known heads, the recipient cannot be assumed to
1334 # If there were no known heads, the recipient cannot be assumed to
1321 # know about any changesets.
1335 # know about any changesets.
1322 has_cl_set = {}
1336 has_cl_set = {}
1323
1337
1324 # Make it easy to refer to self.manifest
1338 # Make it easy to refer to self.manifest
1325 mnfst = self.manifest
1339 mnfst = self.manifest
1326 # We don't know which manifests are missing yet
1340 # We don't know which manifests are missing yet
1327 msng_mnfst_set = {}
1341 msng_mnfst_set = {}
1328 # Nor do we know which filenodes are missing.
1342 # Nor do we know which filenodes are missing.
1329 msng_filenode_set = {}
1343 msng_filenode_set = {}
1330
1344
1331 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1345 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1332 junk = None
1346 junk = None
1333
1347
1334 # A changeset always belongs to itself, so the changenode lookup
1348 # A changeset always belongs to itself, so the changenode lookup
1335 # function for a changenode is identity.
1349 # function for a changenode is identity.
1336 def identity(x):
1350 def identity(x):
1337 return x
1351 return x
1338
1352
1339 # A function generating function. Sets up an environment for the
1353 # A function generating function. Sets up an environment for the
1340 # inner function.
1354 # inner function.
1341 def cmp_by_rev_func(revlog):
1355 def cmp_by_rev_func(revlog):
1342 # Compare two nodes by their revision number in the environment's
1356 # Compare two nodes by their revision number in the environment's
1343 # revision history. Since the revision number both represents the
1357 # revision history. Since the revision number both represents the
1344 # most efficient order to read the nodes in, and represents a
1358 # most efficient order to read the nodes in, and represents a
1345 # topological sorting of the nodes, this function is often useful.
1359 # topological sorting of the nodes, this function is often useful.
1346 def cmp_by_rev(a, b):
1360 def cmp_by_rev(a, b):
1347 return cmp(revlog.rev(a), revlog.rev(b))
1361 return cmp(revlog.rev(a), revlog.rev(b))
1348 return cmp_by_rev
1362 return cmp_by_rev
1349
1363
1350 # If we determine that a particular file or manifest node must be a
1364 # If we determine that a particular file or manifest node must be a
1351 # node that the recipient of the changegroup will already have, we can
1365 # node that the recipient of the changegroup will already have, we can
1352 # also assume the recipient will have all the parents. This function
1366 # also assume the recipient will have all the parents. This function
1353 # prunes them from the set of missing nodes.
1367 # prunes them from the set of missing nodes.
1354 def prune_parents(revlog, hasset, msngset):
1368 def prune_parents(revlog, hasset, msngset):
1355 haslst = hasset.keys()
1369 haslst = hasset.keys()
1356 haslst.sort(cmp_by_rev_func(revlog))
1370 haslst.sort(cmp_by_rev_func(revlog))
1357 for node in haslst:
1371 for node in haslst:
1358 parentlst = [p for p in revlog.parents(node) if p != nullid]
1372 parentlst = [p for p in revlog.parents(node) if p != nullid]
1359 while parentlst:
1373 while parentlst:
1360 n = parentlst.pop()
1374 n = parentlst.pop()
1361 if n not in hasset:
1375 if n not in hasset:
1362 hasset[n] = 1
1376 hasset[n] = 1
1363 p = [p for p in revlog.parents(n) if p != nullid]
1377 p = [p for p in revlog.parents(n) if p != nullid]
1364 parentlst.extend(p)
1378 parentlst.extend(p)
1365 for n in hasset:
1379 for n in hasset:
1366 msngset.pop(n, None)
1380 msngset.pop(n, None)
1367
1381
1368 # This is a function generating function used to set up an environment
1382 # This is a function generating function used to set up an environment
1369 # for the inner function to execute in.
1383 # for the inner function to execute in.
1370 def manifest_and_file_collector(changedfileset):
1384 def manifest_and_file_collector(changedfileset):
1371 # This is an information gathering function that gathers
1385 # This is an information gathering function that gathers
1372 # information from each changeset node that goes out as part of
1386 # information from each changeset node that goes out as part of
1373 # the changegroup. The information gathered is a list of which
1387 # the changegroup. The information gathered is a list of which
1374 # manifest nodes are potentially required (the recipient may
1388 # manifest nodes are potentially required (the recipient may
1375 # already have them) and total list of all files which were
1389 # already have them) and total list of all files which were
1376 # changed in any changeset in the changegroup.
1390 # changed in any changeset in the changegroup.
1377 #
1391 #
1378 # We also remember, for each manifest, the first changenode we saw
1392 # We also remember, for each manifest, the first changenode we saw
1379 # that referenced it, so we can later determine which changenode
1393 # that referenced it, so we can later determine which changenode
1380 # 'owns' the manifest.
1394 # 'owns' the manifest.
1381 def collect_manifests_and_files(clnode):
1395 def collect_manifests_and_files(clnode):
1382 c = cl.read(clnode)
1396 c = cl.read(clnode)
1383 for f in c[3]:
1397 for f in c[3]:
1384 # This is to make sure we only have one instance of each
1398 # This is to make sure we only have one instance of each
1385 # filename string for each filename.
1399 # filename string for each filename.
1386 changedfileset.setdefault(f, f)
1400 changedfileset.setdefault(f, f)
1387 msng_mnfst_set.setdefault(c[0], clnode)
1401 msng_mnfst_set.setdefault(c[0], clnode)
1388 return collect_manifests_and_files
1402 return collect_manifests_and_files
1389
1403
1390 # Figure out which manifest nodes (of the ones we think might be part
1404 # Figure out which manifest nodes (of the ones we think might be part
1391 # of the changegroup) the recipient must know about and remove them
1405 # of the changegroup) the recipient must know about and remove them
1392 # from the changegroup.
1406 # from the changegroup.
1393 def prune_manifests():
1407 def prune_manifests():
1394 has_mnfst_set = {}
1408 has_mnfst_set = {}
1395 for n in msng_mnfst_set:
1409 for n in msng_mnfst_set:
1396 # If a 'missing' manifest thinks it belongs to a changenode
1410 # If a 'missing' manifest thinks it belongs to a changenode
1397 # the recipient is assumed to have, obviously the recipient
1411 # the recipient is assumed to have, obviously the recipient
1398 # must have that manifest.
1412 # must have that manifest.
1399 linknode = cl.node(mnfst.linkrev(n))
1413 linknode = cl.node(mnfst.linkrev(n))
1400 if linknode in has_cl_set:
1414 if linknode in has_cl_set:
1401 has_mnfst_set[n] = 1
1415 has_mnfst_set[n] = 1
1402 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1416 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1403
1417
1404 # Use the information collected in collect_manifests_and_files to say
1418 # Use the information collected in collect_manifests_and_files to say
1405 # which changenode any manifestnode belongs to.
1419 # which changenode any manifestnode belongs to.
1406 def lookup_manifest_link(mnfstnode):
1420 def lookup_manifest_link(mnfstnode):
1407 return msng_mnfst_set[mnfstnode]
1421 return msng_mnfst_set[mnfstnode]
1408
1422
1409 # A function generating function that sets up the initial environment
1423 # A function generating function that sets up the initial environment
1410 # for the inner function.
1424 # for the inner function.
1411 def filenode_collector(changedfiles):
1425 def filenode_collector(changedfiles):
1412 next_rev = [0]
1426 next_rev = [0]
1413 # This gathers information from each manifestnode included in the
1427 # This gathers information from each manifestnode included in the
1414 # changegroup about which filenodes the manifest node references
1428 # changegroup about which filenodes the manifest node references
1415 # so we can include those in the changegroup too.
1429 # so we can include those in the changegroup too.
1416 #
1430 #
1417 # It also remembers which changenode each filenode belongs to. It
1431 # It also remembers which changenode each filenode belongs to. It
1418 # does this by assuming that a filenode belongs to the changenode
1432 # does this by assuming that a filenode belongs to the changenode
1419 # the first manifest that references it belongs to.
1433 # the first manifest that references it belongs to.
1420 def collect_msng_filenodes(mnfstnode):
1434 def collect_msng_filenodes(mnfstnode):
1421 r = mnfst.rev(mnfstnode)
1435 r = mnfst.rev(mnfstnode)
1422 if r == next_rev[0]:
1436 if r == next_rev[0]:
1423 # If the last rev we looked at was the one just previous,
1437 # If the last rev we looked at was the one just previous,
1424 # we only need to see a diff.
1438 # we only need to see a diff.
1425 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1439 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1426 # For each line in the delta
1440 # For each line in the delta
1427 for dline in delta.splitlines():
1441 for dline in delta.splitlines():
1428 # get the filename and filenode for that line
1442 # get the filename and filenode for that line
1429 f, fnode = dline.split('\0')
1443 f, fnode = dline.split('\0')
1430 fnode = bin(fnode[:40])
1444 fnode = bin(fnode[:40])
1431 f = changedfiles.get(f, None)
1445 f = changedfiles.get(f, None)
1432 # And if the file is in the list of files we care
1446 # And if the file is in the list of files we care
1433 # about.
1447 # about.
1434 if f is not None:
1448 if f is not None:
1435 # Get the changenode this manifest belongs to
1449 # Get the changenode this manifest belongs to
1436 clnode = msng_mnfst_set[mnfstnode]
1450 clnode = msng_mnfst_set[mnfstnode]
1437 # Create the set of filenodes for the file if
1451 # Create the set of filenodes for the file if
1438 # there isn't one already.
1452 # there isn't one already.
1439 ndset = msng_filenode_set.setdefault(f, {})
1453 ndset = msng_filenode_set.setdefault(f, {})
1440 # And set the filenode's changelog node to the
1454 # And set the filenode's changelog node to the
1441 # manifest's if it hasn't been set already.
1455 # manifest's if it hasn't been set already.
1442 ndset.setdefault(fnode, clnode)
1456 ndset.setdefault(fnode, clnode)
1443 else:
1457 else:
1444 # Otherwise we need a full manifest.
1458 # Otherwise we need a full manifest.
1445 m = mnfst.read(mnfstnode)
1459 m = mnfst.read(mnfstnode)
1446 # For every file we care about.
1460 # For every file we care about.
1447 for f in changedfiles:
1461 for f in changedfiles:
1448 fnode = m.get(f, None)
1462 fnode = m.get(f, None)
1449 # If it's in the manifest
1463 # If it's in the manifest
1450 if fnode is not None:
1464 if fnode is not None:
1451 # See comments above.
1465 # See comments above.
1452 clnode = msng_mnfst_set[mnfstnode]
1466 clnode = msng_mnfst_set[mnfstnode]
1453 ndset = msng_filenode_set.setdefault(f, {})
1467 ndset = msng_filenode_set.setdefault(f, {})
1454 ndset.setdefault(fnode, clnode)
1468 ndset.setdefault(fnode, clnode)
1455 # Remember the revision we hope to see next.
1469 # Remember the revision we hope to see next.
1456 next_rev[0] = r + 1
1470 next_rev[0] = r + 1
1457 return collect_msng_filenodes
1471 return collect_msng_filenodes
1458
1472
1459 # We have a list of filenodes we think we need for a file, let's remove
1473 # We have a list of filenodes we think we need for a file, let's remove
1460 # all those we know the recipient must have.
1474 # all those we know the recipient must have.
1461 def prune_filenodes(f, filerevlog):
1475 def prune_filenodes(f, filerevlog):
1462 msngset = msng_filenode_set[f]
1476 msngset = msng_filenode_set[f]
1463 hasset = {}
1477 hasset = {}
1464 # If a 'missing' filenode thinks it belongs to a changenode we
1478 # If a 'missing' filenode thinks it belongs to a changenode we
1465 # assume the recipient must have, then the recipient must have
1479 # assume the recipient must have, then the recipient must have
1466 # that filenode.
1480 # that filenode.
1467 for n in msngset:
1481 for n in msngset:
1468 clnode = cl.node(filerevlog.linkrev(n))
1482 clnode = cl.node(filerevlog.linkrev(n))
1469 if clnode in has_cl_set:
1483 if clnode in has_cl_set:
1470 hasset[n] = 1
1484 hasset[n] = 1
1471 prune_parents(filerevlog, hasset, msngset)
1485 prune_parents(filerevlog, hasset, msngset)
1472
1486
1473 # A function generating function that sets up a context for the
1487 # A function generating function that sets up a context for the
1474 # inner function.
1488 # inner function.
1475 def lookup_filenode_link_func(fname):
1489 def lookup_filenode_link_func(fname):
1476 msngset = msng_filenode_set[fname]
1490 msngset = msng_filenode_set[fname]
1477 # Lookup the changenode the filenode belongs to.
1491 # Lookup the changenode the filenode belongs to.
1478 def lookup_filenode_link(fnode):
1492 def lookup_filenode_link(fnode):
1479 return msngset[fnode]
1493 return msngset[fnode]
1480 return lookup_filenode_link
1494 return lookup_filenode_link
1481
1495
1482 # Now that we have all these utility functions to help out and
1496 # Now that we have all these utility functions to help out and
1483 # logically divide up the task, generate the group.
1497 # logically divide up the task, generate the group.
1484 def gengroup():
1498 def gengroup():
1485 # The set of changed files starts empty.
1499 # The set of changed files starts empty.
1486 changedfiles = {}
1500 changedfiles = {}
1487 # Create a changenode group generator that will call our functions
1501 # Create a changenode group generator that will call our functions
1488 # back to lookup the owning changenode and collect information.
1502 # back to lookup the owning changenode and collect information.
1489 group = cl.group(msng_cl_lst, identity,
1503 group = cl.group(msng_cl_lst, identity,
1490 manifest_and_file_collector(changedfiles))
1504 manifest_and_file_collector(changedfiles))
1491 for chnk in group:
1505 for chnk in group:
1492 yield chnk
1506 yield chnk
1493
1507
1494 # The list of manifests has been collected by the generator
1508 # The list of manifests has been collected by the generator
1495 # calling our functions back.
1509 # calling our functions back.
1496 prune_manifests()
1510 prune_manifests()
1497 msng_mnfst_lst = msng_mnfst_set.keys()
1511 msng_mnfst_lst = msng_mnfst_set.keys()
1498 # Sort the manifestnodes by revision number.
1512 # Sort the manifestnodes by revision number.
1499 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1513 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1500 # Create a generator for the manifestnodes that calls our lookup
1514 # Create a generator for the manifestnodes that calls our lookup
1501 # and data collection functions back.
1515 # and data collection functions back.
1502 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1516 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1503 filenode_collector(changedfiles))
1517 filenode_collector(changedfiles))
1504 for chnk in group:
1518 for chnk in group:
1505 yield chnk
1519 yield chnk
1506
1520
1507 # These are no longer needed, dereference and toss the memory for
1521 # These are no longer needed, dereference and toss the memory for
1508 # them.
1522 # them.
1509 msng_mnfst_lst = None
1523 msng_mnfst_lst = None
1510 msng_mnfst_set.clear()
1524 msng_mnfst_set.clear()
1511
1525
1512 changedfiles = changedfiles.keys()
1526 changedfiles = changedfiles.keys()
1513 changedfiles.sort()
1527 changedfiles.sort()
1514 # Go through all our files in order sorted by name.
1528 # Go through all our files in order sorted by name.
1515 for fname in changedfiles:
1529 for fname in changedfiles:
1516 filerevlog = self.file(fname)
1530 filerevlog = self.file(fname)
1517 # Toss out the filenodes that the recipient isn't really
1531 # Toss out the filenodes that the recipient isn't really
1518 # missing.
1532 # missing.
1519 if msng_filenode_set.has_key(fname):
1533 if msng_filenode_set.has_key(fname):
1520 prune_filenodes(fname, filerevlog)
1534 prune_filenodes(fname, filerevlog)
1521 msng_filenode_lst = msng_filenode_set[fname].keys()
1535 msng_filenode_lst = msng_filenode_set[fname].keys()
1522 else:
1536 else:
1523 msng_filenode_lst = []
1537 msng_filenode_lst = []
1524 # If any filenodes are left, generate the group for them,
1538 # If any filenodes are left, generate the group for them,
1525 # otherwise don't bother.
1539 # otherwise don't bother.
1526 if len(msng_filenode_lst) > 0:
1540 if len(msng_filenode_lst) > 0:
1527 yield changegroup.genchunk(fname)
1541 yield changegroup.genchunk(fname)
1528 # Sort the filenodes by their revision #
1542 # Sort the filenodes by their revision #
1529 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1543 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1530 # Create a group generator and only pass in a changenode
1544 # Create a group generator and only pass in a changenode
1531 # lookup function as we need to collect no information
1545 # lookup function as we need to collect no information
1532 # from filenodes.
1546 # from filenodes.
1533 group = filerevlog.group(msng_filenode_lst,
1547 group = filerevlog.group(msng_filenode_lst,
1534 lookup_filenode_link_func(fname))
1548 lookup_filenode_link_func(fname))
1535 for chnk in group:
1549 for chnk in group:
1536 yield chnk
1550 yield chnk
1537 if msng_filenode_set.has_key(fname):
1551 if msng_filenode_set.has_key(fname):
1538 # Don't need this anymore, toss it to free memory.
1552 # Don't need this anymore, toss it to free memory.
1539 del msng_filenode_set[fname]
1553 del msng_filenode_set[fname]
1540 # Signal that no more groups are left.
1554 # Signal that no more groups are left.
1541 yield changegroup.closechunk()
1555 yield changegroup.closechunk()
1542
1556
1543 if msng_cl_lst:
1557 if msng_cl_lst:
1544 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1558 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1545
1559
1546 return util.chunkbuffer(gengroup())
1560 return util.chunkbuffer(gengroup())
1547
1561
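# Illustrative sketch (not part of the original changeset): the object
# returned above is a file-like chunk buffer, so a caller can read() it to
# stream the raw changegroup data somewhere, for instance into a plain
# file.  A real 'hg bundle' additionally writes a header and may compress
# the data; that happens outside this file and is not shown here.
def example_write_raw_changegroup(repo, bases, heads, filename):
    cg = repo.changegroupsubset(bases, heads, 'bundle')
    f = open(filename, 'wb')
    try:
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(chunk)
    finally:
        f.close()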
1548 def changegroup(self, basenodes, source):
1562 def changegroup(self, basenodes, source):
1549 """Generate a changegroup of all nodes that we have that a recipient
1563 """Generate a changegroup of all nodes that we have that a recipient
1550 doesn't.
1564 doesn't.
1551
1565
1552 This is much easier than the previous function as we can assume that
1566 This is much easier than the previous function as we can assume that
1553 the recipient has any changenode we aren't sending them."""
1567 the recipient has any changenode we aren't sending them."""
1554
1568
1555 self.hook('preoutgoing', throw=True, source=source)
1569 self.hook('preoutgoing', throw=True, source=source)
1556
1570
1557 cl = self.changelog
1571 cl = self.changelog
1558 nodes = cl.nodesbetween(basenodes, None)[0]
1572 nodes = cl.nodesbetween(basenodes, None)[0]
1559 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1573 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1560
1574
1561 def identity(x):
1575 def identity(x):
1562 return x
1576 return x
1563
1577
1564 def gennodelst(revlog):
1578 def gennodelst(revlog):
1565 for r in xrange(0, revlog.count()):
1579 for r in xrange(0, revlog.count()):
1566 n = revlog.node(r)
1580 n = revlog.node(r)
1567 if revlog.linkrev(n) in revset:
1581 if revlog.linkrev(n) in revset:
1568 yield n
1582 yield n
1569
1583
1570 def changed_file_collector(changedfileset):
1584 def changed_file_collector(changedfileset):
1571 def collect_changed_files(clnode):
1585 def collect_changed_files(clnode):
1572 c = cl.read(clnode)
1586 c = cl.read(clnode)
1573 for fname in c[3]:
1587 for fname in c[3]:
1574 changedfileset[fname] = 1
1588 changedfileset[fname] = 1
1575 return collect_changed_files
1589 return collect_changed_files
1576
1590
1577 def lookuprevlink_func(revlog):
1591 def lookuprevlink_func(revlog):
1578 def lookuprevlink(n):
1592 def lookuprevlink(n):
1579 return cl.node(revlog.linkrev(n))
1593 return cl.node(revlog.linkrev(n))
1580 return lookuprevlink
1594 return lookuprevlink
1581
1595
1582 def gengroup():
1596 def gengroup():
1583 # construct a list of all changed files
1597 # construct a list of all changed files
1584 changedfiles = {}
1598 changedfiles = {}
1585
1599
1586 for chnk in cl.group(nodes, identity,
1600 for chnk in cl.group(nodes, identity,
1587 changed_file_collector(changedfiles)):
1601 changed_file_collector(changedfiles)):
1588 yield chnk
1602 yield chnk
1589 changedfiles = changedfiles.keys()
1603 changedfiles = changedfiles.keys()
1590 changedfiles.sort()
1604 changedfiles.sort()
1591
1605
1592 mnfst = self.manifest
1606 mnfst = self.manifest
1593 nodeiter = gennodelst(mnfst)
1607 nodeiter = gennodelst(mnfst)
1594 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1608 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1595 yield chnk
1609 yield chnk
1596
1610
1597 for fname in changedfiles:
1611 for fname in changedfiles:
1598 filerevlog = self.file(fname)
1612 filerevlog = self.file(fname)
1599 nodeiter = gennodelst(filerevlog)
1613 nodeiter = gennodelst(filerevlog)
1600 nodeiter = list(nodeiter)
1614 nodeiter = list(nodeiter)
1601 if nodeiter:
1615 if nodeiter:
1602 yield changegroup.genchunk(fname)
1616 yield changegroup.genchunk(fname)
1603 lookup = lookuprevlink_func(filerevlog)
1617 lookup = lookuprevlink_func(filerevlog)
1604 for chnk in filerevlog.group(nodeiter, lookup):
1618 for chnk in filerevlog.group(nodeiter, lookup):
1605 yield chnk
1619 yield chnk
1606
1620
1607 yield changegroup.closechunk()
1621 yield changegroup.closechunk()
1608
1622
1609 if nodes:
1623 if nodes:
1610 self.hook('outgoing', node=hex(nodes[0]), source=source)
1624 self.hook('outgoing', node=hex(nodes[0]), source=source)
1611
1625
1612 return util.chunkbuffer(gengroup())
1626 return util.chunkbuffer(gengroup())
1613
1627
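# Illustrative sketch (not part of the original changeset): copying
# changesets between two local repositories in-process by feeding one
# repository's changegroup() straight into the other's addchangegroup(),
# the same pairing pull() uses above.  'src' and 'dst' are assumed to be
# localrepository instances.
def example_local_transfer(src, dst):
    missing = dst.findincoming(src)
    if not missing:
        return 0
    cg = src.changegroup(missing, 'pull')    # file-like chunk stream
    return dst.addchangegroup(cg, 'pull', src.url())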
1614 def addchangegroup(self, source, srctype, url):
1628 def addchangegroup(self, source, srctype, url):
1615 """add changegroup to repo.
1629 """add changegroup to repo.
1616 returns number of heads modified or added + 1."""
1630 returns number of heads modified or added + 1."""
1617
1631
1618 def csmap(x):
1632 def csmap(x):
1619 self.ui.debug(_("add changeset %s\n") % short(x))
1633 self.ui.debug(_("add changeset %s\n") % short(x))
1620 return cl.count()
1634 return cl.count()
1621
1635
1622 def revmap(x):
1636 def revmap(x):
1623 return cl.rev(x)
1637 return cl.rev(x)
1624
1638
1625 if not source:
1639 if not source:
1626 return 0
1640 return 0
1627
1641
1628 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1642 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1629
1643
1630 changesets = files = revisions = 0
1644 changesets = files = revisions = 0
1631
1645
1632 tr = self.transaction()
1646 tr = self.transaction()
1633
1647
1634 # write changelog data to temp files so concurrent readers will not see
1648 # write changelog data to temp files so concurrent readers will not see
1635 # inconsistent view
1649 # inconsistent view
1636 cl = None
1650 cl = None
1637 try:
1651 try:
1638 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1652 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1639
1653
1640 oldheads = len(cl.heads())
1654 oldheads = len(cl.heads())
1641
1655
1642 # pull off the changeset group
1656 # pull off the changeset group
1643 self.ui.status(_("adding changesets\n"))
1657 self.ui.status(_("adding changesets\n"))
1644 cor = cl.count() - 1
1658 cor = cl.count() - 1
1645 chunkiter = changegroup.chunkiter(source)
1659 chunkiter = changegroup.chunkiter(source)
1646 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1660 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1647 raise util.Abort(_("received changelog group is empty"))
1661 raise util.Abort(_("received changelog group is empty"))
1648 cnr = cl.count() - 1
1662 cnr = cl.count() - 1
1649 changesets = cnr - cor
1663 changesets = cnr - cor
1650
1664
1651 # pull off the manifest group
1665 # pull off the manifest group
1652 self.ui.status(_("adding manifests\n"))
1666 self.ui.status(_("adding manifests\n"))
1653 chunkiter = changegroup.chunkiter(source)
1667 chunkiter = changegroup.chunkiter(source)
1654 # no need to check for empty manifest group here:
1668 # no need to check for empty manifest group here:
1655 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1669 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1656 # no new manifest will be created and the manifest group will
1670 # no new manifest will be created and the manifest group will
1657 # be empty during the pull
1671 # be empty during the pull
1658 self.manifest.addgroup(chunkiter, revmap, tr)
1672 self.manifest.addgroup(chunkiter, revmap, tr)
1659
1673
1660 # process the files
1674 # process the files
1661 self.ui.status(_("adding file changes\n"))
1675 self.ui.status(_("adding file changes\n"))
1662 while 1:
1676 while 1:
1663 f = changegroup.getchunk(source)
1677 f = changegroup.getchunk(source)
1664 if not f:
1678 if not f:
1665 break
1679 break
1666 self.ui.debug(_("adding %s revisions\n") % f)
1680 self.ui.debug(_("adding %s revisions\n") % f)
1667 fl = self.file(f)
1681 fl = self.file(f)
1668 o = fl.count()
1682 o = fl.count()
1669 chunkiter = changegroup.chunkiter(source)
1683 chunkiter = changegroup.chunkiter(source)
1670 if fl.addgroup(chunkiter, revmap, tr) is None:
1684 if fl.addgroup(chunkiter, revmap, tr) is None:
1671 raise util.Abort(_("received file revlog group is empty"))
1685 raise util.Abort(_("received file revlog group is empty"))
1672 revisions += fl.count() - o
1686 revisions += fl.count() - o
1673 files += 1
1687 files += 1
1674
1688
1675 cl.writedata()
1689 cl.writedata()
1676 finally:
1690 finally:
1677 if cl:
1691 if cl:
1678 cl.cleanup()
1692 cl.cleanup()
1679
1693
1680 # make changelog see real files again
1694 # make changelog see real files again
1681 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1695 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1682 self.changelog.checkinlinesize(tr)
1696 self.changelog.checkinlinesize(tr)
1683
1697
1684 newheads = len(self.changelog.heads())
1698 newheads = len(self.changelog.heads())
1685 heads = ""
1699 heads = ""
1686 if oldheads and newheads != oldheads:
1700 if oldheads and newheads != oldheads:
1687 heads = _(" (%+d heads)") % (newheads - oldheads)
1701 heads = _(" (%+d heads)") % (newheads - oldheads)
1688
1702
1689 self.ui.status(_("added %d changesets"
1703 self.ui.status(_("added %d changesets"
1690 " with %d changes to %d files%s\n")
1704 " with %d changes to %d files%s\n")
1691 % (changesets, revisions, files, heads))
1705 % (changesets, revisions, files, heads))
1692
1706
1693 if changesets > 0:
1707 if changesets > 0:
1694 self.hook('pretxnchangegroup', throw=True,
1708 self.hook('pretxnchangegroup', throw=True,
1695 node=hex(self.changelog.node(cor+1)), source=srctype,
1709 node=hex(self.changelog.node(cor+1)), source=srctype,
1696 url=url)
1710 url=url)
1697
1711
1698 tr.close()
1712 tr.close()
1699
1713
1700 if changesets > 0:
1714 if changesets > 0:
1701 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1715 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1702 source=srctype, url=url)
1716 source=srctype, url=url)
1703
1717
1704 for i in range(cor + 1, cnr + 1):
1718 for i in range(cor + 1, cnr + 1):
1705 self.hook("incoming", node=hex(self.changelog.node(i)),
1719 self.hook("incoming", node=hex(self.changelog.node(i)),
1706 source=srctype, url=url)
1720 source=srctype, url=url)
1707
1721
1708 return newheads - oldheads + 1
1722 return newheads - oldheads + 1
1709
1723
1710
1724
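# Illustrative sketch (not part of the original changeset): interpreting
# the value documented above ("number of heads modified or added + 1") the
# way a pull caller might.  The helper name and the messages are
# placeholders, not the real command-layer output.
def example_report_pull(ui, modheads):
    if modheads == 0:
        ui.status(_("nothing was added\n"))
    elif modheads == 1:
        ui.status(_("no new heads; update is enough\n"))
    else:
        ui.status(_("new heads created; a merge may be needed\n"))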
1711 def stream_in(self, remote):
1725 def stream_in(self, remote):
1712 fp = remote.stream_out()
1726 fp = remote.stream_out()
1713 resp = int(fp.readline())
1727 resp = int(fp.readline())
1714 if resp != 0:
1728 if resp != 0:
1715 raise util.Abort(_('operation forbidden by server'))
1729 raise util.Abort(_('operation forbidden by server'))
1716 self.ui.status(_('streaming all changes\n'))
1730 self.ui.status(_('streaming all changes\n'))
1717 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1731 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1718 self.ui.status(_('%d files to transfer, %s of data\n') %
1732 self.ui.status(_('%d files to transfer, %s of data\n') %
1719 (total_files, util.bytecount(total_bytes)))
1733 (total_files, util.bytecount(total_bytes)))
1720 start = time.time()
1734 start = time.time()
1721 for i in xrange(total_files):
1735 for i in xrange(total_files):
1722 name, size = fp.readline().split('\0', 1)
1736 name, size = fp.readline().split('\0', 1)
1723 size = int(size)
1737 size = int(size)
1724 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1738 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1725 ofp = self.opener(name, 'w')
1739 ofp = self.opener(name, 'w')
1726 for chunk in util.filechunkiter(fp, limit=size):
1740 for chunk in util.filechunkiter(fp, limit=size):
1727 ofp.write(chunk)
1741 ofp.write(chunk)
1728 ofp.close()
1742 ofp.close()
1729 elapsed = time.time() - start
1743 elapsed = time.time() - start
1730 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1744 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1731 (util.bytecount(total_bytes), elapsed,
1745 (util.bytecount(total_bytes), elapsed,
1732 util.bytecount(total_bytes / elapsed)))
1746 util.bytecount(total_bytes / elapsed)))
1733 self.reload()
1747 self.reload()
1734 return len(self.heads()) + 1
1748 return len(self.heads()) + 1
1735
1749
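# Illustrative sketch (not part of the original changeset): the wire format
# stream_in() parses above, written from the sending side.  The real
# producer is the server's stream_out command, which is not in this file;
# this only shows the framing: a status line, a "files bytes" line, then
# for every file a "name\0size" line followed by exactly that many bytes of
# raw revlog data.
def example_emit_stream(fp, entries):
    # 'entries' is assumed to be a list of (name, data) string pairs
    fp.write('0\n')                                  # 0 == request allowed
    total = sum([len(data) for name, data in entries])
    fp.write('%d %d\n' % (len(entries), total))
    for name, data in entries:
        fp.write('%s\0%d\n' % (name, len(data)))
        fp.write(data)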
1736 def clone(self, remote, heads=[], stream=False):
1750 def clone(self, remote, heads=[], stream=False):
1737 '''clone remote repository.
1751 '''clone remote repository.
1738
1752
1739 keyword arguments:
1753 keyword arguments:
1740 heads: list of revs to clone (forces use of pull)
1754 heads: list of revs to clone (forces use of pull)
1741 stream: use streaming clone if possible'''
1755 stream: use streaming clone if possible'''
1742
1756
1743 # now, all clients that can request uncompressed clones can
1757 # now, all clients that can request uncompressed clones can
1744 # read repo formats supported by all servers that can serve
1758 # read repo formats supported by all servers that can serve
1745 # them.
1759 # them.
1746
1760
1747 # if revlog format changes, client will have to check version
1761 # if revlog format changes, client will have to check version
1748 # and format flags on "stream" capability, and use
1762 # and format flags on "stream" capability, and use
1749 # uncompressed only if compatible.
1763 # uncompressed only if compatible.
1750
1764
1751 if stream and not heads and remote.capable('stream'):
1765 if stream and not heads and remote.capable('stream'):
1752 return self.stream_in(remote)
1766 return self.stream_in(remote)
1753 return self.pull(remote, heads)
1767 return self.pull(remote, heads)
1754
1768
1755 # used to avoid circular references so destructors work
1769 # used to avoid circular references so destructors work
1756 def aftertrans(base):
1770 def aftertrans(base):
1757 p = base
1771 p = base
1758 def a():
1772 def a():
1759 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1773 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1760 util.rename(os.path.join(p, "journal.dirstate"),
1774 util.rename(os.path.join(p, "journal.dirstate"),
1761 os.path.join(p, "undo.dirstate"))
1775 os.path.join(p, "undo.dirstate"))
1762 return a
1776 return a
1763
1777
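# Illustrative sketch (not part of the original changeset): aftertrans()
# builds the callback that renames the journal files into undo files once a
# transaction completes, which is what makes 'hg rollback' possible.
# Passing it as the 'after' argument of the transaction, as sketched here,
# is an assumption about the (unshown) transaction() method; only the shape
# of the call is illustrated.
def example_open_transaction(repo):
    return transaction.transaction(repo.ui.warn, repo.opener,
                                   repo.join("journal"),
                                   aftertrans(repo.path))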
1764 def instance(ui, path, create):
1778 def instance(ui, path, create):
1765 return localrepository(ui, util.drop_scheme('file', path), create)
1779 return localrepository(ui, util.drop_scheme('file', path), create)
1766
1780
1767 def islocal(path):
1781 def islocal(path):
1768 return True
1782 return True
@@ -1,463 +1,465 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "errno util os tempfile")
11 demandload(globals(), "errno util os tempfile")
12
12
13 def filemerge(repo, fw, fo, fd, my, other, p1, p2, move):
13 def filemerge(repo, fw, fo, fd, my, other, p1, p2, move):
14 """perform a 3-way merge in the working directory
14 """perform a 3-way merge in the working directory
15
15
16 fw = filename in the working directory and first parent
16 fw = filename in the working directory and first parent
17 fo = filename in other parent
17 fo = filename in other parent
18 fd = destination filename
18 fd = destination filename
19 my = fileid in first parent
19 my = fileid in first parent
20 other = fileid in second parent
20 other = fileid in second parent
21 p1, p2 = hex changeset ids for merge command
21 p1, p2 = hex changeset ids for merge command
22 move = whether to move or copy the file to the destination
22 move = whether to move or copy the file to the destination
23
23
24 TODO:
24 TODO:
25 if fw is copied in the working directory, we get confused
25 if fw is copied in the working directory, we get confused
26 implement move and fd
26 implement move and fd
27 """
27 """
28
28
29 def temp(prefix, ctx):
29 def temp(prefix, ctx):
30 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
30 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
31 (fd, name) = tempfile.mkstemp(prefix=pre)
31 (fd, name) = tempfile.mkstemp(prefix=pre)
32 f = os.fdopen(fd, "wb")
32 f = os.fdopen(fd, "wb")
33 repo.wwrite(ctx.path(), ctx.data(), f)
33 repo.wwrite(ctx.path(), ctx.data(), f)
34 f.close()
34 f.close()
35 return name
35 return name
36
36
37 fcm = repo.filectx(fw, fileid=my)
37 fcm = repo.filectx(fw, fileid=my)
38 fco = repo.filectx(fo, fileid=other)
38 fco = repo.filectx(fo, fileid=other)
39 fca = fcm.ancestor(fco)
39 fca = fcm.ancestor(fco)
40 if not fca:
40 if not fca:
41 fca = repo.filectx(fw, fileid=-1)
41 fca = repo.filectx(fw, fileid=-1)
42 a = repo.wjoin(fw)
42 a = repo.wjoin(fw)
43 b = temp("base", fca)
43 b = temp("base", fca)
44 c = temp("other", fco)
44 c = temp("other", fco)
45
45
46 repo.ui.note(_("resolving %s\n") % fw)
46 repo.ui.note(_("resolving %s\n") % fw)
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': p1,
53 'HG_MY_NODE': p1,
54 'HG_OTHER_NODE': p2})
54 'HG_OTHER_NODE': p2})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57 else:
57 else:
58 if fd != fw:
58 if fd != fw:
59 repo.ui.debug(_("copying %s to %s\n") % (fw, fd))
59 repo.ui.debug(_("copying %s to %s\n") % (fw, fd))
60 repo.wwrite(fd, repo.wread(fw))
60 repo.wwrite(fd, repo.wread(fw))
61 if move:
61 if move:
62 repo.ui.debug(_("removing %s\n") % fw)
62 repo.ui.debug(_("removing %s\n") % fw)
63 os.unlink(a)
63 os.unlink(a)
64
64
65 os.unlink(b)
65 os.unlink(b)
66 os.unlink(c)
66 os.unlink(c)
67 return r
67 return r
68
68
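filemerge() above shells out to an external tool (HGMERGE, ui.merge, or hgmerge) with three arguments, the local working copy, the ancestor, and the other parent's version, and exports HG_FILE, HG_MY_NODE and HG_OTHER_NODE; the tool must leave its result in the local file and exit 0 on success. Purely as an illustration of that contract (this script is an assumption, not shipped code), a minimal merge tool could look like:

    #!/usr/bin/env python
    # naive_merge.py - toy HGMERGE-style tool, illustrative only
    # usage: naive_merge.py LOCAL BASE OTHER
    import os
    import sys

    def main(argv):
        local, base, other = argv[1:4]
        print("merging %s (parents %s / %s)" % (
            os.environ.get("HG_FILE", local),
            os.environ.get("HG_MY_NODE", "?"),
            os.environ.get("HG_OTHER_NODE", "?")))
        data = [open(p, "rb").read() for p in (local, base, other)]
        if data[0] == data[1]:
            # local side unchanged: take the other parent's version
            open(local, "wb").write(data[2])
            return 0
        if data[2] in (data[1], data[0]):
            # other side unchanged, or both sides identical: keep local
            return 0
        return 1  # genuine conflict: nonzero tells filemerge() the merge failed

    if __name__ == "__main__":
        sys.exit(main(sys.argv))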
69 def checkunknown(repo, m2, wctx):
69 def checkunknown(repo, m2, wctx):
70 """
70 """
71 check for collisions between unknown files and files in m2
71 check for collisions between unknown files and files in m2
72 """
72 """
73 for f in wctx.unknown():
73 for f in wctx.unknown():
74 if f in m2:
74 if f in m2:
75 if repo.file(f).cmp(m2[f], repo.wread(f)):
75 if repo.file(f).cmp(m2[f], repo.wread(f)):
76 raise util.Abort(_("'%s' already exists in the working"
76 raise util.Abort(_("'%s' already exists in the working"
77 " dir and differs from remote") % f)
77 " dir and differs from remote") % f)
78
78
79 def forgetremoved(m2, wctx):
79 def forgetremoved(m2, wctx):
80 """
80 """
81 Forget removed files
81 Forget removed files
82
82
83 If we're jumping between revisions (as opposed to merging), and if
83 If we're jumping between revisions (as opposed to merging), and if
84 neither the working directory nor the target rev has the file,
84 neither the working directory nor the target rev has the file,
85 then we need to remove it from the dirstate, to prevent the
85 then we need to remove it from the dirstate, to prevent the
86 dirstate from listing the file when it is no longer in the
86 dirstate from listing the file when it is no longer in the
87 manifest.
87 manifest.
88 """
88 """
89
89
90 action = []
90 action = []
91
91
92 for f in wctx.deleted() + wctx.removed():
92 for f in wctx.deleted() + wctx.removed():
93 if f not in m2:
93 if f not in m2:
94 action.append((f, "f"))
94 action.append((f, "f"))
95
95
96 return action
96 return action
97
97
98 def nonoverlap(d1, d2):
98 def nonoverlap(d1, d2):
99 """
99 """
100 Return list of elements in d1 not in d2
100 Return list of elements in d1 not in d2
101 """
101 """
102
102
103 l = []
103 l = []
104 for d in d1:
104 for d in d1:
105 if d not in d2:
105 if d not in d2:
106 l.append(d)
106 l.append(d)
107
107
108 l.sort()
108 l.sort()
109 return l
109 return l
110
110
111 def findold(fctx, limit):
111 def findold(fctx, limit):
112 """
112 """
113 find files that path was copied from, back to linkrev limit
113 find files that path was copied from, back to linkrev limit
114 """
114 """
115
115
116 old = {}
116 old = {}
117 orig = fctx.path()
117 orig = fctx.path()
118 visit = [fctx]
118 visit = [fctx]
119 while visit:
119 while visit:
120 fc = visit.pop()
120 fc = visit.pop()
121 if fc.rev() < limit:
121 if fc.rev() < limit:
122 continue
122 continue
123 if fc.path() != orig and fc.path() not in old:
123 if fc.path() != orig and fc.path() not in old:
124 old[fc.path()] = 1
124 old[fc.path()] = 1
125 visit += fc.parents()
125 visit += fc.parents()
126
126
127 old = old.keys()
127 old = old.keys()
128 old.sort()
128 old.sort()
129 return old
129 return old
130
130
131 def findcopies(repo, m1, m2, limit):
131 def findcopies(repo, m1, m2, limit):
132 """
132 """
133 Find moves and copies between m1 and m2 back to limit linkrev
133 Find moves and copies between m1 and m2 back to limit linkrev
134 """
134 """
135
135
136 if not repo.ui.config("merge", "followcopies"):
136 if not repo.ui.config("merge", "followcopies"):
137 return {}
137 return {}
138
138
139 # avoid silly behavior for update from empty dir
139 # avoid silly behavior for update from empty dir
140 if not m1:
140 if not m1:
141 return {}
141 return {}
142
142
143 dcopies = repo.dirstate.copies()
143 dcopies = repo.dirstate.copies()
144 copy = {}
144 copy = {}
145 match = {}
145 match = {}
146 u1 = nonoverlap(m1, m2)
146 u1 = nonoverlap(m1, m2)
147 u2 = nonoverlap(m2, m1)
147 u2 = nonoverlap(m2, m1)
148 ctx = util.cachefunc(lambda f,n: repo.filectx(f, fileid=n[:20]))
148 ctx = util.cachefunc(lambda f,n: repo.filectx(f, fileid=n[:20]))
149
149
150 def checkpair(c, f2, man):
150 def checkpair(c, f2, man):
151 ''' check if an apparent pair actually matches '''
151 ''' check if an apparent pair actually matches '''
152 c2 = ctx(f2, man[f2])
152 c2 = ctx(f2, man[f2])
153 ca = c.ancestor(c2)
153 ca = c.ancestor(c2)
154 if ca and ca.path() == c.path() or ca.path() == c2.path():
154 if ca and ca.path() == c.path() or ca.path() == c2.path():
155 copy[c.path()] = f2
155 copy[c.path()] = f2
156 copy[f2] = c.path()
156 copy[f2] = c.path()
157
157
158 for f in u1:
158 for f in u1:
159 c = ctx(dcopies.get(f, f), m1[f])
159 c = ctx(dcopies.get(f, f), m1[f])
160 for of in findold(c, limit):
160 for of in findold(c, limit):
161 if of in m2:
161 if of in m2:
162 checkpair(c, of, m2)
162 checkpair(c, of, m2)
163 else:
163 else:
164 match.setdefault(of, []).append(f)
164 match.setdefault(of, []).append(f)
165
165
166 for f in u2:
166 for f in u2:
167 c = ctx(f, m2[f])
167 c = ctx(f, m2[f])
168 for of in findold(c, limit):
168 for of in findold(c, limit):
169 if of in m1:
169 if of in m1:
170 checkpair(c, of, m1)
170 checkpair(c, of, m1)
171 elif of in match:
171 elif of in match:
172 for mf in match[of]:
172 for mf in match[of]:
173 checkpair(c, mf, m1)
173 checkpair(c, mf, m1)
174
174
175 return copy
175 return copy
176
176
177 def manifestmerge(ui, m1, m2, ma, copy, overwrite, backwards, partial):
177 def manifestmerge(ui, m1, m2, ma, copy, overwrite, backwards, partial):
178 """
178 """
179 Merge manifest m1 with m2 using ancestor ma and generate merge action list
179 Merge manifest m1 with m2 using ancestor ma and generate merge action list
180 """
180 """
181
181
182 def fmerge(f, f2=None, fa=None):
182 def fmerge(f, f2=None, fa=None):
183 """merge executable flags"""
183 """merge executable flags"""
184 if not f2:
184 if not f2:
185 f2 = f
185 f2 = f
186 fa = f
186 fa = f
187 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
187 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
188 return ((a^b) | (a^c)) ^ a
188 return ((a^b) | (a^c)) ^ a
189
189
190 action = []
190 action = []
191
191
192 def act(msg, f, m, *args):
192 def act(msg, f, m, *args):
193 ui.debug(" %s: %s -> %s\n" % (f, msg, m))
193 ui.debug(" %s: %s -> %s\n" % (f, msg, m))
194 action.append((f, m) + args)
194 action.append((f, m) + args)
195
195
196 # Compare manifests
196 # Compare manifests
197 for f, n in m1.iteritems():
197 for f, n in m1.iteritems():
198 if partial and not partial(f):
198 if partial and not partial(f):
199 continue
199 continue
200 if f in m2:
200 if f in m2:
201 # are files different?
201 # are files different?
202 if n != m2[f]:
202 if n != m2[f]:
203 a = ma.get(f, nullid)
203 a = ma.get(f, nullid)
204 # are both different from the ancestor?
204 # are both different from the ancestor?
205 if not overwrite and n != a and m2[f] != a:
205 if not overwrite and n != a and m2[f] != a:
206 act("versions differ", f, "m", fmerge(f), n[:20], m2[f])
206 act("versions differ", f, "m", fmerge(f), n[:20], m2[f])
207 # are we clobbering?
207 # are we clobbering?
208 # is remote's version newer?
208 # is remote's version newer?
209 # or are we going back in time and clean?
209 # or are we going back in time and clean?
210 elif overwrite or m2[f] != a or (backwards and not n[20:]):
210 elif overwrite or m2[f] != a or (backwards and not n[20:]):
211 act("remote is newer", f, "g", m2.execf(f), m2[f])
211 act("remote is newer", f, "g", m2.execf(f), m2[f])
212 # local is newer, not overwrite, check mode bits
212 # local is newer, not overwrite, check mode bits
213 elif fmerge(f) != m1.execf(f):
213 elif fmerge(f) != m1.execf(f):
214 act("update permissions", f, "e", m2.execf(f))
214 act("update permissions", f, "e", m2.execf(f))
215 # contents same, check mode bits
215 # contents same, check mode bits
216 elif m1.execf(f) != m2.execf(f):
216 elif m1.execf(f) != m2.execf(f):
217 if overwrite or fmerge(f) != m1.execf(f):
217 if overwrite or fmerge(f) != m1.execf(f):
218 act("update permissions", f, "e", m2.execf(f))
218 act("update permissions", f, "e", m2.execf(f))
219 elif f in copy:
219 elif f in copy:
220 f2 = copy[f]
220 f2 = copy[f]
221 if f in ma: # case 3,20 A/B/A
221 if f in ma: # case 3,20 A/B/A
222 act("remote moved",
222 act("remote moved",
223 f, "c", f2, f2, m1[f], m2[f2], fmerge(f, f2, f), True)
223 f, "c", f2, f2, m1[f], m2[f2], fmerge(f, f2, f), True)
224 else:
224 else:
225 if f2 in m1: # case 2 A,B/B/B
225 if f2 in m1: # case 2 A,B/B/B
226 act("local copied",
226 act("local copied",
227 f, "c", f2, f, m1[f], m2[f2], fmerge(f, f2, f2), False)
227 f, "c", f2, f, m1[f], m2[f2], fmerge(f, f2, f2), False)
228 else: # case 4,21 A/B/B
228 else: # case 4,21 A/B/B
229 act("local moved",
229 act("local moved",
230 f, "c", f2, f, m1[f], m2[f2], fmerge(f, f2, f2), False)
230 f, "c", f2, f, m1[f], m2[f2], fmerge(f, f2, f2), False)
231 elif f in ma:
231 elif f in ma:
232 if n != ma[f] and not overwrite:
232 if n != ma[f] and not overwrite:
233 if ui.prompt(
233 if ui.prompt(
234 (_(" local changed %s which remote deleted\n") % f) +
234 (_(" local changed %s which remote deleted\n") % f) +
235 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
235 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
236 act("prompt delete", f, "r")
236 act("prompt delete", f, "r")
237 else:
237 else:
238 act("other deleted", f, "r")
238 act("other deleted", f, "r")
239 else:
239 else:
240 # file is created on branch or in working directory
240 # file is created on branch or in working directory
241 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
241 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
242 act("remote deleted", f, "r")
242 act("remote deleted", f, "r")
243
243
244 for f, n in m2.iteritems():
244 for f, n in m2.iteritems():
245 if partial and not partial(f):
245 if partial and not partial(f):
246 continue
246 continue
247 if f in m1:
247 if f in m1:
248 continue
248 continue
249 if f in copy:
249 if f in copy:
250 f2 = copy[f]
250 f2 = copy[f]
251 if f2 not in m2: # already seen
251 if f2 not in m2: # already seen
252 continue
252 continue
253 # rename case 1, A/A,B/A
253 # rename case 1, A/A,B/A
254 act("remote copied",
254 act("remote copied",
255 f2, "c", f, f, m1[f2], m2[f], fmerge(f2, f, f2), False)
255 f2, "c", f, f, m1[f2], m2[f], fmerge(f2, f, f2), False)
256 elif f in ma:
256 elif f in ma:
257 if overwrite or backwards:
257 if overwrite or backwards:
258 act("recreating", f, "g", m2.execf(f), n)
258 act("recreating", f, "g", m2.execf(f), n)
259 elif n != ma[f]:
259 elif n != ma[f]:
260 if ui.prompt(
260 if ui.prompt(
261 (_("remote changed %s which local deleted\n") % f) +
261 (_("remote changed %s which local deleted\n") % f) +
262 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
262 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
263 act("prompt recreating", f, "g", m2.execf(f), n)
263 act("prompt recreating", f, "g", m2.execf(f), n)
264 else:
264 else:
265 act("remote created", f, "g", m2.execf(f), n)
265 act("remote created", f, "g", m2.execf(f), n)
266
266
267 return action
267 return action
268
268
269 def applyupdates(repo, action, xp1, xp2):
269 def applyupdates(repo, action, xp1, xp2):
270 updated, merged, removed, unresolved = 0, 0, 0, 0
270 updated, merged, removed, unresolved = 0, 0, 0, 0
271 action.sort()
271 action.sort()
272 for a in action:
272 for a in action:
273 f, m = a[:2]
273 f, m = a[:2]
274 if f[0] == "/":
274 if f[0] == "/":
275 continue
275 continue
276 if m == "r": # remove
276 if m == "r": # remove
277 repo.ui.note(_("removing %s\n") % f)
277 repo.ui.note(_("removing %s\n") % f)
278 util.audit_path(f)
278 util.audit_path(f)
279 try:
279 try:
280 util.unlink(repo.wjoin(f))
280 util.unlink(repo.wjoin(f))
281 except OSError, inst:
281 except OSError, inst:
282 if inst.errno != errno.ENOENT:
282 if inst.errno != errno.ENOENT:
283 repo.ui.warn(_("update failed to remove %s: %s!\n") %
283 repo.ui.warn(_("update failed to remove %s: %s!\n") %
284 (f, inst.strerror))
284 (f, inst.strerror))
285 removed +=1
285 removed +=1
286 elif m == "c": # copy
286 elif m == "c": # copy
287 f2, fd, my, other, flag, move = a[2:]
287 f2, fd, my, other, flag, move = a[2:]
288 repo.ui.status(_("merging %s and %s to %s\n") % (f, f2, fd))
288 repo.ui.status(_("merging %s and %s to %s\n") % (f, f2, fd))
289 if filemerge(repo, f, f2, fd, my, other, xp1, xp2, move):
289 if filemerge(repo, f, f2, fd, my, other, xp1, xp2, move):
290 unresolved += 1
290 unresolved += 1
291 util.set_exec(repo.wjoin(fd), flag)
291 util.set_exec(repo.wjoin(fd), flag)
292 merged += 1
292 merged += 1
293 elif m == "m": # merge
293 elif m == "m": # merge
294 flag, my, other = a[2:]
294 flag, my, other = a[2:]
295 repo.ui.status(_("merging %s\n") % f)
295 repo.ui.status(_("merging %s\n") % f)
296 if filemerge(repo, f, f, f, my, other, xp1, xp2, False):
296 if filemerge(repo, f, f, f, my, other, xp1, xp2, False):
297 unresolved += 1
297 unresolved += 1
298 util.set_exec(repo.wjoin(f), flag)
298 util.set_exec(repo.wjoin(f), flag)
299 merged += 1
299 merged += 1
300 elif m == "g": # get
300 elif m == "g": # get
301 flag, node = a[2:]
301 flag, node = a[2:]
302 repo.ui.note(_("getting %s\n") % f)
302 repo.ui.note(_("getting %s\n") % f)
303 t = repo.file(f).read(node)
303 t = repo.file(f).read(node)
304 repo.wwrite(f, t)
304 repo.wwrite(f, t)
305 util.set_exec(repo.wjoin(f), flag)
305 util.set_exec(repo.wjoin(f), flag)
306 updated += 1
306 updated += 1
307 elif m == "e": # exec
307 elif m == "e": # exec
308 flag = a[2:]
308 flag = a[2:]
309 util.set_exec(repo.wjoin(f), flag)
309 util.set_exec(repo.wjoin(f), flag)
310
310
311 return updated, merged, removed, unresolved
311 return updated, merged, removed, unresolved
312
312
313 def recordupdates(repo, action, branchmerge):
313 def recordupdates(repo, action, branchmerge):
314 for a in action:
314 for a in action:
315 f, m = a[:2]
315 f, m = a[:2]
316 if m == "r": # remove
316 if m == "r": # remove
317 if branchmerge:
317 if branchmerge:
318 repo.dirstate.update([f], 'r')
318 repo.dirstate.update([f], 'r')
319 else:
319 else:
320 repo.dirstate.forget([f])
320 repo.dirstate.forget([f])
321 elif m == "f": # forget
321 elif m == "f": # forget
322 repo.dirstate.forget([f])
322 repo.dirstate.forget([f])
323 elif m == "g": # get
323 elif m == "g": # get
324 if branchmerge:
324 if branchmerge:
325 repo.dirstate.update([f], 'n', st_mtime=-1)
325 repo.dirstate.update([f], 'n', st_mtime=-1)
326 else:
326 else:
327 repo.dirstate.update([f], 'n')
327 repo.dirstate.update([f], 'n')
328 elif m == "m": # merge
328 elif m == "m": # merge
329 flag, my, other = a[2:]
329 flag, my, other = a[2:]
330 if branchmerge:
330 if branchmerge:
331 # We've done a branch merge, mark this file as merged
331 # We've done a branch merge, mark this file as merged
332 # so that we properly record the merger later
332 # so that we properly record the merger later
333 repo.dirstate.update([f], 'm')
333 repo.dirstate.update([f], 'm')
334 else:
334 else:
335 # We've update-merged a locally modified file, so
335 # We've update-merged a locally modified file, so
336 # we set the dirstate to emulate a normal checkout
336 # we set the dirstate to emulate a normal checkout
337 # of that file some time in the past. Thus our
337 # of that file some time in the past. Thus our
338 # merge will appear as a normal local file
338 # merge will appear as a normal local file
339 # modification.
339 # modification.
340 fl = repo.file(f)
340 fl = repo.file(f)
341 f_len = fl.size(fl.rev(other))
341 f_len = fl.size(fl.rev(other))
342 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
342 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
343 elif m == "c": # copy
343 elif m == "c": # copy
344 f2, fd, my, other, flag, move = a[2:]
344 f2, fd, my, other, flag, move = a[2:]
345 if branchmerge:
345 if branchmerge:
346 # We've done a branch merge, mark this file as merged
346 # We've done a branch merge, mark this file as merged
347 # so that we properly record the merger later
347 # so that we properly record the merger later
348 repo.dirstate.update([fd], 'm')
348 repo.dirstate.update([fd], 'm')
349 else:
349 else:
350 # We've update-merged a locally modified file, so
350 # We've update-merged a locally modified file, so
351 # we set the dirstate to emulate a normal checkout
351 # we set the dirstate to emulate a normal checkout
352 # of that file some time in the past. Thus our
352 # of that file some time in the past. Thus our
353 # merge will appear as a normal local file
353 # merge will appear as a normal local file
354 # modification.
354 # modification.
355 fl = repo.file(f)
355 fl = repo.file(f)
356 f_len = fl.size(fl.rev(other))
356 f_len = fl.size(fl.rev(other))
357 repo.dirstate.update([fd], 'n', st_size=f_len, st_mtime=-1)
357 repo.dirstate.update([fd], 'n', st_size=f_len, st_mtime=-1)
358 if move:
358 if move:
359 repo.dirstate.update([f], 'r')
359 repo.dirstate.update([f], 'r')
360 if f != fd:
360 if f != fd:
361 repo.dirstate.copy(f, fd)
361 repo.dirstate.copy(f, fd)
362 else:
363 repo.dirstate.copy(f2, fd)
362
364
363 def update(repo, node, branchmerge=False, force=False, partial=None,
365 def update(repo, node, branchmerge=False, force=False, partial=None,
364 wlock=None, show_stats=True, remind=True):
366 wlock=None, show_stats=True, remind=True):
365
367
366 overwrite = force and not branchmerge
368 overwrite = force and not branchmerge
367 forcemerge = force and branchmerge
369 forcemerge = force and branchmerge
368
370
369 if not wlock:
371 if not wlock:
370 wlock = repo.wlock()
372 wlock = repo.wlock()
371
373
372 ### check phase
374 ### check phase
373
375
374 wc = repo.workingctx()
376 wc = repo.workingctx()
375 pl = wc.parents()
377 pl = wc.parents()
376 if not overwrite and len(pl) > 1:
378 if not overwrite and len(pl) > 1:
377 raise util.Abort(_("outstanding uncommitted merges"))
379 raise util.Abort(_("outstanding uncommitted merges"))
378
380
379 p1, p2 = pl[0], repo.changectx(node)
381 p1, p2 = pl[0], repo.changectx(node)
380 pa = p1.ancestor(p2)
382 pa = p1.ancestor(p2)
381
383
382 # are we going backwards?
384 # are we going backwards?
383 backwards = (pa == p2)
385 backwards = (pa == p2)
384
386
385 # is there a linear path from p1 to p2?
387 # is there a linear path from p1 to p2?
386 if pa == p1 or pa == p2:
388 if pa == p1 or pa == p2:
387 if branchmerge:
389 if branchmerge:
388 raise util.Abort(_("there is nothing to merge, just use "
390 raise util.Abort(_("there is nothing to merge, just use "
389 "'hg update' or look at 'hg heads'"))
391 "'hg update' or look at 'hg heads'"))
390 elif not (overwrite or branchmerge):
392 elif not (overwrite or branchmerge):
391 raise util.Abort(_("update spans branches, use 'hg merge' "
393 raise util.Abort(_("update spans branches, use 'hg merge' "
392 "or 'hg update -C' to lose changes"))
394 "or 'hg update -C' to lose changes"))
393
395
394 if branchmerge and not forcemerge:
396 if branchmerge and not forcemerge:
395 if wc.modified() or wc.added() or wc.removed():
397 if wc.modified() or wc.added() or wc.removed():
396 raise util.Abort(_("outstanding uncommitted changes"))
398 raise util.Abort(_("outstanding uncommitted changes"))
397
399
398 m1 = wc.manifest()
400 m1 = wc.manifest()
399 m2 = p2.manifest()
401 m2 = p2.manifest()
400 ma = pa.manifest()
402 ma = pa.manifest()
401
403
402 # resolve the manifest to determine which files
404 # resolve the manifest to determine which files
403 # we care about merging
405 # we care about merging
404 repo.ui.note(_("resolving manifests\n"))
406 repo.ui.note(_("resolving manifests\n"))
405 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s\n") %
407 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s\n") %
406 (overwrite, branchmerge, bool(partial)))
408 (overwrite, branchmerge, bool(partial)))
407 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (p1, p2, pa))
409 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (p1, p2, pa))
408
410
409 action = []
411 action = []
410 copy = {}
412 copy = {}
411
413
412 if not force:
414 if not force:
413 checkunknown(repo, m2, wc)
415 checkunknown(repo, m2, wc)
414 if not branchmerge:
416 if not branchmerge:
415 action += forgetremoved(m2, wc)
417 action += forgetremoved(m2, wc)
416 if not (backwards or overwrite):
418 if not (backwards or overwrite):
417 copy = findcopies(repo, m1, m2, pa.rev())
419 copy = findcopies(repo, m1, m2, pa.rev())
418
420
419 action += manifestmerge(repo.ui, m1, m2, ma, copy,
421 action += manifestmerge(repo.ui, m1, m2, ma, copy,
420 overwrite, backwards, partial)
422 overwrite, backwards, partial)
421
423
422 ### apply phase
424 ### apply phase
423
425
424 if not branchmerge:
426 if not branchmerge:
425 # we don't need to do any magic, just jump to the new rev
427 # we don't need to do any magic, just jump to the new rev
426 p1, p2 = p2, repo.changectx(nullid)
428 p1, p2 = p2, repo.changectx(nullid)
427
429
428 xp1, xp2 = str(p1), str(p2)
430 xp1, xp2 = str(p1), str(p2)
429 if not p2: xp2 = ''
431 if not p2: xp2 = ''
430
432
431 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
433 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
432
434
433 updated, merged, removed, unresolved = applyupdates(repo, action, xp1, xp2)
435 updated, merged, removed, unresolved = applyupdates(repo, action, xp1, xp2)
434
436
435 # update dirstate
437 # update dirstate
436 if not partial:
438 if not partial:
437 recordupdates(repo, action, branchmerge)
439 recordupdates(repo, action, branchmerge)
438 repo.dirstate.setparents(p1.node(), p2.node())
440 repo.dirstate.setparents(p1.node(), p2.node())
439
441
440 if show_stats:
442 if show_stats:
441 stats = ((updated, _("updated")),
443 stats = ((updated, _("updated")),
442 (merged - unresolved, _("merged")),
444 (merged - unresolved, _("merged")),
443 (removed, _("removed")),
445 (removed, _("removed")),
444 (unresolved, _("unresolved")))
446 (unresolved, _("unresolved")))
445 note = ", ".join([_("%d files %s") % s for s in stats])
447 note = ", ".join([_("%d files %s") % s for s in stats])
446 repo.ui.status("%s\n" % note)
448 repo.ui.status("%s\n" % note)
447 if not partial:
449 if not partial:
448 if branchmerge:
450 if branchmerge:
449 if unresolved:
451 if unresolved:
450 repo.ui.status(_("There are unresolved merges,"
452 repo.ui.status(_("There are unresolved merges,"
451 " you can redo the full merge using:\n"
453 " you can redo the full merge using:\n"
452 " hg update -C %s\n"
454 " hg update -C %s\n"
453 " hg merge %s\n"
455 " hg merge %s\n"
454 % (p1.rev(), p2.rev())))
456 % (p1.rev(), p2.rev())))
455 elif remind:
457 elif remind:
456 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
458 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
457 elif unresolved:
459 elif unresolved:
458 repo.ui.status(_("There are unresolved merges with"
460 repo.ui.status(_("There are unresolved merges with"
459 " locally modified files.\n"))
461 " locally modified files.\n"))
460
462
461 repo.hook('update', parent1=xp1, parent2=xp2, error=unresolved)
463 repo.hook('update', parent1=xp1, parent2=xp2, error=unresolved)
462 return unresolved
464 return unresolved
463
465
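update() above is the single entry point behind both hg update and hg merge: branchmerge selects merge semantics, force together with branchmerge permits merging with uncommitted changes, and force without branchmerge becomes an overwriting checkout. The mapping below is only an illustrative sketch of how a caller might drive it (the wrapper names and the import line are assumptions, not code from this changeset):

    # illustrative mapping from user-level operations to merge.update() flags
    from mercurial import merge   # assumed import path for the module above

    def plain_update(repo, node):
        # 'hg update': linear move of the working directory, no merge
        return merge.update(repo, node, branchmerge=False, force=False)

    def clobbering_update(repo, node):
        # 'hg update -C': overwrite local changes (overwrite = force and not branchmerge)
        return merge.update(repo, node, branchmerge=False, force=True)

    def branch_merge(repo, node):
        # 'hg merge': merge the other head; returns the number of unresolved files
        return merge.update(repo, node, branchmerge=True, force=False)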
@@ -1,25 +1,27 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 mkdir t
3 mkdir t
4 cd t
4 cd t
5 hg init
5 hg init
6 echo "[merge]" >> .hg/hgrc
6 echo "[merge]" >> .hg/hgrc
7 echo "followcopies = 1" >> .hg/hgrc
7 echo "followcopies = 1" >> .hg/hgrc
8 echo foo > a
8 echo foo > a
9 echo foo > a2
9 echo foo > a2
10 hg add a a2
10 hg add a a2
11 hg ci -m "start" -d "0 0"
11 hg ci -m "start" -d "0 0"
12 hg mv a b
12 hg mv a b
13 hg mv a2 b2
13 hg mv a2 b2
14 hg ci -m "rename" -d "0 0"
14 hg ci -m "rename" -d "0 0"
15 echo "checkout"
15 echo "checkout"
16 hg co 0
16 hg co 0
17 echo blahblah > a
17 echo blahblah > a
18 echo blahblah > a2
18 echo blahblah > a2
19 hg mv a2 c2
19 hg mv a2 c2
20 hg ci -m "modify" -d "0 0"
20 hg ci -m "modify" -d "0 0"
21 echo "merge"
21 echo "merge"
22 hg merge -y --debug
22 hg merge -y --debug
23 hg status -AC
23 hg status -AC
24 cat b
24 cat b
25 hg ci -m "merge" -d "0 0"
25 hg ci -m "merge" -d "0 0"
26 hg debugindex .hg/data/b.i
27 hg debugrename b
\ No newline at end of file
@@ -1,22 +1,26 b''
1 checkout
1 checkout
2 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
2 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
3 merge
3 merge
4 resolving manifests
4 resolving manifests
5 overwrite None branchmerge True partial False
5 overwrite None branchmerge True partial False
6 ancestor f26ec4fc3fa3 local 8e765a822af2 remote af1939970a1c
6 ancestor f26ec4fc3fa3 local 8e765a822af2 remote af1939970a1c
7 a: remote moved -> c
7 a: remote moved -> c
8 b2: remote created -> g
8 b2: remote created -> g
9 merging a and b to b
9 merging a and b to b
10 resolving a
10 resolving a
11 my a@f26ec4fc3fa3 other b@8e765a822af2 ancestor a@af1939970a1c
11 my a@f26ec4fc3fa3 other b@8e765a822af2 ancestor a@af1939970a1c
12 copying a to b
12 copying a to b
13 removing a
13 removing a
14 getting b2
14 getting b2
15 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
15 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
16 (branch merge, don't forget to commit)
16 (branch merge, don't forget to commit)
17 M b
17 M b
18 a
18 a
19 M b2
19 M b2
20 R a
20 R a
21 C c2
21 C c2
22 blahblah
22 blahblah
23 rev offset length base linkrev nodeid p1 p2
24 0 0 67 0 1 dc51707dfc98 000000000000 000000000000
25 1 67 72 1 3 b2494a44f0a9 000000000000 dc51707dfc98
26 renamed from a:dd03b83622e78778b403775d0d074b9ac7387a66
@@ -1,457 +1,463 b''
1 --------------
1 --------------
2 test L:up a R:nc a b W: - 1 get local a to b
2 test L:up a R:nc a b W: - 1 get local a to b
3 --------------
3 --------------
4 resolving manifests
4 resolving manifests
5 overwrite None branchmerge True partial False
5 overwrite None branchmerge True partial False
6 ancestor e300d1c794ec local 735846fee2d7 remote 924404dff337
6 ancestor e300d1c794ec local 735846fee2d7 remote 924404dff337
7 rev: versions differ -> m
7 rev: versions differ -> m
8 a: remote copied -> c
8 a: remote copied -> c
9 merging a and b to b
9 merging a and b to b
10 resolving a
10 resolving a
11 my a@e300d1c794ec other b@735846fee2d7 ancestor a@924404dff337
11 my a@e300d1c794ec other b@735846fee2d7 ancestor a@924404dff337
12 copying a to b
12 copying a to b
13 merging rev
13 merging rev
14 resolving rev
14 resolving rev
15 my rev@e300d1c794ec other rev@735846fee2d7 ancestor rev@924404dff337
15 my rev@e300d1c794ec other rev@735846fee2d7 ancestor rev@924404dff337
16 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
16 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
17 (branch merge, don't forget to commit)
17 (branch merge, don't forget to commit)
18 --------------
18 --------------
19 M a
19 M a
20 M b
20 M b
21 a
21 a
22 --------------
22 --------------
23
23
24 --------------
24 --------------
25 test L:nc a b R:up a W: - 2 get rem change to a and b
25 test L:nc a b R:up a W: - 2 get rem change to a and b
26 --------------
26 --------------
27 resolving manifests
27 resolving manifests
28 overwrite None branchmerge True partial False
28 overwrite None branchmerge True partial False
29 ancestor ac809aeed39a local f4db7e329e71 remote 924404dff337
29 ancestor ac809aeed39a local f4db7e329e71 remote 924404dff337
30 a: remote is newer -> g
30 a: remote is newer -> g
31 b: local copied -> c
31 b: local copied -> c
32 rev: versions differ -> m
32 rev: versions differ -> m
33 getting a
33 getting a
34 merging b and a to b
34 merging b and a to b
35 resolving b
35 resolving b
36 my b@ac809aeed39a other a@f4db7e329e71 ancestor a@924404dff337
36 my b@ac809aeed39a other a@f4db7e329e71 ancestor a@924404dff337
37 merging rev
37 merging rev
38 resolving rev
38 resolving rev
39 my rev@ac809aeed39a other rev@f4db7e329e71 ancestor rev@924404dff337
39 my rev@ac809aeed39a other rev@f4db7e329e71 ancestor rev@924404dff337
40 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
40 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
41 (branch merge, don't forget to commit)
41 (branch merge, don't forget to commit)
42 --------------
42 --------------
43 M a
43 M a
44 M b
44 M b
45 a
45 --------------
46 --------------
46
47
47 --------------
48 --------------
48 test L:up a R:nm a b W: - 3 get local a change to b, remove a
49 test L:up a R:nm a b W: - 3 get local a change to b, remove a
49 --------------
50 --------------
50 resolving manifests
51 resolving manifests
51 overwrite None branchmerge True partial False
52 overwrite None branchmerge True partial False
52 ancestor e300d1c794ec local e03727d2d66b remote 924404dff337
53 ancestor e300d1c794ec local e03727d2d66b remote 924404dff337
53 a: remote moved -> c
54 a: remote moved -> c
54 rev: versions differ -> m
55 rev: versions differ -> m
55 merging a and b to b
56 merging a and b to b
56 resolving a
57 resolving a
57 my a@e300d1c794ec other b@e03727d2d66b ancestor a@924404dff337
58 my a@e300d1c794ec other b@e03727d2d66b ancestor a@924404dff337
58 copying a to b
59 copying a to b
59 removing a
60 removing a
60 merging rev
61 merging rev
61 resolving rev
62 resolving rev
62 my rev@e300d1c794ec other rev@e03727d2d66b ancestor rev@924404dff337
63 my rev@e300d1c794ec other rev@e03727d2d66b ancestor rev@924404dff337
63 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
64 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
64 (branch merge, don't forget to commit)
65 (branch merge, don't forget to commit)
65 --------------
66 --------------
66 M b
67 M b
67 a
68 a
68 --------------
69 --------------
69
70
70 --------------
71 --------------
71 test L:nm a b R:up a W: - 4 get remote change to b
72 test L:nm a b R:up a W: - 4 get remote change to b
72 --------------
73 --------------
73 resolving manifests
74 resolving manifests
74 overwrite None branchmerge True partial False
75 overwrite None branchmerge True partial False
75 ancestor ecf3cb2a4219 local f4db7e329e71 remote 924404dff337
76 ancestor ecf3cb2a4219 local f4db7e329e71 remote 924404dff337
76 b: local moved -> c
77 b: local moved -> c
77 rev: versions differ -> m
78 rev: versions differ -> m
78 merging b and a to b
79 merging b and a to b
79 resolving b
80 resolving b
80 my b@ecf3cb2a4219 other a@f4db7e329e71 ancestor a@924404dff337
81 my b@ecf3cb2a4219 other a@f4db7e329e71 ancestor a@924404dff337
81 merging rev
82 merging rev
82 resolving rev
83 resolving rev
83 my rev@ecf3cb2a4219 other rev@f4db7e329e71 ancestor rev@924404dff337
84 my rev@ecf3cb2a4219 other rev@f4db7e329e71 ancestor rev@924404dff337
84 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
85 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
85 (branch merge, don't forget to commit)
86 (branch merge, don't forget to commit)
86 --------------
87 --------------
87 M b
88 M b
89 a
88 --------------
90 --------------
89
91
90 --------------
92 --------------
91 test L: R:nc a b W: - 5 get b
93 test L: R:nc a b W: - 5 get b
92 --------------
94 --------------
93 resolving manifests
95 resolving manifests
94 overwrite None branchmerge True partial False
96 overwrite None branchmerge True partial False
95 ancestor 94b33a1b7f2d local 735846fee2d7 remote 924404dff337
97 ancestor 94b33a1b7f2d local 735846fee2d7 remote 924404dff337
96 rev: versions differ -> m
98 rev: versions differ -> m
97 a: remote copied -> c
99 a: remote copied -> c
98 merging a and b to b
100 merging a and b to b
99 resolving a
101 resolving a
100 my a@924404dff337 other b@735846fee2d7 ancestor a@924404dff337
102 my a@924404dff337 other b@735846fee2d7 ancestor a@924404dff337
101 copying a to b
103 copying a to b
102 merging rev
104 merging rev
103 resolving rev
105 resolving rev
104 my rev@94b33a1b7f2d other rev@735846fee2d7 ancestor rev@924404dff337
106 my rev@94b33a1b7f2d other rev@735846fee2d7 ancestor rev@924404dff337
105 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
107 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
106 (branch merge, don't forget to commit)
108 (branch merge, don't forget to commit)
107 --------------
109 --------------
108 M a
110 M a
109 M b
111 M b
110 a
112 a
111 --------------
113 --------------
112
114
113 --------------
115 --------------
114 test L:nc a b R: W: - 6 nothing
116 test L:nc a b R: W: - 6 nothing
115 --------------
117 --------------
116 resolving manifests
118 resolving manifests
117 overwrite None branchmerge True partial False
119 overwrite None branchmerge True partial False
118 ancestor ac809aeed39a local 97c705ade336 remote 924404dff337
120 ancestor ac809aeed39a local 97c705ade336 remote 924404dff337
119 b: local copied -> c
121 b: local copied -> c
120 rev: versions differ -> m
122 rev: versions differ -> m
121 merging b and a to b
123 merging b and a to b
122 resolving b
124 resolving b
123 my b@ac809aeed39a other a@924404dff337 ancestor a@924404dff337
125 my b@ac809aeed39a other a@924404dff337 ancestor a@924404dff337
124 merging rev
126 merging rev
125 resolving rev
127 resolving rev
126 my rev@ac809aeed39a other rev@97c705ade336 ancestor rev@924404dff337
128 my rev@ac809aeed39a other rev@97c705ade336 ancestor rev@924404dff337
127 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
129 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
128 (branch merge, don't forget to commit)
130 (branch merge, don't forget to commit)
129 --------------
131 --------------
130 M b
132 M b
133 a
131 C a
134 C a
132 --------------
135 --------------
133
136
134 --------------
137 --------------
135 test L: R:nm a b W: - 7 get b
138 test L: R:nm a b W: - 7 get b
136 --------------
139 --------------
137 resolving manifests
140 resolving manifests
138 overwrite None branchmerge True partial False
141 overwrite None branchmerge True partial False
139 ancestor 94b33a1b7f2d local e03727d2d66b remote 924404dff337
142 ancestor 94b33a1b7f2d local e03727d2d66b remote 924404dff337
140 a: remote moved -> c
143 a: remote moved -> c
141 rev: versions differ -> m
144 rev: versions differ -> m
142 merging a and b to b
145 merging a and b to b
143 resolving a
146 resolving a
144 my a@924404dff337 other b@e03727d2d66b ancestor a@924404dff337
147 my a@924404dff337 other b@e03727d2d66b ancestor a@924404dff337
145 copying a to b
148 copying a to b
146 removing a
149 removing a
147 merging rev
150 merging rev
148 resolving rev
151 resolving rev
149 my rev@94b33a1b7f2d other rev@e03727d2d66b ancestor rev@924404dff337
152 my rev@94b33a1b7f2d other rev@e03727d2d66b ancestor rev@924404dff337
150 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
153 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
151 (branch merge, don't forget to commit)
154 (branch merge, don't forget to commit)
152 --------------
155 --------------
153 M b
156 M b
154 a
157 a
155 --------------
158 --------------
156
159
157 --------------
160 --------------
158 test L:nm a b R: W: - 8 nothing
161 test L:nm a b R: W: - 8 nothing
159 --------------
162 --------------
160 resolving manifests
163 resolving manifests
161 overwrite None branchmerge True partial False
164 overwrite None branchmerge True partial False
162 ancestor ecf3cb2a4219 local 97c705ade336 remote 924404dff337
165 ancestor ecf3cb2a4219 local 97c705ade336 remote 924404dff337
163 b: local moved -> c
166 b: local moved -> c
164 rev: versions differ -> m
167 rev: versions differ -> m
165 merging b and a to b
168 merging b and a to b
166 resolving b
169 resolving b
167 my b@ecf3cb2a4219 other a@924404dff337 ancestor a@924404dff337
170 my b@ecf3cb2a4219 other a@924404dff337 ancestor a@924404dff337
168 merging rev
171 merging rev
169 resolving rev
172 resolving rev
170 my rev@ecf3cb2a4219 other rev@97c705ade336 ancestor rev@924404dff337
173 my rev@ecf3cb2a4219 other rev@97c705ade336 ancestor rev@924404dff337
171 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
174 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
172 (branch merge, don't forget to commit)
175 (branch merge, don't forget to commit)
173 --------------
176 --------------
174 M b
177 M b
178 a
175 --------------
179 --------------
176
180
177 --------------
181 --------------
178 test L:um a b R:um a b W: - 9 do merge with ancestor in a
182 test L:um a b R:um a b W: - 9 do merge with ancestor in a
179 --------------
183 --------------
180 resolving manifests
184 resolving manifests
181 overwrite None branchmerge True partial False
185 overwrite None branchmerge True partial False
182 ancestor ec03c2ca8642 local 79cc6877a3b7 remote 924404dff337
186 ancestor ec03c2ca8642 local 79cc6877a3b7 remote 924404dff337
183 b: versions differ -> m
187 b: versions differ -> m
184 rev: versions differ -> m
188 rev: versions differ -> m
185 merging b
189 merging b
186 resolving b
190 resolving b
187 my b@ec03c2ca8642 other b@79cc6877a3b7 ancestor a@924404dff337
191 my b@ec03c2ca8642 other b@79cc6877a3b7 ancestor a@924404dff337
188 merging rev
192 merging rev
189 resolving rev
193 resolving rev
190 my rev@ec03c2ca8642 other rev@79cc6877a3b7 ancestor rev@924404dff337
194 my rev@ec03c2ca8642 other rev@79cc6877a3b7 ancestor rev@924404dff337
191 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
195 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
192 (branch merge, don't forget to commit)
196 (branch merge, don't forget to commit)
193 --------------
197 --------------
194 M b
198 M b
195 --------------
199 --------------
196
200
197 --------------
201 --------------
198 test L:nm a b R:nm a c W: - 11 get c, keep b
202 test L:nm a b R:nm a c W: - 11 get c, keep b
199 --------------
203 --------------
200 resolving manifests
204 resolving manifests
201 overwrite None branchmerge True partial False
205 overwrite None branchmerge True partial False
202 ancestor ecf3cb2a4219 local e6abcc1a30c2 remote 924404dff337
206 ancestor ecf3cb2a4219 local e6abcc1a30c2 remote 924404dff337
203 rev: versions differ -> m
207 rev: versions differ -> m
204 c: remote created -> g
208 c: remote created -> g
205 getting c
209 getting c
206 merging rev
210 merging rev
207 resolving rev
211 resolving rev
208 my rev@ecf3cb2a4219 other rev@e6abcc1a30c2 ancestor rev@924404dff337
212 my rev@ecf3cb2a4219 other rev@e6abcc1a30c2 ancestor rev@924404dff337
209 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
213 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
210 (branch merge, don't forget to commit)
214 (branch merge, don't forget to commit)
211 --------------
215 --------------
212 M c
216 M c
213 C b
217 C b
214 --------------
218 --------------
215
219
216 --------------
220 --------------
217 test L:nc a b R:up b W: - 12 merge b no ancestor
221 test L:nc a b R:up b W: - 12 merge b no ancestor
218 --------------
222 --------------
219 resolving manifests
223 resolving manifests
220 overwrite None branchmerge True partial False
224 overwrite None branchmerge True partial False
221 ancestor ac809aeed39a local af30c7647fc7 remote 924404dff337
225 ancestor ac809aeed39a local af30c7647fc7 remote 924404dff337
222 b: versions differ -> m
226 b: versions differ -> m
223 rev: versions differ -> m
227 rev: versions differ -> m
224 merging b
228 merging b
225 resolving b
229 resolving b
226 my b@ac809aeed39a other b@af30c7647fc7 ancestor b@000000000000
230 my b@ac809aeed39a other b@af30c7647fc7 ancestor b@000000000000
227 merging rev
231 merging rev
228 resolving rev
232 resolving rev
229 my rev@ac809aeed39a other rev@af30c7647fc7 ancestor rev@924404dff337
233 my rev@ac809aeed39a other rev@af30c7647fc7 ancestor rev@924404dff337
230 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
234 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
231 (branch merge, don't forget to commit)
235 (branch merge, don't forget to commit)
232 --------------
236 --------------
233 M b
237 M b
234 C a
238 C a
235 --------------
239 --------------
236
240
237 --------------
241 --------------
238 test L:up b R:nm a b W: - 13 merge b no ancestor
242 test L:up b R:nm a b W: - 13 merge b no ancestor
239 --------------
243 --------------
240 resolving manifests
244 resolving manifests
241 overwrite None branchmerge True partial False
245 overwrite None branchmerge True partial False
242 ancestor 59318016310c local e03727d2d66b remote 924404dff337
246 ancestor 59318016310c local e03727d2d66b remote 924404dff337
243 a: other deleted -> r
247 a: other deleted -> r
244 b: versions differ -> m
248 b: versions differ -> m
245 rev: versions differ -> m
249 rev: versions differ -> m
246 removing a
250 removing a
247 merging b
251 merging b
248 resolving b
252 resolving b
249 my b@59318016310c other b@e03727d2d66b ancestor b@000000000000
253 my b@59318016310c other b@e03727d2d66b ancestor b@000000000000
250 merging rev
254 merging rev
251 resolving rev
255 resolving rev
252 my rev@59318016310c other rev@e03727d2d66b ancestor rev@924404dff337
256 my rev@59318016310c other rev@e03727d2d66b ancestor rev@924404dff337
253 0 files updated, 2 files merged, 1 files removed, 0 files unresolved
257 0 files updated, 2 files merged, 1 files removed, 0 files unresolved
254 (branch merge, don't forget to commit)
258 (branch merge, don't forget to commit)
255 --------------
259 --------------
256 M b
260 M b
257 --------------
261 --------------
258
262
259 --------------
263 --------------
260 test L:nc a b R:up a b W: - 14 merge b no ancestor
264 test L:nc a b R:up a b W: - 14 merge b no ancestor
261 --------------
265 --------------
262 resolving manifests
266 resolving manifests
263 overwrite None branchmerge True partial False
267 overwrite None branchmerge True partial False
264 ancestor ac809aeed39a local 8dbce441892a remote 924404dff337
268 ancestor ac809aeed39a local 8dbce441892a remote 924404dff337
265 a: remote is newer -> g
269 a: remote is newer -> g
266 b: versions differ -> m
270 b: versions differ -> m
267 rev: versions differ -> m
271 rev: versions differ -> m
268 getting a
272 getting a
269 merging b
273 merging b
270 resolving b
274 resolving b
271 my b@ac809aeed39a other b@8dbce441892a ancestor b@000000000000
275 my b@ac809aeed39a other b@8dbce441892a ancestor b@000000000000
272 merging rev
276 merging rev
273 resolving rev
277 resolving rev
274 my rev@ac809aeed39a other rev@8dbce441892a ancestor rev@924404dff337
278 my rev@ac809aeed39a other rev@8dbce441892a ancestor rev@924404dff337
275 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
279 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
276 (branch merge, don't forget to commit)
280 (branch merge, don't forget to commit)
277 --------------
281 --------------
278 M a
282 M a
279 M b
283 M b
280 --------------
284 --------------
281
285
282 --------------
286 --------------
283 test L:up b R:nm a b W: - 15 merge b no ancestor, remove a
287 test L:up b R:nm a b W: - 15 merge b no ancestor, remove a
284 --------------
288 --------------
285 resolving manifests
289 resolving manifests
286 overwrite None branchmerge True partial False
290 overwrite None branchmerge True partial False
287 ancestor 59318016310c local e03727d2d66b remote 924404dff337
291 ancestor 59318016310c local e03727d2d66b remote 924404dff337
288 a: other deleted -> r
292 a: other deleted -> r
289 b: versions differ -> m
293 b: versions differ -> m
290 rev: versions differ -> m
294 rev: versions differ -> m
291 removing a
295 removing a
292 merging b
296 merging b
293 resolving b
297 resolving b
294 my b@59318016310c other b@e03727d2d66b ancestor b@000000000000
298 my b@59318016310c other b@e03727d2d66b ancestor b@000000000000
295 merging rev
299 merging rev
296 resolving rev
300 resolving rev
297 my rev@59318016310c other rev@e03727d2d66b ancestor rev@924404dff337
301 my rev@59318016310c other rev@e03727d2d66b ancestor rev@924404dff337
298 0 files updated, 2 files merged, 1 files removed, 0 files unresolved
302 0 files updated, 2 files merged, 1 files removed, 0 files unresolved
299 (branch merge, don't forget to commit)
303 (branch merge, don't forget to commit)
300 --------------
304 --------------
301 M b
305 M b
302 --------------
306 --------------
303
307
304 --------------
308 --------------
305 test L:nc a b R:up a b W: - 16 get a, merge b no ancestor
309 test L:nc a b R:up a b W: - 16 get a, merge b no ancestor
306 --------------
310 --------------
307 resolving manifests
311 resolving manifests
308 overwrite None branchmerge True partial False
312 overwrite None branchmerge True partial False
309 ancestor ac809aeed39a local 8dbce441892a remote 924404dff337
313 ancestor ac809aeed39a local 8dbce441892a remote 924404dff337
310 a: remote is newer -> g
314 a: remote is newer -> g
311 b: versions differ -> m
315 b: versions differ -> m
312 rev: versions differ -> m
316 rev: versions differ -> m
313 getting a
317 getting a
314 merging b
318 merging b
315 resolving b
319 resolving b
316 my b@ac809aeed39a other b@8dbce441892a ancestor b@000000000000
320 my b@ac809aeed39a other b@8dbce441892a ancestor b@000000000000
317 merging rev
321 merging rev
318 resolving rev
322 resolving rev
319 my rev@ac809aeed39a other rev@8dbce441892a ancestor rev@924404dff337
323 my rev@ac809aeed39a other rev@8dbce441892a ancestor rev@924404dff337
320 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
324 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
321 (branch merge, don't forget to commit)
325 (branch merge, don't forget to commit)
322 --------------
326 --------------
323 M a
327 M a
324 M b
328 M b
325 --------------
329 --------------
326
330
327 --------------
331 --------------
328 test L:up a b R:nc a b W: - 17 keep a, merge b no ancestor
332 test L:up a b R:nc a b W: - 17 keep a, merge b no ancestor
329 --------------
333 --------------
330 resolving manifests
334 resolving manifests
331 overwrite None branchmerge True partial False
335 overwrite None branchmerge True partial False
332 ancestor 0b76e65c8289 local 735846fee2d7 remote 924404dff337
336 ancestor 0b76e65c8289 local 735846fee2d7 remote 924404dff337
333 b: versions differ -> m
337 b: versions differ -> m
334 rev: versions differ -> m
338 rev: versions differ -> m
335 merging b
339 merging b
336 resolving b
340 resolving b
337 my b@0b76e65c8289 other b@735846fee2d7 ancestor b@000000000000
341 my b@0b76e65c8289 other b@735846fee2d7 ancestor b@000000000000
338 merging rev
342 merging rev
339 resolving rev
343 resolving rev
340 my rev@0b76e65c8289 other rev@735846fee2d7 ancestor rev@924404dff337
344 my rev@0b76e65c8289 other rev@735846fee2d7 ancestor rev@924404dff337
341 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
345 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
342 (branch merge, don't forget to commit)
346 (branch merge, don't forget to commit)
343 --------------
347 --------------
344 M b
348 M b
345 C a
349 C a
346 --------------
350 --------------
347
351
348 --------------
352 --------------
349 test L:nm a b R:up a b W: - 18 merge b no ancestor
353 test L:nm a b R:up a b W: - 18 merge b no ancestor
350 --------------
354 --------------
351 resolving manifests
355 resolving manifests
352 overwrite None branchmerge True partial False
356 overwrite None branchmerge True partial False
353 ancestor ecf3cb2a4219 local 8dbce441892a remote 924404dff337
357 ancestor ecf3cb2a4219 local 8dbce441892a remote 924404dff337
354 b: versions differ -> m
358 b: versions differ -> m
355 rev: versions differ -> m
359 rev: versions differ -> m
356 a: prompt recreating -> g
360 a: prompt recreating -> g
357 getting a
361 getting a
358 merging b
362 merging b
359 resolving b
363 resolving b
360 my b@ecf3cb2a4219 other b@8dbce441892a ancestor b@000000000000
364 my b@ecf3cb2a4219 other b@8dbce441892a ancestor b@000000000000
361 merging rev
365 merging rev
362 resolving rev
366 resolving rev
363 my rev@ecf3cb2a4219 other rev@8dbce441892a ancestor rev@924404dff337
367 my rev@ecf3cb2a4219 other rev@8dbce441892a ancestor rev@924404dff337
364 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
368 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
365 (branch merge, don't forget to commit)
369 (branch merge, don't forget to commit)
366 --------------
370 --------------
367 M a
371 M a
368 M b
372 M b
369 --------------
373 --------------
370
374
371 --------------
375 --------------
372 test L:up a b R:nm a b W: - 19 merge b no ancestor, prompt remove a
376 test L:up a b R:nm a b W: - 19 merge b no ancestor, prompt remove a
373 --------------
377 --------------
374 resolving manifests
378 resolving manifests
375 overwrite None branchmerge True partial False
379 overwrite None branchmerge True partial False
376 ancestor 0b76e65c8289 local e03727d2d66b remote 924404dff337
380 ancestor 0b76e65c8289 local e03727d2d66b remote 924404dff337
377 b: versions differ -> m
381 b: versions differ -> m
378 rev: versions differ -> m
382 rev: versions differ -> m
379 merging b
383 merging b
380 resolving b
384 resolving b
381 my b@0b76e65c8289 other b@e03727d2d66b ancestor b@000000000000
385 my b@0b76e65c8289 other b@e03727d2d66b ancestor b@000000000000
382 merging rev
386 merging rev
383 resolving rev
387 resolving rev
384 my rev@0b76e65c8289 other rev@e03727d2d66b ancestor rev@924404dff337
388 my rev@0b76e65c8289 other rev@e03727d2d66b ancestor rev@924404dff337
385 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
389 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
386 (branch merge, don't forget to commit)
390 (branch merge, don't forget to commit)
387 --------------
391 --------------
388 M b
392 M b
389 C a
393 C a
390 --------------
394 --------------
391
395
392 --------------
396 --------------
393 test L:up a R:um a b W: - 20 merge a and b to b, remove a
397 test L:up a R:um a b W: - 20 merge a and b to b, remove a
394 --------------
398 --------------
395 resolving manifests
399 resolving manifests
396 overwrite None branchmerge True partial False
400 overwrite None branchmerge True partial False
397 ancestor e300d1c794ec local 79cc6877a3b7 remote 924404dff337
401 ancestor e300d1c794ec local 79cc6877a3b7 remote 924404dff337
398 a: remote moved -> c
402 a: remote moved -> c
399 rev: versions differ -> m
403 rev: versions differ -> m
400 merging a and b to b
404 merging a and b to b
401 resolving a
405 resolving a
402 my a@e300d1c794ec other b@79cc6877a3b7 ancestor a@924404dff337
406 my a@e300d1c794ec other b@79cc6877a3b7 ancestor a@924404dff337
403 copying a to b
407 copying a to b
404 removing a
408 removing a
405 merging rev
409 merging rev
406 resolving rev
410 resolving rev
407 my rev@e300d1c794ec other rev@79cc6877a3b7 ancestor rev@924404dff337
411 my rev@e300d1c794ec other rev@79cc6877a3b7 ancestor rev@924404dff337
408 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
412 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
409 (branch merge, don't forget to commit)
413 (branch merge, don't forget to commit)
410 --------------
414 --------------
411 M b
415 M b
412 a
416 a
413 --------------
417 --------------
414
418
415 --------------
419 --------------
416 test L:um a b R:up a W: - 21 merge a and b to b
420 test L:um a b R:up a W: - 21 merge a and b to b
417 --------------
421 --------------
418 resolving manifests
422 resolving manifests
419 overwrite None branchmerge True partial False
423 overwrite None branchmerge True partial False
420 ancestor ec03c2ca8642 local f4db7e329e71 remote 924404dff337
424 ancestor ec03c2ca8642 local f4db7e329e71 remote 924404dff337
421 b: local moved -> c
425 b: local moved -> c
422 rev: versions differ -> m
426 rev: versions differ -> m
423 merging b and a to b
427 merging b and a to b
424 resolving b
428 resolving b
425 my b@ec03c2ca8642 other a@f4db7e329e71 ancestor a@924404dff337
429 my b@ec03c2ca8642 other a@f4db7e329e71 ancestor a@924404dff337
426 merging rev
430 merging rev
427 resolving rev
431 resolving rev
428 my rev@ec03c2ca8642 other rev@f4db7e329e71 ancestor rev@924404dff337
432 my rev@ec03c2ca8642 other rev@f4db7e329e71 ancestor rev@924404dff337
429 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
433 0 files updated, 2 files merged, 0 files removed, 0 files unresolved
430 (branch merge, don't forget to commit)
434 (branch merge, don't forget to commit)
431 --------------
435 --------------
432 M b
436 M b
437 a
433 --------------
438 --------------
434
439
435 --------------
440 --------------
436 test L:nm a b R:up a c W: - 23 get c, keep b
441 test L:nm a b R:up a c W: - 23 get c, keep b
437 --------------
442 --------------
438 resolving manifests
443 resolving manifests
439 overwrite None branchmerge True partial False
444 overwrite None branchmerge True partial False
440 ancestor ecf3cb2a4219 local 2b958612230f remote 924404dff337
445 ancestor ecf3cb2a4219 local 2b958612230f remote 924404dff337
441 b: local moved -> c
446 b: local moved -> c
442 rev: versions differ -> m
447 rev: versions differ -> m
443 c: remote created -> g
448 c: remote created -> g
444 merging b and a to b
449 merging b and a to b
445 resolving b
450 resolving b
446 my b@ecf3cb2a4219 other a@2b958612230f ancestor a@924404dff337
451 my b@ecf3cb2a4219 other a@2b958612230f ancestor a@924404dff337
447 getting c
452 getting c
448 merging rev
453 merging rev
449 resolving rev
454 resolving rev
450 my rev@ecf3cb2a4219 other rev@2b958612230f ancestor rev@924404dff337
455 my rev@ecf3cb2a4219 other rev@2b958612230f ancestor rev@924404dff337
451 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
456 1 files updated, 2 files merged, 0 files removed, 0 files unresolved
452 (branch merge, don't forget to commit)
457 (branch merge, don't forget to commit)
453 --------------
458 --------------
454 M b
459 M b
460 a
455 M c
461 M c
456 --------------
462 --------------
457
463
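A reading aid for the "-> g", "-> m", "-> c" and "-> r" annotations that recur in the debug transcripts above: they are the single-letter action codes emitted by manifestmerge and consumed by applyupdates/recordupdates. The summary below is just that, a summary derived from those handlers, not code from the changeset.

    # merge action codes as handled by applyupdates()/recordupdates()
    ACTIONS = {
        "g": "get: take the file's contents from the remote revision",
        "m": "merge: run a 3-way file merge in place",
        "c": "copy/rename merge: merge f and f2 into the destination fd",
        "r": "remove: delete the file from the working directory",
        "e": "exec: only the executable bit needs updating",
        "f": "forget: drop the file from the dirstate",
    }

    for code in ("g", "m", "c", "r", "e", "f"):
        print("%s -> %s" % (code, ACTIONS[code]))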