merge: introduce 'commitinfo' in mergeresult...
Pulkit Goyal
r45832:8e8d5139 default
--- a/hgext/convert/hg.py
+++ b/hgext/convert/hg.py
@@ -1,729 +1,730 @@
 # hg.py - hg backend for convert extension
 #
 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 # Notes for hg->hg conversion:
 #
 # * Old versions of Mercurial didn't trim the whitespace from the ends
 #   of commit messages, but new versions do. Changesets created by
 #   those older versions, then converted, may thus have different
 #   hashes for changesets that are otherwise identical.
 #
 # * Using "--config convert.hg.saverev=true" will make the source
 #   identifier to be stored in the converted revision. This will cause
 #   the converted revision to have a different identity than the
 #   source.
 from __future__ import absolute_import
 
 import os
 import re
 import time
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
 from mercurial import (
     bookmarks,
     context,
     error,
     exchange,
     hg,
     lock as lockmod,
     merge as mergemod,
     node as nodemod,
     phases,
     pycompat,
     scmutil,
     util,
 )
 from mercurial.utils import dateutil
 
 stringio = util.stringio
 
 from . import common
 
 mapfile = common.mapfile
 NoRepo = common.NoRepo
 
 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
 
 
 class mercurial_sink(common.converter_sink):
     def __init__(self, ui, repotype, path):
         common.converter_sink.__init__(self, ui, repotype, path)
         self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
         self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
         self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
         self.lastbranch = None
         if os.path.isdir(path) and len(os.listdir(path)) > 0:
             try:
                 self.repo = hg.repository(self.ui, path)
                 if not self.repo.local():
                     raise NoRepo(
                         _(b'%s is not a local Mercurial repository') % path
                     )
             except error.RepoError as err:
                 ui.traceback()
                 raise NoRepo(err.args[0])
         else:
             try:
                 ui.status(_(b'initializing destination %s repository\n') % path)
                 self.repo = hg.repository(self.ui, path, create=True)
                 if not self.repo.local():
                     raise NoRepo(
                         _(b'%s is not a local Mercurial repository') % path
                     )
                 self.created.append(path)
             except error.RepoError:
                 ui.traceback()
                 raise NoRepo(
                     _(b"could not create hg repository %s as sink") % path
                 )
         self.lock = None
         self.wlock = None
         self.filemapmode = False
         self.subrevmaps = {}
 
     def before(self):
         self.ui.debug(b'run hg sink pre-conversion action\n')
         self.wlock = self.repo.wlock()
         self.lock = self.repo.lock()
 
     def after(self):
         self.ui.debug(b'run hg sink post-conversion action\n')
         if self.lock:
             self.lock.release()
         if self.wlock:
             self.wlock.release()
 
     def revmapfile(self):
         return self.repo.vfs.join(b"shamap")
 
     def authorfile(self):
         return self.repo.vfs.join(b"authormap")
 
     def setbranch(self, branch, pbranches):
         if not self.clonebranches:
             return
 
         setbranch = branch != self.lastbranch
         self.lastbranch = branch
         if not branch:
             branch = b'default'
         pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
 
         branchpath = os.path.join(self.path, branch)
         if setbranch:
             self.after()
             try:
                 self.repo = hg.repository(self.ui, branchpath)
             except Exception:
                 self.repo = hg.repository(self.ui, branchpath, create=True)
             self.before()
 
         # pbranches may bring revisions from other branches (merge parents)
         # Make sure we have them, or pull them.
         missings = {}
         for b in pbranches:
             try:
                 self.repo.lookup(b[0])
             except Exception:
                 missings.setdefault(b[1], []).append(b[0])
 
         if missings:
             self.after()
             for pbranch, heads in sorted(pycompat.iteritems(missings)):
                 pbranchpath = os.path.join(self.path, pbranch)
                 prepo = hg.peer(self.ui, {}, pbranchpath)
                 self.ui.note(
                     _(b'pulling from %s into %s\n') % (pbranch, branch)
                 )
                 exchange.pull(
                     self.repo, prepo, [prepo.lookup(h) for h in heads]
                 )
             self.before()
 
     def _rewritetags(self, source, revmap, data):
         fp = stringio()
         for line in data.splitlines():
             s = line.split(b' ', 1)
             if len(s) != 2:
                 self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
                 fp.write(b'%s\n' % line)  # Bogus, but keep for hash stability
                 continue
             revid = revmap.get(source.lookuprev(s[0]))
             if not revid:
                 if s[0] == nodemod.nullhex:
                     revid = s[0]
                 else:
                     # missing, but keep for hash stability
                     self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
                     fp.write(b'%s\n' % line)
                     continue
             fp.write(b'%s %s\n' % (revid, s[1]))
         return fp.getvalue()
 
     def _rewritesubstate(self, source, data):
         fp = stringio()
         for line in data.splitlines():
             s = line.split(b' ', 1)
             if len(s) != 2:
                 continue
 
             revid = s[0]
             subpath = s[1]
             if revid != nodemod.nullhex:
                 revmap = self.subrevmaps.get(subpath)
                 if revmap is None:
                     revmap = mapfile(
                         self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
                     )
                     self.subrevmaps[subpath] = revmap
 
                     # It is reasonable that one or more of the subrepos don't
                     # need to be converted, in which case they can be cloned
                     # into place instead of converted. Therefore, only warn
                     # once.
                     msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
                     if len(revmap) == 0:
                         sub = self.repo.wvfs.reljoin(subpath, b'.hg')
 
                         if self.repo.wvfs.exists(sub):
                             self.ui.warn(msg % subpath)
 
                 newid = revmap.get(revid)
                 if not newid:
                     if len(revmap) > 0:
                         self.ui.warn(
                             _(b"%s is missing from %s/.hg/shamap\n")
                             % (revid, subpath)
                         )
                 else:
                     revid = newid
 
             fp.write(b'%s %s\n' % (revid, subpath))
 
         return fp.getvalue()
 
     def _calculatemergedfiles(self, source, p1ctx, p2ctx):
         """Calculates the files from p2 that we need to pull in when merging p1
         and p2, given that the merge is coming from the given source.
 
         This prevents us from losing files that only exist in the target p2 and
         that don't come from the source repo (like if you're merging multiple
         repositories together).
         """
         anc = [p1ctx.ancestor(p2ctx)]
         # Calculate what files are coming from p2
+        # TODO: mresult.commitinfo might be able to get that info
         mresult = mergemod.calculateupdates(
             self.repo,
             p1ctx,
             p2ctx,
             anc,
             branchmerge=True,
             force=True,
             acceptremote=False,
             followcopies=False,
         )
 
         for file, (action, info, msg) in pycompat.iteritems(mresult.actions):
             if source.targetfilebelongstosource(file):
                 # If the file belongs to the source repo, ignore the p2
                 # since it will be covered by the existing fileset.
                 continue
 
             # If the file requires actual merging, abort. We don't have enough
             # context to resolve merges correctly.
             if action in [b'm', b'dm', b'cd', b'dc']:
                 raise error.Abort(
                     _(
                         b"unable to convert merge commit "
                         b"since target parents do not merge cleanly (file "
                         b"%s, parents %s and %s)"
                     )
                     % (file, p1ctx, p2ctx)
                 )
             elif action == b'k':
                 # 'keep' means nothing changed from p1
                 continue
             else:
                 # Any other change means we want to take the p2 version
                 yield file
 
     def putcommit(
         self, files, copies, parents, commit, source, revmap, full, cleanp2
     ):
         files = dict(files)
 
         def getfilectx(repo, memctx, f):
             if p2ctx and f in p2files and f not in copies:
                 self.ui.debug(b'reusing %s from p2\n' % f)
                 try:
                     return p2ctx[f]
                 except error.ManifestLookupError:
                     # If the file doesn't exist in p2, then we're syncing a
                     # delete, so just return None.
                     return None
             try:
                 v = files[f]
             except KeyError:
                 return None
             data, mode = source.getfile(f, v)
             if data is None:
                 return None
             if f == b'.hgtags':
                 data = self._rewritetags(source, revmap, data)
             if f == b'.hgsubstate':
                 data = self._rewritesubstate(source, data)
             return context.memfilectx(
                 self.repo,
                 memctx,
                 f,
                 data,
                 b'l' in mode,
                 b'x' in mode,
                 copies.get(f),
             )
 
         pl = []
         for p in parents:
             if p not in pl:
                 pl.append(p)
         parents = pl
         nparents = len(parents)
         if self.filemapmode and nparents == 1:
             m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
             parent = parents[0]
 
         if len(parents) < 2:
             parents.append(nodemod.nullid)
         if len(parents) < 2:
             parents.append(nodemod.nullid)
         p2 = parents.pop(0)
 
         text = commit.desc
 
         sha1s = re.findall(sha1re, text)
         for sha1 in sha1s:
             oldrev = source.lookuprev(sha1)
             newrev = revmap.get(oldrev)
             if newrev is not None:
                 text = text.replace(sha1, newrev[: len(sha1)])
 
         extra = commit.extra.copy()
 
         sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
         if sourcename:
             extra[b'convert_source'] = sourcename
 
         for label in (
             b'source',
             b'transplant_source',
             b'rebase_source',
             b'intermediate-source',
         ):
             node = extra.get(label)
 
             if node is None:
                 continue
 
             # Only transplant stores its reference in binary
             if label == b'transplant_source':
                 node = nodemod.hex(node)
 
             newrev = revmap.get(node)
             if newrev is not None:
                 if label == b'transplant_source':
                     newrev = nodemod.bin(newrev)
 
                 extra[label] = newrev
 
         if self.branchnames and commit.branch:
             extra[b'branch'] = commit.branch
         if commit.rev and commit.saverev:
             extra[b'convert_revision'] = commit.rev
 
         while parents:
             p1 = p2
             p2 = parents.pop(0)
             p1ctx = self.repo[p1]
             p2ctx = None
             if p2 != nodemod.nullid:
                 p2ctx = self.repo[p2]
             fileset = set(files)
             if full:
                 fileset.update(self.repo[p1])
                 fileset.update(self.repo[p2])
 
             if p2ctx:
                 p2files = set(cleanp2)
                 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
                     p2files.add(file)
                     fileset.add(file)
 
             ctx = context.memctx(
                 self.repo,
                 (p1, p2),
                 text,
                 fileset,
                 getfilectx,
                 commit.author,
                 commit.date,
                 extra,
             )
 
             # We won't know if the conversion changes the node until after the
             # commit, so copy the source's phase for now.
             self.repo.ui.setconfig(
                 b'phases',
                 b'new-commit',
                 phases.phasenames[commit.phase],
                 b'convert',
             )
 
             with self.repo.transaction(b"convert") as tr:
                 if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
                     origctx = commit.ctx
                 else:
                     origctx = None
                 node = nodemod.hex(self.repo.commitctx(ctx, origctx=origctx))
 
                 # If the node value has changed, but the phase is lower than
                 # draft, set it back to draft since it hasn't been exposed
                 # anywhere.
                 if commit.rev != node:
                     ctx = self.repo[node]
                     if ctx.phase() < phases.draft:
                         phases.registernew(
                             self.repo, tr, phases.draft, [ctx.node()]
                         )
 
             text = b"(octopus merge fixup)\n"
             p2 = node
 
         if self.filemapmode and nparents == 1:
             man = self.repo.manifestlog.getstorage(b'')
             mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
             closed = b'close' in commit.extra
             if not closed and not man.cmp(m1node, man.revision(mnode)):
                 self.ui.status(_(b"filtering out empty revision\n"))
                 self.repo.rollback(force=True)
                 return parent
         return p2
 
     def puttags(self, tags):
         tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
         tagparent = tagparent or nodemod.nullid
 
         oldlines = set()
         for branch, heads in pycompat.iteritems(self.repo.branchmap()):
             for h in heads:
                 if b'.hgtags' in self.repo[h]:
                     oldlines.update(
                         set(self.repo[h][b'.hgtags'].data().splitlines(True))
                     )
         oldlines = sorted(list(oldlines))
 
         newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
         if newlines == oldlines:
             return None, None
 
         # if the old and new tags match, then there is nothing to update
         oldtags = set()
         newtags = set()
         for line in oldlines:
             s = line.strip().split(b' ', 1)
             if len(s) != 2:
                 continue
             oldtags.add(s[1])
         for line in newlines:
             s = line.strip().split(b' ', 1)
             if len(s) != 2:
                 continue
             if s[1] not in oldtags:
                 newtags.add(s[1].strip())
 
         if not newtags:
             return None, None
 
         data = b"".join(newlines)
 
         def getfilectx(repo, memctx, f):
             return context.memfilectx(repo, memctx, f, data, False, False, None)
 
         self.ui.status(_(b"updating tags\n"))
         date = b"%d 0" % int(time.mktime(time.gmtime()))
         extra = {b'branch': self.tagsbranch}
         ctx = context.memctx(
             self.repo,
             (tagparent, None),
             b"update tags",
             [b".hgtags"],
             getfilectx,
             b"convert-repo",
             date,
             extra,
         )
         node = self.repo.commitctx(ctx)
         return nodemod.hex(node), nodemod.hex(tagparent)
 
     def setfilemapmode(self, active):
         self.filemapmode = active
 
     def putbookmarks(self, updatedbookmark):
         if not len(updatedbookmark):
             return
         wlock = lock = tr = None
         try:
             wlock = self.repo.wlock()
             lock = self.repo.lock()
             tr = self.repo.transaction(b'bookmark')
             self.ui.status(_(b"updating bookmarks\n"))
             destmarks = self.repo._bookmarks
             changes = [
                 (bookmark, nodemod.bin(updatedbookmark[bookmark]))
                 for bookmark in updatedbookmark
             ]
             destmarks.applychanges(self.repo, tr, changes)
             tr.close()
         finally:
             lockmod.release(lock, wlock, tr)
 
     def hascommitfrommap(self, rev):
         # the exact semantics of clonebranches is unclear so we can't say no
         return rev in self.repo or self.clonebranches
 
     def hascommitforsplicemap(self, rev):
         if rev not in self.repo and self.clonebranches:
             raise error.Abort(
                 _(
                     b'revision %s not found in destination '
                     b'repository (lookups with clonebranches=true '
                     b'are not implemented)'
                 )
                 % rev
             )
         return rev in self.repo
 
 
 class mercurial_source(common.converter_source):
     def __init__(self, ui, repotype, path, revs=None):
         common.converter_source.__init__(self, ui, repotype, path, revs)
         self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
         self.ignored = set()
         self.saverev = ui.configbool(b'convert', b'hg.saverev')
         try:
             self.repo = hg.repository(self.ui, path)
             # try to provoke an exception if this isn't really a hg
             # repo, but some other bogus compatible-looking url
             if not self.repo.local():
                 raise error.RepoError
         except error.RepoError:
             ui.traceback()
             raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
         self.lastrev = None
         self.lastctx = None
         self._changescache = None, None
         self.convertfp = None
         # Restrict converted revisions to startrev descendants
         startnode = ui.config(b'convert', b'hg.startrev')
         hgrevs = ui.config(b'convert', b'hg.revs')
         if hgrevs is None:
             if startnode is not None:
                 try:
                     startnode = self.repo.lookup(startnode)
                 except error.RepoError:
                     raise error.Abort(
                         _(b'%s is not a valid start revision') % startnode
                     )
                 startrev = self.repo.changelog.rev(startnode)
                 children = {startnode: 1}
                 for r in self.repo.changelog.descendants([startrev]):
                     children[self.repo.changelog.node(r)] = 1
                 self.keep = children.__contains__
             else:
                 self.keep = util.always
             if revs:
                 self._heads = [self.repo.lookup(r) for r in revs]
             else:
                 self._heads = self.repo.heads()
         else:
             if revs or startnode is not None:
                 raise error.Abort(
                     _(
                         b'hg.revs cannot be combined with '
                         b'hg.startrev or --rev'
                     )
                 )
             nodes = set()
             parents = set()
             for r in scmutil.revrange(self.repo, [hgrevs]):
                 ctx = self.repo[r]
                 nodes.add(ctx.node())
                 parents.update(p.node() for p in ctx.parents())
             self.keep = nodes.__contains__
             self._heads = nodes - parents
 
     def _changectx(self, rev):
         if self.lastrev != rev:
             self.lastctx = self.repo[rev]
             self.lastrev = rev
         return self.lastctx
 
     def _parents(self, ctx):
         return [p for p in ctx.parents() if p and self.keep(p.node())]
 
     def getheads(self):
         return [nodemod.hex(h) for h in self._heads if self.keep(h)]
 
     def getfile(self, name, rev):
         try:
             fctx = self._changectx(rev)[name]
             return fctx.data(), fctx.flags()
         except error.LookupError:
             return None, None
 
     def _changedfiles(self, ctx1, ctx2):
         ma, r = [], []
         maappend = ma.append
         rappend = r.append
         d = ctx1.manifest().diff(ctx2.manifest())
         for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
             if node2 is None:
                 rappend(f)
             else:
                 maappend(f)
         return ma, r
 
     def getchanges(self, rev, full):
         ctx = self._changectx(rev)
         parents = self._parents(ctx)
         if full or not parents:
             files = copyfiles = ctx.manifest()
         if parents:
             if self._changescache[0] == rev:
                 ma, r = self._changescache[1]
             else:
                 ma, r = self._changedfiles(parents[0], ctx)
             if not full:
                 files = ma + r
                 copyfiles = ma
         # _getcopies() is also run for roots and before filtering so missing
         # revlogs are detected early
         copies = self._getcopies(ctx, parents, copyfiles)
         cleanp2 = set()
         if len(parents) == 2:
             d = parents[1].manifest().diff(ctx.manifest(), clean=True)
             for f, value in pycompat.iteritems(d):
                 if value is None:
                     cleanp2.add(f)
         changes = [(f, rev) for f in files if f not in self.ignored]
         changes.sort()
         return changes, copies, cleanp2
 
     def _getcopies(self, ctx, parents, files):
         copies = {}
         for name in files:
             if name in self.ignored:
                 continue
             try:
                 copysource = ctx.filectx(name).copysource()
                 if copysource in self.ignored:
                     continue
                 # Ignore copy sources not in parent revisions
                 if not any(copysource in p for p in parents):
                     continue
                 copies[name] = copysource
             except TypeError:
                 pass
             except error.LookupError as e:
                 if not self.ignoreerrors:
                     raise
                 self.ignored.add(name)
                 self.ui.warn(_(b'ignoring: %s\n') % e)
         return copies
 
     def getcommit(self, rev):
         ctx = self._changectx(rev)
         _parents = self._parents(ctx)
         parents = [p.hex() for p in _parents]
         optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
         crev = rev
 
         return common.commit(
             author=ctx.user(),
             date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
             desc=ctx.description(),
             rev=crev,
             parents=parents,
             optparents=optparents,
             branch=ctx.branch(),
             extra=ctx.extra(),
             sortkey=ctx.rev(),
             saverev=self.saverev,
             phase=ctx.phase(),
             ctx=ctx,
         )
 
     def numcommits(self):
         return len(self.repo)
 
     def gettags(self):
         # This will get written to .hgtags, filter non global tags out.
         tags = [
             t
             for t in self.repo.tagslist()
             if self.repo.tagtype(t[0]) == b'global'
         ]
         return {
             name: nodemod.hex(node) for name, node in tags if self.keep(node)
         }
 
     def getchangedfiles(self, rev, i):
         ctx = self._changectx(rev)
         parents = self._parents(ctx)
         if not parents and i is None:
             i = 0
             ma, r = ctx.manifest().keys(), []
         else:
             i = i or 0
             ma, r = self._changedfiles(parents[i], ctx)
         ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
 
         if i == 0:
             self._changescache = (rev, (ma, r))
 
         return ma + r
 
     def converted(self, rev, destrev):
         if self.convertfp is None:
             self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
         self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
         self.convertfp.flush()
 
     def before(self):
         self.ui.debug(b'run hg source pre-conversion action\n')
 
     def after(self):
         self.ui.debug(b'run hg source post-conversion action\n')
 
     def hasnativeorder(self):
         return True
 
     def hasnativeclose(self):
         return True
 
     def lookuprev(self, rev):
         try:
             return nodemod.hex(self.repo.lookup(rev))
         except (error.RepoError, error.LookupError):
             return None
 
     def getbookmarks(self):
         return bookmarks.listbookmarks(self.repo)
 
     def checkrevformat(self, revstr, mapname=b'splicemap'):
         """ Mercurial, revision string is a 40 byte hex """
         self.checkhexformat(revstr, mapname)
--- a/mercurial/merge.py
+++ b/mercurial/merge.py
@@ -1,2143 +1,2159 @@
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import stat
11 import stat
12 import struct
12 import struct
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 addednodeid,
16 addednodeid,
17 modifiednodeid,
17 modifiednodeid,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 )
20 )
21 from .thirdparty import attr
21 from .thirdparty import attr
22 from . import (
22 from . import (
23 copies,
23 copies,
24 encoding,
24 encoding,
25 error,
25 error,
26 filemerge,
26 filemerge,
27 match as matchmod,
27 match as matchmod,
28 mergestate as mergestatemod,
28 mergestate as mergestatemod,
29 obsutil,
29 obsutil,
30 pathutil,
30 pathutil,
31 pycompat,
31 pycompat,
32 scmutil,
32 scmutil,
33 subrepoutil,
33 subrepoutil,
34 util,
34 util,
35 worker,
35 worker,
36 )
36 )
37
37
38 _pack = struct.pack
38 _pack = struct.pack
39 _unpack = struct.unpack
39 _unpack = struct.unpack
40
40
41
41
42 def _getcheckunknownconfig(repo, section, name):
42 def _getcheckunknownconfig(repo, section, name):
43 config = repo.ui.config(section, name)
43 config = repo.ui.config(section, name)
44 valid = [b'abort', b'ignore', b'warn']
44 valid = [b'abort', b'ignore', b'warn']
45 if config not in valid:
45 if config not in valid:
46 validstr = b', '.join([b"'" + v + b"'" for v in valid])
46 validstr = b', '.join([b"'" + v + b"'" for v in valid])
47 raise error.ConfigError(
47 raise error.ConfigError(
48 _(b"%s.%s not valid ('%s' is none of %s)")
48 _(b"%s.%s not valid ('%s' is none of %s)")
49 % (section, name, config, validstr)
49 % (section, name, config, validstr)
50 )
50 )
51 return config
51 return config
52
52
53
53
54 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
54 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
55 if wctx.isinmemory():
55 if wctx.isinmemory():
56 # Nothing to do in IMM because nothing in the "working copy" can be an
56 # Nothing to do in IMM because nothing in the "working copy" can be an
57 # unknown file.
57 # unknown file.
58 #
58 #
59 # Note that we should bail out here, not in ``_checkunknownfiles()``,
59 # Note that we should bail out here, not in ``_checkunknownfiles()``,
60 # because that function does other useful work.
60 # because that function does other useful work.
61 return False
61 return False
62
62
63 if f2 is None:
63 if f2 is None:
64 f2 = f
64 f2 = f
65 return (
65 return (
66 repo.wvfs.audit.check(f)
66 repo.wvfs.audit.check(f)
67 and repo.wvfs.isfileorlink(f)
67 and repo.wvfs.isfileorlink(f)
68 and repo.dirstate.normalize(f) not in repo.dirstate
68 and repo.dirstate.normalize(f) not in repo.dirstate
69 and mctx[f2].cmp(wctx[f])
69 and mctx[f2].cmp(wctx[f])
70 )
70 )
71
71
72
72
73 class _unknowndirschecker(object):
73 class _unknowndirschecker(object):
74 """
74 """
75 Look for any unknown files or directories that may have a path conflict
75 Look for any unknown files or directories that may have a path conflict
76 with a file. If any path prefix of the file exists as a file or link,
76 with a file. If any path prefix of the file exists as a file or link,
77 then it conflicts. If the file itself is a directory that contains any
77 then it conflicts. If the file itself is a directory that contains any
78 file that is not tracked, then it conflicts.
78 file that is not tracked, then it conflicts.
79
79
80 Returns the shortest path at which a conflict occurs, or None if there is
80 Returns the shortest path at which a conflict occurs, or None if there is
81 no conflict.
81 no conflict.
82 """
82 """
83
83
84 def __init__(self):
84 def __init__(self):
85 # A set of paths known to be good. This prevents repeated checking of
85 # A set of paths known to be good. This prevents repeated checking of
86 # dirs. It will be updated with any new dirs that are checked and found
86 # dirs. It will be updated with any new dirs that are checked and found
87 # to be safe.
87 # to be safe.
88 self._unknowndircache = set()
88 self._unknowndircache = set()
89
89
90 # A set of paths that are known to be absent. This prevents repeated
90 # A set of paths that are known to be absent. This prevents repeated
91 # checking of subdirectories that are known not to exist. It will be
91 # checking of subdirectories that are known not to exist. It will be
92 # updated with any new dirs that are checked and found to be absent.
92 # updated with any new dirs that are checked and found to be absent.
93 self._missingdircache = set()
93 self._missingdircache = set()
94
94
95 def __call__(self, repo, wctx, f):
95 def __call__(self, repo, wctx, f):
96 if wctx.isinmemory():
96 if wctx.isinmemory():
97 # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
97 # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
98 return False
98 return False
99
99
100 # Check for path prefixes that exist as unknown files.
100 # Check for path prefixes that exist as unknown files.
101 for p in reversed(list(pathutil.finddirs(f))):
101 for p in reversed(list(pathutil.finddirs(f))):
102 if p in self._missingdircache:
102 if p in self._missingdircache:
103 return
103 return
104 if p in self._unknowndircache:
104 if p in self._unknowndircache:
105 continue
105 continue
106 if repo.wvfs.audit.check(p):
106 if repo.wvfs.audit.check(p):
107 if (
107 if (
108 repo.wvfs.isfileorlink(p)
108 repo.wvfs.isfileorlink(p)
109 and repo.dirstate.normalize(p) not in repo.dirstate
109 and repo.dirstate.normalize(p) not in repo.dirstate
110 ):
110 ):
111 return p
111 return p
112 if not repo.wvfs.lexists(p):
112 if not repo.wvfs.lexists(p):
113 self._missingdircache.add(p)
113 self._missingdircache.add(p)
114 return
114 return
115 self._unknowndircache.add(p)
115 self._unknowndircache.add(p)
116
116
117 # Check if the file conflicts with a directory containing unknown files.
117 # Check if the file conflicts with a directory containing unknown files.
118 if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
118 if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
119 # Does the directory contain any files that are not in the dirstate?
119 # Does the directory contain any files that are not in the dirstate?
120 for p, dirs, files in repo.wvfs.walk(f):
120 for p, dirs, files in repo.wvfs.walk(f):
121 for fn in files:
121 for fn in files:
122 relf = util.pconvert(repo.wvfs.reljoin(p, fn))
122 relf = util.pconvert(repo.wvfs.reljoin(p, fn))
123 relf = repo.dirstate.normalize(relf, isknown=True)
123 relf = repo.dirstate.normalize(relf, isknown=True)
124 if relf not in repo.dirstate:
124 if relf not in repo.dirstate:
125 return f
125 return f
126 return None
126 return None
127
127
128
128
129 def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
129 def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
130 """
130 """
131 Considers any actions that care about the presence of conflicting unknown
131 Considers any actions that care about the presence of conflicting unknown
132 files. For some actions, the result is to abort; for others, it is to
132 files. For some actions, the result is to abort; for others, it is to
133 choose a different action.
133 choose a different action.
134 """
134 """
135 fileconflicts = set()
135 fileconflicts = set()
136 pathconflicts = set()
136 pathconflicts = set()
137 warnconflicts = set()
137 warnconflicts = set()
138 abortconflicts = set()
138 abortconflicts = set()
139 unknownconfig = _getcheckunknownconfig(repo, b'merge', b'checkunknown')
139 unknownconfig = _getcheckunknownconfig(repo, b'merge', b'checkunknown')
140 ignoredconfig = _getcheckunknownconfig(repo, b'merge', b'checkignored')
140 ignoredconfig = _getcheckunknownconfig(repo, b'merge', b'checkignored')
141 pathconfig = repo.ui.configbool(
141 pathconfig = repo.ui.configbool(
142 b'experimental', b'merge.checkpathconflicts'
142 b'experimental', b'merge.checkpathconflicts'
143 )
143 )
144 if not force:
144 if not force:
145
145
146 def collectconflicts(conflicts, config):
146 def collectconflicts(conflicts, config):
147 if config == b'abort':
147 if config == b'abort':
148 abortconflicts.update(conflicts)
148 abortconflicts.update(conflicts)
149 elif config == b'warn':
149 elif config == b'warn':
150 warnconflicts.update(conflicts)
150 warnconflicts.update(conflicts)
151
151
152 checkunknowndirs = _unknowndirschecker()
152 checkunknowndirs = _unknowndirschecker()
153 for f, (m, args, msg) in pycompat.iteritems(actions):
153 for f, (m, args, msg) in pycompat.iteritems(actions):
154 if m in (
154 if m in (
155 mergestatemod.ACTION_CREATED,
155 mergestatemod.ACTION_CREATED,
156 mergestatemod.ACTION_DELETED_CHANGED,
156 mergestatemod.ACTION_DELETED_CHANGED,
157 ):
157 ):
158 if _checkunknownfile(repo, wctx, mctx, f):
158 if _checkunknownfile(repo, wctx, mctx, f):
159 fileconflicts.add(f)
159 fileconflicts.add(f)
160 elif pathconfig and f not in wctx:
160 elif pathconfig and f not in wctx:
161 path = checkunknowndirs(repo, wctx, f)
161 path = checkunknowndirs(repo, wctx, f)
162 if path is not None:
162 if path is not None:
163 pathconflicts.add(path)
163 pathconflicts.add(path)
164 elif m == mergestatemod.ACTION_LOCAL_DIR_RENAME_GET:
164 elif m == mergestatemod.ACTION_LOCAL_DIR_RENAME_GET:
165 if _checkunknownfile(repo, wctx, mctx, f, args[0]):
165 if _checkunknownfile(repo, wctx, mctx, f, args[0]):
166 fileconflicts.add(f)
166 fileconflicts.add(f)
167
167
168 allconflicts = fileconflicts | pathconflicts
168 allconflicts = fileconflicts | pathconflicts
169 ignoredconflicts = {c for c in allconflicts if repo.dirstate._ignore(c)}
169 ignoredconflicts = {c for c in allconflicts if repo.dirstate._ignore(c)}
170 unknownconflicts = allconflicts - ignoredconflicts
170 unknownconflicts = allconflicts - ignoredconflicts
171 collectconflicts(ignoredconflicts, ignoredconfig)
171 collectconflicts(ignoredconflicts, ignoredconfig)
172 collectconflicts(unknownconflicts, unknownconfig)
172 collectconflicts(unknownconflicts, unknownconfig)
173 else:
173 else:
174 for f, (m, args, msg) in pycompat.iteritems(actions):
174 for f, (m, args, msg) in pycompat.iteritems(actions):
175 if m == mergestatemod.ACTION_CREATED_MERGE:
175 if m == mergestatemod.ACTION_CREATED_MERGE:
176 fl2, anc = args
176 fl2, anc = args
177 different = _checkunknownfile(repo, wctx, mctx, f)
177 different = _checkunknownfile(repo, wctx, mctx, f)
178 if repo.dirstate._ignore(f):
178 if repo.dirstate._ignore(f):
179 config = ignoredconfig
179 config = ignoredconfig
180 else:
180 else:
181 config = unknownconfig
181 config = unknownconfig
182
182
183 # The behavior when force is True is described by this table:
183 # The behavior when force is True is described by this table:
184 # config different mergeforce | action backup
184 # config different mergeforce | action backup
185 # * n * | get n
185 # * n * | get n
186 # * y y | merge -
186 # * y y | merge -
187 # abort y n | merge - (1)
187 # abort y n | merge - (1)
188 # warn y n | warn + get y
188 # warn y n | warn + get y
189 # ignore y n | get y
189 # ignore y n | get y
190 #
190 #
191 # (1) this is probably the wrong behavior here -- we should
191 # (1) this is probably the wrong behavior here -- we should
192 # probably abort, but some actions like rebases currently
192 # probably abort, but some actions like rebases currently
193 # don't like an abort happening in the middle of
193 # don't like an abort happening in the middle of
194 # merge.update.
194 # merge.update.
195 if not different:
195 if not different:
196 actions[f] = (
196 actions[f] = (
197 mergestatemod.ACTION_GET,
197 mergestatemod.ACTION_GET,
198 (fl2, False),
198 (fl2, False),
199 b'remote created',
199 b'remote created',
200 )
200 )
201 elif mergeforce or config == b'abort':
201 elif mergeforce or config == b'abort':
202 actions[f] = (
202 actions[f] = (
203 mergestatemod.ACTION_MERGE,
203 mergestatemod.ACTION_MERGE,
204 (f, f, None, False, anc),
204 (f, f, None, False, anc),
205 b'remote differs from untracked local',
205 b'remote differs from untracked local',
206 )
206 )
207 elif config == b'abort':
207 elif config == b'abort':
208 abortconflicts.add(f)
208 abortconflicts.add(f)
209 else:
209 else:
210 if config == b'warn':
210 if config == b'warn':
211 warnconflicts.add(f)
211 warnconflicts.add(f)
212 actions[f] = (
212 actions[f] = (
213 mergestatemod.ACTION_GET,
213 mergestatemod.ACTION_GET,
214 (fl2, True),
214 (fl2, True),
215 b'remote created',
215 b'remote created',
216 )
216 )
217
217
218 for f in sorted(abortconflicts):
218 for f in sorted(abortconflicts):
219 warn = repo.ui.warn
219 warn = repo.ui.warn
220 if f in pathconflicts:
220 if f in pathconflicts:
221 if repo.wvfs.isfileorlink(f):
221 if repo.wvfs.isfileorlink(f):
222 warn(_(b"%s: untracked file conflicts with directory\n") % f)
222 warn(_(b"%s: untracked file conflicts with directory\n") % f)
223 else:
223 else:
224 warn(_(b"%s: untracked directory conflicts with file\n") % f)
224 warn(_(b"%s: untracked directory conflicts with file\n") % f)
225 else:
225 else:
226 warn(_(b"%s: untracked file differs\n") % f)
226 warn(_(b"%s: untracked file differs\n") % f)
227 if abortconflicts:
227 if abortconflicts:
228 raise error.Abort(
228 raise error.Abort(
229 _(
229 _(
230 b"untracked files in working directory "
230 b"untracked files in working directory "
231 b"differ from files in requested revision"
231 b"differ from files in requested revision"
232 )
232 )
233 )
233 )
234
234
235 for f in sorted(warnconflicts):
235 for f in sorted(warnconflicts):
236 if repo.wvfs.isfileorlink(f):
236 if repo.wvfs.isfileorlink(f):
237 repo.ui.warn(_(b"%s: replacing untracked file\n") % f)
237 repo.ui.warn(_(b"%s: replacing untracked file\n") % f)
238 else:
238 else:
239 repo.ui.warn(_(b"%s: replacing untracked files in directory\n") % f)
239 repo.ui.warn(_(b"%s: replacing untracked files in directory\n") % f)
240
240
241 for f, (m, args, msg) in pycompat.iteritems(actions):
241 for f, (m, args, msg) in pycompat.iteritems(actions):
242 if m == mergestatemod.ACTION_CREATED:
242 if m == mergestatemod.ACTION_CREATED:
243 backup = (
243 backup = (
244 f in fileconflicts
244 f in fileconflicts
245 or f in pathconflicts
245 or f in pathconflicts
246 or any(p in pathconflicts for p in pathutil.finddirs(f))
246 or any(p in pathconflicts for p in pathutil.finddirs(f))
247 )
247 )
248 (flags,) = args
248 (flags,) = args
249 actions[f] = (mergestatemod.ACTION_GET, (flags, backup), msg)
249 actions[f] = (mergestatemod.ACTION_GET, (flags, backup), msg)
250
250
251
251
252 def _forgetremoved(wctx, mctx, branchmerge):
252 def _forgetremoved(wctx, mctx, branchmerge):
253 """
253 """
254 Forget removed files
254 Forget removed files
255
255
256 If we're jumping between revisions (as opposed to merging), and if
256 If we're jumping between revisions (as opposed to merging), and if
257 neither the working directory nor the target rev has the file,
257 neither the working directory nor the target rev has the file,
258 then we need to remove it from the dirstate, to prevent the
258 then we need to remove it from the dirstate, to prevent the
259 dirstate from listing the file when it is no longer in the
259 dirstate from listing the file when it is no longer in the
260 manifest.
260 manifest.
261
261
262 If we're merging, and the other revision has removed a file
262 If we're merging, and the other revision has removed a file
263 that is not present in the working directory, we need to mark it
263 that is not present in the working directory, we need to mark it
264 as removed.
264 as removed.
265 """
265 """
266
266
267 actions = {}
267 actions = {}
268 m = mergestatemod.ACTION_FORGET
268 m = mergestatemod.ACTION_FORGET
269 if branchmerge:
269 if branchmerge:
270 m = mergestatemod.ACTION_REMOVE
270 m = mergestatemod.ACTION_REMOVE
271 for f in wctx.deleted():
271 for f in wctx.deleted():
272 if f not in mctx:
272 if f not in mctx:
273 actions[f] = m, None, b"forget deleted"
273 actions[f] = m, None, b"forget deleted"
274
274
275 if not branchmerge:
275 if not branchmerge:
276 for f in wctx.removed():
276 for f in wctx.removed():
277 if f not in mctx:
277 if f not in mctx:
278 actions[f] = (
278 actions[f] = (
279 mergestatemod.ACTION_FORGET,
279 mergestatemod.ACTION_FORGET,
280 None,
280 None,
281 b"forget removed",
281 b"forget removed",
282 )
282 )
283
283
284 return actions
284 return actions
285
285
286
286
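The forget-versus-remove rule described in the _forgetremoved() docstring above can be illustrated with a small standalone sketch; forget_removed() and its set/dict inputs are hypothetical stand-ins for the working-context and manifest objects, not Mercurial APIs.

# Standalone sketch of the _forgetremoved() decision, using plain sets.
# 'f' = drop from dirstate (ACTION_FORGET), 'r' = record as removed (ACTION_REMOVE).
def forget_removed(deleted_in_wdir, removed_in_wdir, target_manifest, branchmerge):
    actions = {}
    action = 'r' if branchmerge else 'f'
    for f in deleted_in_wdir:
        if f not in target_manifest:
            actions[f] = (action, None, 'forget deleted')
    if not branchmerge:
        for f in removed_in_wdir:
            if f not in target_manifest:
                actions[f] = ('f', None, 'forget removed')
    return actions

# Example: a plain update (not a merge) forgets files missing on both sides.
print(forget_removed({'a.txt'}, {'b.txt'}, {'c.txt'}, branchmerge=False))
# {'a.txt': ('f', None, 'forget deleted'), 'b.txt': ('f', None, 'forget removed')}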
287 def _checkcollision(repo, wmf, actions):
287 def _checkcollision(repo, wmf, actions):
288 """
288 """
289 Check for case-folding collisions.
289 Check for case-folding collisions.
290 """
290 """
291 # If the repo is narrowed, filter out files outside the narrowspec.
291 # If the repo is narrowed, filter out files outside the narrowspec.
292 narrowmatch = repo.narrowmatch()
292 narrowmatch = repo.narrowmatch()
293 if not narrowmatch.always():
293 if not narrowmatch.always():
294 pmmf = set(wmf.walk(narrowmatch))
294 pmmf = set(wmf.walk(narrowmatch))
295 if actions:
295 if actions:
296 narrowactions = {}
296 narrowactions = {}
297 for m, actionsfortype in pycompat.iteritems(actions):
297 for m, actionsfortype in pycompat.iteritems(actions):
298 narrowactions[m] = []
298 narrowactions[m] = []
299 for (f, args, msg) in actionsfortype:
299 for (f, args, msg) in actionsfortype:
300 if narrowmatch(f):
300 if narrowmatch(f):
301 narrowactions[m].append((f, args, msg))
301 narrowactions[m].append((f, args, msg))
302 actions = narrowactions
302 actions = narrowactions
303 else:
303 else:
304 # build up the provisional merged manifest
304 # build up the provisional merged manifest
304 # build up the provisional merged manifest
304 # build up the provisional merged manifest
305 pmmf = set(wmf)
305 pmmf = set(wmf)
306
306
307 if actions:
307 if actions:
308 # KEEP and EXEC are no-op
308 # KEEP and EXEC are no-op
309 for m in (
309 for m in (
310 mergestatemod.ACTION_ADD,
310 mergestatemod.ACTION_ADD,
311 mergestatemod.ACTION_ADD_MODIFIED,
311 mergestatemod.ACTION_ADD_MODIFIED,
312 mergestatemod.ACTION_FORGET,
312 mergestatemod.ACTION_FORGET,
313 mergestatemod.ACTION_GET,
313 mergestatemod.ACTION_GET,
314 mergestatemod.ACTION_CHANGED_DELETED,
314 mergestatemod.ACTION_CHANGED_DELETED,
315 mergestatemod.ACTION_DELETED_CHANGED,
315 mergestatemod.ACTION_DELETED_CHANGED,
316 ):
316 ):
317 for f, args, msg in actions[m]:
317 for f, args, msg in actions[m]:
318 pmmf.add(f)
318 pmmf.add(f)
319 for f, args, msg in actions[mergestatemod.ACTION_REMOVE]:
319 for f, args, msg in actions[mergestatemod.ACTION_REMOVE]:
320 pmmf.discard(f)
320 pmmf.discard(f)
321 for f, args, msg in actions[mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL]:
321 for f, args, msg in actions[mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL]:
322 f2, flags = args
322 f2, flags = args
323 pmmf.discard(f2)
323 pmmf.discard(f2)
324 pmmf.add(f)
324 pmmf.add(f)
325 for f, args, msg in actions[mergestatemod.ACTION_LOCAL_DIR_RENAME_GET]:
325 for f, args, msg in actions[mergestatemod.ACTION_LOCAL_DIR_RENAME_GET]:
326 pmmf.add(f)
326 pmmf.add(f)
327 for f, args, msg in actions[mergestatemod.ACTION_MERGE]:
327 for f, args, msg in actions[mergestatemod.ACTION_MERGE]:
328 f1, f2, fa, move, anc = args
328 f1, f2, fa, move, anc = args
329 if move:
329 if move:
330 pmmf.discard(f1)
330 pmmf.discard(f1)
331 pmmf.add(f)
331 pmmf.add(f)
332
332
333 # check case-folding collision in provisional merged manifest
333 # check case-folding collision in provisional merged manifest
334 foldmap = {}
334 foldmap = {}
335 for f in pmmf:
335 for f in pmmf:
336 fold = util.normcase(f)
336 fold = util.normcase(f)
337 if fold in foldmap:
337 if fold in foldmap:
338 raise error.Abort(
338 raise error.Abort(
339 _(b"case-folding collision between %s and %s")
339 _(b"case-folding collision between %s and %s")
340 % (f, foldmap[fold])
340 % (f, foldmap[fold])
341 )
341 )
342 foldmap[fold] = f
342 foldmap[fold] = f
343
343
344 # check case-folding of directories
344 # check case-folding of directories
345 foldprefix = unfoldprefix = lastfull = b''
345 foldprefix = unfoldprefix = lastfull = b''
346 for fold, f in sorted(foldmap.items()):
346 for fold, f in sorted(foldmap.items()):
347 if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
347 if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
348 # the folded prefix matches but actual casing is different
348 # the folded prefix matches but actual casing is different
349 raise error.Abort(
349 raise error.Abort(
350 _(b"case-folding collision between %s and directory of %s")
350 _(b"case-folding collision between %s and directory of %s")
351 % (lastfull, f)
351 % (lastfull, f)
352 )
352 )
353 foldprefix = fold + b'/'
353 foldprefix = fold + b'/'
354 unfoldprefix = f + b'/'
354 unfoldprefix = f + b'/'
355 lastfull = f
355 lastfull = f
356
356
357
357
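The case-folding collision check in _checkcollision() above boils down to a foldmap keyed by the normalized name. Here is a minimal standalone sketch of that idea; str.casefold() stands in for util.normcase(), and find_case_collisions() is a hypothetical helper, not a Mercurial API.

# Two paths that normalize to the same casefolded name cannot coexist on a
# case-insensitive filesystem, so the second one seen is reported as a collision.
def find_case_collisions(paths):
    foldmap = {}
    collisions = []
    for p in sorted(paths):
        fold = p.casefold()
        if fold in foldmap:
            collisions.append((foldmap[fold], p))
        else:
            foldmap[fold] = p
    return collisions

print(find_case_collisions(['README.txt', 'readme.TXT', 'src/a.py']))
# [('README.txt', 'readme.TXT')]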
358 def driverpreprocess(repo, ms, wctx, labels=None):
358 def driverpreprocess(repo, ms, wctx, labels=None):
359 """run the preprocess step of the merge driver, if any
359 """run the preprocess step of the merge driver, if any
360
360
361 This is currently not implemented -- it's an extension point."""
361 This is currently not implemented -- it's an extension point."""
362 return True
362 return True
363
363
364
364
365 def driverconclude(repo, ms, wctx, labels=None):
365 def driverconclude(repo, ms, wctx, labels=None):
366 """run the conclude step of the merge driver, if any
366 """run the conclude step of the merge driver, if any
367
367
368 This is currently not implemented -- it's an extension point."""
368 This is currently not implemented -- it's an extension point."""
369 return True
369 return True
370
370
371
371
372 def _filesindirs(repo, manifest, dirs):
372 def _filesindirs(repo, manifest, dirs):
373 """
373 """
374 Generator that yields (file, directory) pairs: every file in the manifest
374 Generator that yields (file, directory) pairs: every file in the manifest
375 that is found inside one of the directories listed in dirs, together with
375 that is found inside one of the directories listed in dirs, together with
376 the directory it is found in.
376 the directory it is found in.
377 """
377 """
378 for f in manifest:
378 for f in manifest:
379 for p in pathutil.finddirs(f):
379 for p in pathutil.finddirs(f):
380 if p in dirs:
380 if p in dirs:
381 yield f, p
381 yield f, p
382 break
382 break
383
383
384
384
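A minimal standalone sketch of the generator above, for illustration; finddirs() below is a local helper approximating pathutil.finddirs() (parent directories, deepest first), and files_in_dirs() is a hypothetical name, not the real function.

def finddirs(path):
    # yield the parent directories of 'path', deepest first
    parts = path.split('/')[:-1]
    for i in range(len(parts), 0, -1):
        yield '/'.join(parts[:i])

def files_in_dirs(manifest, dirs):
    for f in manifest:
        for p in finddirs(f):
            if p in dirs:
                yield f, p
                break  # report each file at most once

print(list(files_in_dirs(['a/b/c.txt', 'a/d.txt', 'x.txt'], {'a/b'})))
# [('a/b/c.txt', 'a/b')]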
385 def checkpathconflicts(repo, wctx, mctx, actions):
385 def checkpathconflicts(repo, wctx, mctx, actions):
386 """
386 """
387 Check if any actions introduce path conflicts in the repository, updating
387 Check if any actions introduce path conflicts in the repository, updating
388 actions to record or handle the path conflict accordingly.
388 actions to record or handle the path conflict accordingly.
389 """
389 """
390 mf = wctx.manifest()
390 mf = wctx.manifest()
391
391
392 # The set of local files that conflict with a remote directory.
392 # The set of local files that conflict with a remote directory.
393 localconflicts = set()
393 localconflicts = set()
394
394
395 # The set of directories that conflict with a remote file, and so may cause
395 # The set of directories that conflict with a remote file, and so may cause
396 # conflicts if they still contain any files after the merge.
396 # conflicts if they still contain any files after the merge.
397 remoteconflicts = set()
397 remoteconflicts = set()
398
398
399 # The set of directories that appear as both a file and a directory in the
399 # The set of directories that appear as both a file and a directory in the
400 # remote manifest. These indicate an invalid remote manifest, which
400 # remote manifest. These indicate an invalid remote manifest, which
401 # can't be updated to cleanly.
401 # can't be updated to cleanly.
402 invalidconflicts = set()
402 invalidconflicts = set()
403
403
404 # The set of directories that contain files that are being created.
404 # The set of directories that contain files that are being created.
405 createdfiledirs = set()
405 createdfiledirs = set()
406
406
407 # The set of files deleted by all the actions.
407 # The set of files deleted by all the actions.
408 deletedfiles = set()
408 deletedfiles = set()
409
409
410 for f, (m, args, msg) in actions.items():
410 for f, (m, args, msg) in actions.items():
411 if m in (
411 if m in (
412 mergestatemod.ACTION_CREATED,
412 mergestatemod.ACTION_CREATED,
413 mergestatemod.ACTION_DELETED_CHANGED,
413 mergestatemod.ACTION_DELETED_CHANGED,
414 mergestatemod.ACTION_MERGE,
414 mergestatemod.ACTION_MERGE,
415 mergestatemod.ACTION_CREATED_MERGE,
415 mergestatemod.ACTION_CREATED_MERGE,
416 ):
416 ):
417 # This action may create a new local file.
417 # This action may create a new local file.
418 createdfiledirs.update(pathutil.finddirs(f))
418 createdfiledirs.update(pathutil.finddirs(f))
419 if mf.hasdir(f):
419 if mf.hasdir(f):
420 # The file aliases a local directory. This might be ok if all
420 # The file aliases a local directory. This might be ok if all
421 # the files in the local directory are being deleted. This
421 # the files in the local directory are being deleted. This
422 # will be checked once we know what all the deleted files are.
422 # will be checked once we know what all the deleted files are.
423 remoteconflicts.add(f)
423 remoteconflicts.add(f)
424 # Track the names of all deleted files.
424 # Track the names of all deleted files.
425 if m == mergestatemod.ACTION_REMOVE:
425 if m == mergestatemod.ACTION_REMOVE:
426 deletedfiles.add(f)
426 deletedfiles.add(f)
427 if m == mergestatemod.ACTION_MERGE:
427 if m == mergestatemod.ACTION_MERGE:
428 f1, f2, fa, move, anc = args
428 f1, f2, fa, move, anc = args
429 if move:
429 if move:
430 deletedfiles.add(f1)
430 deletedfiles.add(f1)
431 if m == mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL:
431 if m == mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL:
432 f2, flags = args
432 f2, flags = args
433 deletedfiles.add(f2)
433 deletedfiles.add(f2)
434
434
435 # Check all directories that contain created files for path conflicts.
435 # Check all directories that contain created files for path conflicts.
436 for p in createdfiledirs:
436 for p in createdfiledirs:
437 if p in mf:
437 if p in mf:
438 if p in mctx:
438 if p in mctx:
439 # A file is in a directory which aliases both a local
439 # A file is in a directory which aliases both a local
440 # and a remote file. This is an internal inconsistency
440 # and a remote file. This is an internal inconsistency
441 # within the remote manifest.
441 # within the remote manifest.
442 invalidconflicts.add(p)
442 invalidconflicts.add(p)
443 else:
443 else:
444 # A file is in a directory which aliases a local file.
444 # A file is in a directory which aliases a local file.
445 # We will need to rename the local file.
445 # We will need to rename the local file.
446 localconflicts.add(p)
446 localconflicts.add(p)
447 if p in actions and actions[p][0] in (
447 if p in actions and actions[p][0] in (
448 mergestatemod.ACTION_CREATED,
448 mergestatemod.ACTION_CREATED,
449 mergestatemod.ACTION_DELETED_CHANGED,
449 mergestatemod.ACTION_DELETED_CHANGED,
450 mergestatemod.ACTION_MERGE,
450 mergestatemod.ACTION_MERGE,
451 mergestatemod.ACTION_CREATED_MERGE,
451 mergestatemod.ACTION_CREATED_MERGE,
452 ):
452 ):
453 # The file is in a directory which aliases a remote file.
453 # The file is in a directory which aliases a remote file.
454 # This is an internal inconsistency within the remote
454 # This is an internal inconsistency within the remote
455 # manifest.
455 # manifest.
456 invalidconflicts.add(p)
456 invalidconflicts.add(p)
457
457
458 # Rename all local conflicting files that have not been deleted.
458 # Rename all local conflicting files that have not been deleted.
459 for p in localconflicts:
459 for p in localconflicts:
460 if p not in deletedfiles:
460 if p not in deletedfiles:
461 ctxname = bytes(wctx).rstrip(b'+')
461 ctxname = bytes(wctx).rstrip(b'+')
462 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
462 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
463 porig = wctx[p].copysource() or p
463 porig = wctx[p].copysource() or p
464 actions[pnew] = (
464 actions[pnew] = (
465 mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
465 mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
466 (p, porig),
466 (p, porig),
467 b'local path conflict',
467 b'local path conflict',
468 )
468 )
469 actions[p] = (
469 actions[p] = (
470 mergestatemod.ACTION_PATH_CONFLICT,
470 mergestatemod.ACTION_PATH_CONFLICT,
471 (pnew, b'l'),
471 (pnew, b'l'),
472 b'path conflict',
472 b'path conflict',
473 )
473 )
474
474
475 if remoteconflicts:
475 if remoteconflicts:
476 # Check if all files in the conflicting directories have been removed.
476 # Check if all files in the conflicting directories have been removed.
477 ctxname = bytes(mctx).rstrip(b'+')
477 ctxname = bytes(mctx).rstrip(b'+')
478 for f, p in _filesindirs(repo, mf, remoteconflicts):
478 for f, p in _filesindirs(repo, mf, remoteconflicts):
479 if f not in deletedfiles:
479 if f not in deletedfiles:
480 m, args, msg = actions[p]
480 m, args, msg = actions[p]
481 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
481 pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
482 if m in (
482 if m in (
483 mergestatemod.ACTION_DELETED_CHANGED,
483 mergestatemod.ACTION_DELETED_CHANGED,
484 mergestatemod.ACTION_MERGE,
484 mergestatemod.ACTION_MERGE,
485 ):
485 ):
486 # Action was merge, just update target.
486 # Action was merge, just update target.
487 actions[pnew] = (m, args, msg)
487 actions[pnew] = (m, args, msg)
488 else:
488 else:
489 # Action was create, change to renamed get action.
489 # Action was create, change to renamed get action.
490 fl = args[0]
490 fl = args[0]
491 actions[pnew] = (
491 actions[pnew] = (
492 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
492 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
493 (p, fl),
493 (p, fl),
494 b'remote path conflict',
494 b'remote path conflict',
495 )
495 )
496 actions[p] = (
496 actions[p] = (
497 mergestatemod.ACTION_PATH_CONFLICT,
497 mergestatemod.ACTION_PATH_CONFLICT,
498 (pnew, mergestatemod.ACTION_REMOVE),
498 (pnew, mergestatemod.ACTION_REMOVE),
499 b'path conflict',
499 b'path conflict',
500 )
500 )
501 remoteconflicts.remove(p)
501 remoteconflicts.remove(p)
502 break
502 break
503
503
504 if invalidconflicts:
504 if invalidconflicts:
505 for p in invalidconflicts:
505 for p in invalidconflicts:
506 repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
506 repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
507 raise error.Abort(_(b"destination manifest contains path conflicts"))
507 raise error.Abort(_(b"destination manifest contains path conflicts"))
508
508
509
509
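The core question checkpathconflicts() answers is whether a newly created file aliases an existing directory, or whether one of its parent directories aliases an existing file. Here is a minimal standalone sketch of that test under the assumption that the working-copy manifest is just a set of path strings; path_conflicts() is a hypothetical helper, not a Mercurial API.

# A created file conflicts if its full path is an existing directory, or if any
# of its parent directories is an existing file.
def path_conflicts(created, existing_files):
    existing_dirs = set()
    for f in existing_files:
        parts = f.split('/')[:-1]
        for i in range(1, len(parts) + 1):
            existing_dirs.add('/'.join(parts[:i]))
    conflicts = set()
    for f in created:
        if f in existing_dirs:
            conflicts.add(f)          # new file aliases an existing directory
        parts = f.split('/')[:-1]
        for i in range(1, len(parts) + 1):
            d = '/'.join(parts[:i])
            if d in existing_files:
                conflicts.add(d)      # a parent directory aliases an existing file
    return conflicts

print(path_conflicts({'a/b', 'c/d.txt'}, {'a/b/old.txt', 'c'}))
# {'a/b', 'c'}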
510 def _filternarrowactions(narrowmatch, branchmerge, actions):
510 def _filternarrowactions(narrowmatch, branchmerge, actions):
511 """
511 """
512 Filters out actions that can be ignored because the repo is narrowed.
512 Filters out actions that can be ignored because the repo is narrowed.
513
513
514 Raise an exception if the merge cannot be completed because the repo is
514 Raise an exception if the merge cannot be completed because the repo is
515 narrowed.
515 narrowed.
516 """
516 """
517 nooptypes = {b'k'} # TODO: handle with nonconflicttypes
517 nooptypes = {b'k'} # TODO: handle with nonconflicttypes
518 nonconflicttypes = set(b'a am c cm f g gs r e'.split())
518 nonconflicttypes = set(b'a am c cm f g gs r e'.split())
519 # We mutate the items in the dict during iteration, so iterate
519 # We mutate the items in the dict during iteration, so iterate
520 # over a copy.
520 # over a copy.
521 for f, action in list(actions.items()):
521 for f, action in list(actions.items()):
522 if narrowmatch(f):
522 if narrowmatch(f):
523 pass
523 pass
524 elif not branchmerge:
524 elif not branchmerge:
525 del actions[f] # just updating, ignore changes outside clone
525 del actions[f] # just updating, ignore changes outside clone
526 elif action[0] in nooptypes:
526 elif action[0] in nooptypes:
527 del actions[f] # merge does not affect file
527 del actions[f] # merge does not affect file
528 elif action[0] in nonconflicttypes:
528 elif action[0] in nonconflicttypes:
529 raise error.Abort(
529 raise error.Abort(
530 _(
530 _(
531 b'merge affects file \'%s\' outside narrow, '
531 b'merge affects file \'%s\' outside narrow, '
532 b'which is not yet supported'
532 b'which is not yet supported'
533 )
533 )
534 % f,
534 % f,
535 hint=_(b'merging in the other direction may work'),
535 hint=_(b'merging in the other direction may work'),
536 )
536 )
537 else:
537 else:
538 raise error.Abort(
538 raise error.Abort(
539 _(b'conflict in file \'%s\' is outside narrow clone') % f
539 _(b'conflict in file \'%s\' is outside narrow clone') % f
540 )
540 )
541
541
542
542
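A minimal standalone sketch of the filtering rule above, collapsing the noop/non-conflict distinctions: keep actions inside the narrow spec, silently drop the rest on a plain update, and refuse a merge that would touch a file outside the clone. The 'inside' predicate and filter_narrow_actions() are hypothetical stand-ins for repo.narrowmatch() and the real function.

def filter_narrow_actions(inside, branchmerge, actions):
    for f in list(actions):           # copy keys: we mutate during iteration
        if inside(f):
            continue
        if not branchmerge:
            del actions[f]            # just updating: ignore changes outside the clone
        else:
            raise ValueError('merge affects %r outside the narrow clone' % f)

acts = {'in/a.txt': ('g', None, 'get'), 'out/b.txt': ('g', None, 'get')}
filter_narrow_actions(lambda f: f.startswith('in/'), branchmerge=False, actions=acts)
print(acts)  # {'in/a.txt': ('g', None, 'get')}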
543 class mergeresult(object):
543 class mergeresult(object):
544 '''An object representing the result of merging manifests.
544 '''An object representing the result of merging manifests.
545
545
546 It has information about what actions need to be performed on the dirstate,
546 It has information about what actions need to be performed on the dirstate,
547 a mapping of divergent renames, and other such cases. '''
547 a mapping of divergent renames, and other such cases. '''
548
548
549 def __init__(self, actions, diverge, renamedelete):
549 def __init__(self, actions, diverge, renamedelete, commitinfo):
550 """
550 """
551 actions: dict of filename as keys and action related info as values
551 actions: dict of filename as keys and action related info as values
552 diverge: mapping of source name -> list of dest name for
552 diverge: mapping of source name -> list of dest name for
553 divergent renames
553 divergent renames
554 renamedelete: mapping of source name -> list of destinations for files
554 renamedelete: mapping of source name -> list of destinations for files
555 deleted on one side and renamed on the other.
555 deleted on one side and renamed on the other.
556 commitinfo: dict containing data which should be used on commit;
557 it contains a filename -> info mapping
556 """
558 """
557
559
558 self._actions = actions
560 self._actions = actions
559 self._diverge = diverge
561 self._diverge = diverge
560 self._renamedelete = renamedelete
562 self._renamedelete = renamedelete
563 self._commitinfo = commitinfo
561
564
562 @property
565 @property
563 def actions(self):
566 def actions(self):
564 return self._actions
567 return self._actions
565
568
566 @property
569 @property
567 def diverge(self):
570 def diverge(self):
568 return self._diverge
571 return self._diverge
569
572
570 @property
573 @property
571 def renamedelete(self):
574 def renamedelete(self):
572 return self._renamedelete
575 return self._renamedelete
573
576
577 @property
578 def commitinfo(self):
579 return self._commitinfo
580
574 def setactions(self, actions):
581 def setactions(self, actions):
575 self._actions = actions
582 self._actions = actions
576
583
577
584
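Since commitinfo is the new piece introduced by this changeset, here is a sketch of its shape as the diff describes it: a plain dict mapping bytes filenames to a marker (b'other') saying whose filelog parent to take at commit time. pick_filelog_parent() below is a hypothetical consumer written for illustration, not part of the Mercurial API.

# Shape of the commitinfo mapping: filename -> info marker.
commitinfo = {
    b'src/module.py': b'other',   # take the other (remote) parent's filelog
}

def pick_filelog_parent(f, commitinfo, local, other):
    # default to the local parent unless the merge recorded b'other' for f
    return other if commitinfo.get(f) == b'other' else local

print(pick_filelog_parent(b'src/module.py', commitinfo, b'p1', b'p2'))  # b'p2'
print(pick_filelog_parent(b'README', commitinfo, b'p1', b'p2'))         # b'p1'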
578 def manifestmerge(
585 def manifestmerge(
579 repo,
586 repo,
580 wctx,
587 wctx,
581 p2,
588 p2,
582 pa,
589 pa,
583 branchmerge,
590 branchmerge,
584 force,
591 force,
585 matcher,
592 matcher,
586 acceptremote,
593 acceptremote,
587 followcopies,
594 followcopies,
588 forcefulldiff=False,
595 forcefulldiff=False,
589 ):
596 ):
590 """
597 """
591 Merge wctx and p2 with ancestor pa and generate merge action list
598 Merge wctx and p2 with ancestor pa and generate merge action list
592
599
593 branchmerge and force are as passed in to update
600 branchmerge and force are as passed in to update
594 matcher = matcher to filter file lists
601 matcher = matcher to filter file lists
595 acceptremote = accept the incoming changes without prompting
602 acceptremote = accept the incoming changes without prompting
596
603
597 Returns an object of mergeresult class
604 Returns an object of mergeresult class
598 """
605 """
599 if matcher is not None and matcher.always():
606 if matcher is not None and matcher.always():
600 matcher = None
607 matcher = None
601
608
602 # manifests fetched in order are going to be faster, so prime the caches
609 # manifests fetched in order are going to be faster, so prime the caches
603 [
610 [
604 x.manifest()
611 x.manifest()
605 for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
612 for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
606 ]
613 ]
607
614
608 branch_copies1 = copies.branch_copies()
615 branch_copies1 = copies.branch_copies()
609 branch_copies2 = copies.branch_copies()
616 branch_copies2 = copies.branch_copies()
610 diverge = {}
617 diverge = {}
618 # information from merge which is needed at commit time
619 # for example, choosing which parent's filelog to commit
620 # TODO: use specific constants in future for this mapping
621 commitinfo = {}
611 if followcopies:
622 if followcopies:
612 branch_copies1, branch_copies2, diverge = copies.mergecopies(
623 branch_copies1, branch_copies2, diverge = copies.mergecopies(
613 repo, wctx, p2, pa
624 repo, wctx, p2, pa
614 )
625 )
615
626
616 boolbm = pycompat.bytestr(bool(branchmerge))
627 boolbm = pycompat.bytestr(bool(branchmerge))
617 boolf = pycompat.bytestr(bool(force))
628 boolf = pycompat.bytestr(bool(force))
618 boolm = pycompat.bytestr(bool(matcher))
629 boolm = pycompat.bytestr(bool(matcher))
619 repo.ui.note(_(b"resolving manifests\n"))
630 repo.ui.note(_(b"resolving manifests\n"))
620 repo.ui.debug(
631 repo.ui.debug(
621 b" branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
632 b" branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
622 )
633 )
623 repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
634 repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
624
635
625 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
636 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
626 copied1 = set(branch_copies1.copy.values())
637 copied1 = set(branch_copies1.copy.values())
627 copied1.update(branch_copies1.movewithdir.values())
638 copied1.update(branch_copies1.movewithdir.values())
628 copied2 = set(branch_copies2.copy.values())
639 copied2 = set(branch_copies2.copy.values())
629 copied2.update(branch_copies2.movewithdir.values())
640 copied2.update(branch_copies2.movewithdir.values())
630
641
631 if b'.hgsubstate' in m1 and wctx.rev() is None:
642 if b'.hgsubstate' in m1 and wctx.rev() is None:
632 # Check whether sub state is modified, and overwrite the manifest
643 # Check whether sub state is modified, and overwrite the manifest
633 # to flag the change. If wctx is a committed revision, we shouldn't
644 # to flag the change. If wctx is a committed revision, we shouldn't
634 # care for the dirty state of the working directory.
645 # care for the dirty state of the working directory.
635 if any(wctx.sub(s).dirty() for s in wctx.substate):
646 if any(wctx.sub(s).dirty() for s in wctx.substate):
636 m1[b'.hgsubstate'] = modifiednodeid
647 m1[b'.hgsubstate'] = modifiednodeid
637
648
638 # Don't use m2-vs-ma optimization if:
649 # Don't use m2-vs-ma optimization if:
639 # - ma is the same as m1 or m2, which we're just going to diff again later
650 # - ma is the same as m1 or m2, which we're just going to diff again later
640 # - The caller specifically asks for a full diff, which is useful during bid
651 # - The caller specifically asks for a full diff, which is useful during bid
641 # merge.
652 # merge.
642 if pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff:
653 if pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff:
643 # Identify which files are relevant to the merge, so we can limit the
654 # Identify which files are relevant to the merge, so we can limit the
644 # total m1-vs-m2 diff to just those files. This has significant
655 # total m1-vs-m2 diff to just those files. This has significant
645 # performance benefits in large repositories.
656 # performance benefits in large repositories.
646 relevantfiles = set(ma.diff(m2).keys())
657 relevantfiles = set(ma.diff(m2).keys())
647
658
648 # For copied and moved files, we need to add the source file too.
659 # For copied and moved files, we need to add the source file too.
649 for copykey, copyvalue in pycompat.iteritems(branch_copies1.copy):
660 for copykey, copyvalue in pycompat.iteritems(branch_copies1.copy):
650 if copyvalue in relevantfiles:
661 if copyvalue in relevantfiles:
651 relevantfiles.add(copykey)
662 relevantfiles.add(copykey)
652 for movedirkey in branch_copies1.movewithdir:
663 for movedirkey in branch_copies1.movewithdir:
653 relevantfiles.add(movedirkey)
664 relevantfiles.add(movedirkey)
654 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
665 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
655 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
666 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
656
667
657 diff = m1.diff(m2, match=matcher)
668 diff = m1.diff(m2, match=matcher)
658
669
659 actions = {}
670 actions = {}
660 for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff):
671 for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff):
661 if n1 and n2: # file exists on both local and remote side
672 if n1 and n2: # file exists on both local and remote side
662 if f not in ma:
673 if f not in ma:
663 # TODO: what if they're renamed from different sources?
674 # TODO: what if they're renamed from different sources?
664 fa = branch_copies1.copy.get(
675 fa = branch_copies1.copy.get(
665 f, None
676 f, None
666 ) or branch_copies2.copy.get(f, None)
677 ) or branch_copies2.copy.get(f, None)
667 if fa is not None:
678 if fa is not None:
668 actions[f] = (
679 actions[f] = (
669 mergestatemod.ACTION_MERGE,
680 mergestatemod.ACTION_MERGE,
670 (f, f, fa, False, pa.node()),
681 (f, f, fa, False, pa.node()),
671 b'both renamed from %s' % fa,
682 b'both renamed from %s' % fa,
672 )
683 )
673 else:
684 else:
674 actions[f] = (
685 actions[f] = (
675 mergestatemod.ACTION_MERGE,
686 mergestatemod.ACTION_MERGE,
676 (f, f, None, False, pa.node()),
687 (f, f, None, False, pa.node()),
677 b'both created',
688 b'both created',
678 )
689 )
679 else:
690 else:
680 a = ma[f]
691 a = ma[f]
681 fla = ma.flags(f)
692 fla = ma.flags(f)
682 nol = b'l' not in fl1 + fl2 + fla
693 nol = b'l' not in fl1 + fl2 + fla
683 if n2 == a and fl2 == fla:
694 if n2 == a and fl2 == fla:
684 actions[f] = (
695 actions[f] = (
685 mergestatemod.ACTION_KEEP,
696 mergestatemod.ACTION_KEEP,
686 (),
697 (),
687 b'remote unchanged',
698 b'remote unchanged',
688 )
699 )
689 elif n1 == a and fl1 == fla: # local unchanged - use remote
700 elif n1 == a and fl1 == fla: # local unchanged - use remote
690 if n1 == n2: # optimization: keep local content
701 if n1 == n2: # optimization: keep local content
691 actions[f] = (
702 actions[f] = (
692 mergestatemod.ACTION_EXEC,
703 mergestatemod.ACTION_EXEC,
693 (fl2,),
704 (fl2,),
694 b'update permissions',
705 b'update permissions',
695 )
706 )
696 else:
707 else:
697 actions[f] = (
708 actions[f] = (
698 mergestatemod.ACTION_GET_OTHER_AND_STORE
709 mergestatemod.ACTION_GET_OTHER_AND_STORE
699 if branchmerge
710 if branchmerge
700 else mergestatemod.ACTION_GET,
711 else mergestatemod.ACTION_GET,
701 (fl2, False),
712 (fl2, False),
702 b'remote is newer',
713 b'remote is newer',
703 )
714 )
715 if branchmerge:
716 commitinfo[f] = b'other'
704 elif nol and n2 == a: # remote only changed 'x'
717 elif nol and n2 == a: # remote only changed 'x'
705 actions[f] = (
718 actions[f] = (
706 mergestatemod.ACTION_EXEC,
719 mergestatemod.ACTION_EXEC,
707 (fl2,),
720 (fl2,),
708 b'update permissions',
721 b'update permissions',
709 )
722 )
710 elif nol and n1 == a: # local only changed 'x'
723 elif nol and n1 == a: # local only changed 'x'
711 actions[f] = (
724 actions[f] = (
712 mergestatemod.ACTION_GET_OTHER_AND_STORE
725 mergestatemod.ACTION_GET_OTHER_AND_STORE
713 if branchmerge
726 if branchmerge
714 else mergestatemod.ACTION_GET,
727 else mergestatemod.ACTION_GET,
715 (fl1, False),
728 (fl1, False),
716 b'remote is newer',
729 b'remote is newer',
717 )
730 )
731 if branchmerge:
732 commitinfo[f] = b'other'
718 else: # both changed something
733 else: # both changed something
719 actions[f] = (
734 actions[f] = (
720 mergestatemod.ACTION_MERGE,
735 mergestatemod.ACTION_MERGE,
721 (f, f, f, False, pa.node()),
736 (f, f, f, False, pa.node()),
722 b'versions differ',
737 b'versions differ',
723 )
738 )
724 elif n1: # file exists only on local side
739 elif n1: # file exists only on local side
725 if f in copied2:
740 if f in copied2:
726 pass # we'll deal with it on m2 side
741 pass # we'll deal with it on m2 side
727 elif (
742 elif (
728 f in branch_copies1.movewithdir
743 f in branch_copies1.movewithdir
729 ): # directory rename, move local
744 ): # directory rename, move local
730 f2 = branch_copies1.movewithdir[f]
745 f2 = branch_copies1.movewithdir[f]
731 if f2 in m2:
746 if f2 in m2:
732 actions[f2] = (
747 actions[f2] = (
733 mergestatemod.ACTION_MERGE,
748 mergestatemod.ACTION_MERGE,
734 (f, f2, None, True, pa.node()),
749 (f, f2, None, True, pa.node()),
735 b'remote directory rename, both created',
750 b'remote directory rename, both created',
736 )
751 )
737 else:
752 else:
738 actions[f2] = (
753 actions[f2] = (
739 mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
754 mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
740 (f, fl1),
755 (f, fl1),
741 b'remote directory rename - move from %s' % f,
756 b'remote directory rename - move from %s' % f,
742 )
757 )
743 elif f in branch_copies1.copy:
758 elif f in branch_copies1.copy:
744 f2 = branch_copies1.copy[f]
759 f2 = branch_copies1.copy[f]
745 actions[f] = (
760 actions[f] = (
746 mergestatemod.ACTION_MERGE,
761 mergestatemod.ACTION_MERGE,
747 (f, f2, f2, False, pa.node()),
762 (f, f2, f2, False, pa.node()),
748 b'local copied/moved from %s' % f2,
763 b'local copied/moved from %s' % f2,
749 )
764 )
750 elif f in ma: # clean, a different, no remote
765 elif f in ma: # clean, a different, no remote
751 if n1 != ma[f]:
766 if n1 != ma[f]:
752 if acceptremote:
767 if acceptremote:
753 actions[f] = (
768 actions[f] = (
754 mergestatemod.ACTION_REMOVE,
769 mergestatemod.ACTION_REMOVE,
755 None,
770 None,
756 b'remote delete',
771 b'remote delete',
757 )
772 )
758 else:
773 else:
759 actions[f] = (
774 actions[f] = (
760 mergestatemod.ACTION_CHANGED_DELETED,
775 mergestatemod.ACTION_CHANGED_DELETED,
761 (f, None, f, False, pa.node()),
776 (f, None, f, False, pa.node()),
762 b'prompt changed/deleted',
777 b'prompt changed/deleted',
763 )
778 )
764 elif n1 == addednodeid:
779 elif n1 == addednodeid:
765 # This file was locally added. We should forget it instead of
780 # This file was locally added. We should forget it instead of
766 # deleting it.
781 # deleting it.
767 actions[f] = (
782 actions[f] = (
768 mergestatemod.ACTION_FORGET,
783 mergestatemod.ACTION_FORGET,
769 None,
784 None,
770 b'remote deleted',
785 b'remote deleted',
771 )
786 )
772 else:
787 else:
773 actions[f] = (
788 actions[f] = (
774 mergestatemod.ACTION_REMOVE,
789 mergestatemod.ACTION_REMOVE,
775 None,
790 None,
776 b'other deleted',
791 b'other deleted',
777 )
792 )
778 elif n2: # file exists only on remote side
793 elif n2: # file exists only on remote side
779 if f in copied1:
794 if f in copied1:
780 pass # we'll deal with it on m1 side
795 pass # we'll deal with it on m1 side
781 elif f in branch_copies2.movewithdir:
796 elif f in branch_copies2.movewithdir:
782 f2 = branch_copies2.movewithdir[f]
797 f2 = branch_copies2.movewithdir[f]
783 if f2 in m1:
798 if f2 in m1:
784 actions[f2] = (
799 actions[f2] = (
785 mergestatemod.ACTION_MERGE,
800 mergestatemod.ACTION_MERGE,
786 (f2, f, None, False, pa.node()),
801 (f2, f, None, False, pa.node()),
787 b'local directory rename, both created',
802 b'local directory rename, both created',
788 )
803 )
789 else:
804 else:
790 actions[f2] = (
805 actions[f2] = (
791 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
806 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
792 (f, fl2),
807 (f, fl2),
793 b'local directory rename - get from %s' % f,
808 b'local directory rename - get from %s' % f,
794 )
809 )
795 elif f in branch_copies2.copy:
810 elif f in branch_copies2.copy:
796 f2 = branch_copies2.copy[f]
811 f2 = branch_copies2.copy[f]
797 if f2 in m2:
812 if f2 in m2:
798 actions[f] = (
813 actions[f] = (
799 mergestatemod.ACTION_MERGE,
814 mergestatemod.ACTION_MERGE,
800 (f2, f, f2, False, pa.node()),
815 (f2, f, f2, False, pa.node()),
801 b'remote copied from %s' % f2,
816 b'remote copied from %s' % f2,
802 )
817 )
803 else:
818 else:
804 actions[f] = (
819 actions[f] = (
805 mergestatemod.ACTION_MERGE,
820 mergestatemod.ACTION_MERGE,
806 (f2, f, f2, True, pa.node()),
821 (f2, f, f2, True, pa.node()),
807 b'remote moved from %s' % f2,
822 b'remote moved from %s' % f2,
808 )
823 )
809 elif f not in ma:
824 elif f not in ma:
810 # local unknown, remote created: the logic is described by the
825 # local unknown, remote created: the logic is described by the
811 # following table:
826 # following table:
812 #
827 #
813 # force branchmerge different | action
828 # force branchmerge different | action
814 # n * * | create
829 # n * * | create
815 # y n * | create
830 # y n * | create
816 # y y n | create
831 # y y n | create
817 # y y y | merge
832 # y y y | merge
818 #
833 #
819 # Checking whether the files are different is expensive, so we
834 # Checking whether the files are different is expensive, so we
820 # don't do that when we can avoid it.
835 # don't do that when we can avoid it.
821 if not force:
836 if not force:
822 actions[f] = (
837 actions[f] = (
823 mergestatemod.ACTION_CREATED,
838 mergestatemod.ACTION_CREATED,
824 (fl2,),
839 (fl2,),
825 b'remote created',
840 b'remote created',
826 )
841 )
827 elif not branchmerge:
842 elif not branchmerge:
828 actions[f] = (
843 actions[f] = (
829 mergestatemod.ACTION_CREATED,
844 mergestatemod.ACTION_CREATED,
830 (fl2,),
845 (fl2,),
831 b'remote created',
846 b'remote created',
832 )
847 )
833 else:
848 else:
834 actions[f] = (
849 actions[f] = (
835 mergestatemod.ACTION_CREATED_MERGE,
850 mergestatemod.ACTION_CREATED_MERGE,
836 (fl2, pa.node()),
851 (fl2, pa.node()),
837 b'remote created, get or merge',
852 b'remote created, get or merge',
838 )
853 )
839 elif n2 != ma[f]:
854 elif n2 != ma[f]:
840 df = None
855 df = None
841 for d in branch_copies1.dirmove:
856 for d in branch_copies1.dirmove:
842 if f.startswith(d):
857 if f.startswith(d):
843 # new file added in a directory that was moved
858 # new file added in a directory that was moved
844 df = branch_copies1.dirmove[d] + f[len(d) :]
859 df = branch_copies1.dirmove[d] + f[len(d) :]
845 break
860 break
846 if df is not None and df in m1:
861 if df is not None and df in m1:
847 actions[df] = (
862 actions[df] = (
848 mergestatemod.ACTION_MERGE,
863 mergestatemod.ACTION_MERGE,
849 (df, f, f, False, pa.node()),
864 (df, f, f, False, pa.node()),
850 b'local directory rename - respect move '
865 b'local directory rename - respect move '
851 b'from %s' % f,
866 b'from %s' % f,
852 )
867 )
853 elif acceptremote:
868 elif acceptremote:
854 actions[f] = (
869 actions[f] = (
855 mergestatemod.ACTION_CREATED,
870 mergestatemod.ACTION_CREATED,
856 (fl2,),
871 (fl2,),
857 b'remote recreating',
872 b'remote recreating',
858 )
873 )
859 else:
874 else:
860 actions[f] = (
875 actions[f] = (
861 mergestatemod.ACTION_DELETED_CHANGED,
876 mergestatemod.ACTION_DELETED_CHANGED,
862 (None, f, f, False, pa.node()),
877 (None, f, f, False, pa.node()),
863 b'prompt deleted/changed',
878 b'prompt deleted/changed',
864 )
879 )
865
880
866 if repo.ui.configbool(b'experimental', b'merge.checkpathconflicts'):
881 if repo.ui.configbool(b'experimental', b'merge.checkpathconflicts'):
867 # If we are merging, look for path conflicts.
882 # If we are merging, look for path conflicts.
868 checkpathconflicts(repo, wctx, p2, actions)
883 checkpathconflicts(repo, wctx, p2, actions)
869
884
870 narrowmatch = repo.narrowmatch()
885 narrowmatch = repo.narrowmatch()
871 if not narrowmatch.always():
886 if not narrowmatch.always():
872 # Updates "actions" in place
887 # Updates "actions" in place
873 _filternarrowactions(narrowmatch, branchmerge, actions)
888 _filternarrowactions(narrowmatch, branchmerge, actions)
874
889
875 renamedelete = branch_copies1.renamedelete
890 renamedelete = branch_copies1.renamedelete
876 renamedelete.update(branch_copies2.renamedelete)
891 renamedelete.update(branch_copies2.renamedelete)
877
892
878 return mergeresult(actions, diverge, renamedelete)
893 return mergeresult(actions, diverge, renamedelete, commitinfo)
879
894
880
895
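The "local unknown, remote created" table inside manifestmerge() above reduces to a tiny truth function: only a forced branch merge where the file contents actually differ needs a merge, and every other row is a plain create (the real code defers the expensive content check by emitting ACTION_CREATED_MERGE). unknown_file_action() is a hypothetical illustration, not a Mercurial API.

def unknown_file_action(force, branchmerge, differs):
    # mirrors the force/branchmerge/different table: y y y -> merge, else create
    if force and branchmerge and differs:
        return 'merge'
    return 'create'

for row in [(False, True, True), (True, False, True), (True, True, False), (True, True, True)]:
    print(row, '->', unknown_file_action(*row))
# (False, True, True) -> create
# (True, False, True) -> create
# (True, True, False) -> create
# (True, True, True) -> merge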
881 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
896 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
882 """Resolves false conflicts where the nodeid changed but the content
897 """Resolves false conflicts where the nodeid changed but the content
883 remained the same."""
898 remained the same."""
884 # We force a copy of actions.items() because we're going to mutate
899 # We force a copy of actions.items() because we're going to mutate
885 # actions as we resolve trivial conflicts.
900 # actions as we resolve trivial conflicts.
886 for f, (m, args, msg) in list(actions.items()):
901 for f, (m, args, msg) in list(actions.items()):
887 if (
902 if (
888 m == mergestatemod.ACTION_CHANGED_DELETED
903 m == mergestatemod.ACTION_CHANGED_DELETED
889 and f in ancestor
904 and f in ancestor
890 and not wctx[f].cmp(ancestor[f])
905 and not wctx[f].cmp(ancestor[f])
891 ):
906 ):
892 # local did change but ended up with same content
907 # local did change but ended up with same content
893 actions[f] = mergestatemod.ACTION_REMOVE, None, b'prompt same'
908 actions[f] = mergestatemod.ACTION_REMOVE, None, b'prompt same'
894 elif (
909 elif (
895 m == mergestatemod.ACTION_DELETED_CHANGED
910 m == mergestatemod.ACTION_DELETED_CHANGED
896 and f in ancestor
911 and f in ancestor
897 and not mctx[f].cmp(ancestor[f])
912 and not mctx[f].cmp(ancestor[f])
898 ):
913 ):
899 # remote did change but ended up with same content
914 # remote did change but ended up with same content
900 del actions[f] # don't get = keep local deleted
915 del actions[f] # don't get = keep local deleted
901
916
902
917
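A minimal standalone sketch of the false-conflict rule in _resolvetrivial() above: a changed/deleted ('cd') prompt is downgraded to a plain remove when the local side still matches the ancestor, and a deleted/changed ('dc') prompt is dropped when the remote side does. Contents are plain bytes here rather than filectx objects, and resolve_trivial() is a hypothetical helper.

def resolve_trivial(actions, local, remote, ancestor):
    for f, (kind, args, msg) in list(actions.items()):
        if kind == 'cd' and f in ancestor and local.get(f) == ancestor[f]:
            actions[f] = ('r', None, 'prompt same')   # local didn't really change
        elif kind == 'dc' and f in ancestor and remote.get(f) == ancestor[f]:
            del actions[f]                            # remote didn't really change

acts = {'a': ('cd', None, 'prompt changed/deleted')}
resolve_trivial(acts, {'a': b'x'}, {}, {'a': b'x'})
print(acts)  # {'a': ('r', None, 'prompt same')}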
903 def calculateupdates(
918 def calculateupdates(
904 repo,
919 repo,
905 wctx,
920 wctx,
906 mctx,
921 mctx,
907 ancestors,
922 ancestors,
908 branchmerge,
923 branchmerge,
909 force,
924 force,
910 acceptremote,
925 acceptremote,
911 followcopies,
926 followcopies,
912 matcher=None,
927 matcher=None,
913 mergeforce=False,
928 mergeforce=False,
914 ):
929 ):
915 """
930 """
916 Calculate the actions needed to merge mctx into wctx using ancestors
931 Calculate the actions needed to merge mctx into wctx using ancestors
917
932
918 Uses manifestmerge() to merge the manifests and get the list of actions
933 Uses manifestmerge() to merge the manifests and get the list of actions
919 required to merge the two manifests. If there are multiple ancestors, uses
934 required to merge the two manifests. If there are multiple ancestors, uses
920 bid merge if enabled.
935 bid merge if enabled.
921
936
922 Also filters out actions which are not required if the repository is sparse.
937 Also filters out actions which are not required if the repository is sparse.
923
938
924 Returns mergeresult object same as manifestmerge().
939 Returns mergeresult object same as manifestmerge().
925 """
940 """
926 # Avoid cycle.
941 # Avoid cycle.
927 from . import sparse
942 from . import sparse
928
943
929 if len(ancestors) == 1: # default
944 if len(ancestors) == 1: # default
930 mresult = manifestmerge(
945 mresult = manifestmerge(
931 repo,
946 repo,
932 wctx,
947 wctx,
933 mctx,
948 mctx,
934 ancestors[0],
949 ancestors[0],
935 branchmerge,
950 branchmerge,
936 force,
951 force,
937 matcher,
952 matcher,
938 acceptremote,
953 acceptremote,
939 followcopies,
954 followcopies,
940 )
955 )
941 _checkunknownfiles(repo, wctx, mctx, force, mresult.actions, mergeforce)
956 _checkunknownfiles(repo, wctx, mctx, force, mresult.actions, mergeforce)
942
957
943 else: # only when merge.preferancestor=* - the default
958 else: # only when merge.preferancestor=* - the default
944 repo.ui.note(
959 repo.ui.note(
945 _(b"note: merging %s and %s using bids from ancestors %s\n")
960 _(b"note: merging %s and %s using bids from ancestors %s\n")
946 % (
961 % (
947 wctx,
962 wctx,
948 mctx,
963 mctx,
949 _(b' and ').join(pycompat.bytestr(anc) for anc in ancestors),
964 _(b' and ').join(pycompat.bytestr(anc) for anc in ancestors),
950 )
965 )
951 )
966 )
952
967
953 # Call for bids
968 # Call for bids
954 fbids = (
969 fbids = (
955 {}
970 {}
956 ) # mapping filename to bids (action method to list of actions)
971 ) # mapping filename to bids (action method to list of actions)
957 diverge, renamedelete = None, None
972 diverge, renamedelete = None, None
958 for ancestor in ancestors:
973 for ancestor in ancestors:
959 repo.ui.note(_(b'\ncalculating bids for ancestor %s\n') % ancestor)
974 repo.ui.note(_(b'\ncalculating bids for ancestor %s\n') % ancestor)
960 mresult1 = manifestmerge(
975 mresult1 = manifestmerge(
961 repo,
976 repo,
962 wctx,
977 wctx,
963 mctx,
978 mctx,
964 ancestor,
979 ancestor,
965 branchmerge,
980 branchmerge,
966 force,
981 force,
967 matcher,
982 matcher,
968 acceptremote,
983 acceptremote,
969 followcopies,
984 followcopies,
970 forcefulldiff=True,
985 forcefulldiff=True,
971 )
986 )
972 _checkunknownfiles(
987 _checkunknownfiles(
973 repo, wctx, mctx, force, mresult1.actions, mergeforce
988 repo, wctx, mctx, force, mresult1.actions, mergeforce
974 )
989 )
975
990
976 # Track the shortest set of warnings on the theory that bid
991 # Track the shortest set of warnings on the theory that bid
977 # merge will correctly incorporate more information
992 # merge will correctly incorporate more information
978 if diverge is None or len(mresult1.diverge) < len(diverge):
993 if diverge is None or len(mresult1.diverge) < len(diverge):
979 diverge = mresult1.diverge
994 diverge = mresult1.diverge
980 if renamedelete is None or len(renamedelete) < len(
995 if renamedelete is None or len(renamedelete) < len(
981 mresult1.renamedelete
996 mresult1.renamedelete
982 ):
997 ):
983 renamedelete = mresult1.renamedelete
998 renamedelete = mresult1.renamedelete
984
999
985 for f, a in sorted(pycompat.iteritems(mresult1.actions)):
1000 for f, a in sorted(pycompat.iteritems(mresult1.actions)):
986 m, args, msg = a
1001 m, args, msg = a
987 if m == mergestatemod.ACTION_GET_OTHER_AND_STORE:
1002 if m == mergestatemod.ACTION_GET_OTHER_AND_STORE:
988 m = mergestatemod.ACTION_GET
1003 m = mergestatemod.ACTION_GET
989 repo.ui.debug(b' %s: %s -> %s\n' % (f, msg, m))
1004 repo.ui.debug(b' %s: %s -> %s\n' % (f, msg, m))
990 if f in fbids:
1005 if f in fbids:
991 d = fbids[f]
1006 d = fbids[f]
992 if m in d:
1007 if m in d:
993 d[m].append(a)
1008 d[m].append(a)
994 else:
1009 else:
995 d[m] = [a]
1010 d[m] = [a]
996 else:
1011 else:
997 fbids[f] = {m: [a]}
1012 fbids[f] = {m: [a]}
998
1013
999 # Pick the best bid for each file
1014 # Pick the best bid for each file
1000 repo.ui.note(_(b'\nauction for merging merge bids\n'))
1015 repo.ui.note(_(b'\nauction for merging merge bids\n'))
1001 actions = {}
1016 actions = {}
1002 for f, bids in sorted(fbids.items()):
1017 for f, bids in sorted(fbids.items()):
1003 # bids is a mapping from action method to list of actions
1018 # bids is a mapping from action method to list of actions
1004 # Consensus?
1019 # Consensus?
1005 if len(bids) == 1: # all bids are the same kind of method
1020 if len(bids) == 1: # all bids are the same kind of method
1006 m, l = list(bids.items())[0]
1021 m, l = list(bids.items())[0]
1007 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
1022 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
1008 repo.ui.note(_(b" %s: consensus for %s\n") % (f, m))
1023 repo.ui.note(_(b" %s: consensus for %s\n") % (f, m))
1009 actions[f] = l[0]
1024 actions[f] = l[0]
1010 continue
1025 continue
1011 # If keep is an option, just do it.
1026 # If keep is an option, just do it.
1012 if mergestatemod.ACTION_KEEP in bids:
1027 if mergestatemod.ACTION_KEEP in bids:
1013 repo.ui.note(_(b" %s: picking 'keep' action\n") % f)
1028 repo.ui.note(_(b" %s: picking 'keep' action\n") % f)
1014 actions[f] = bids[mergestatemod.ACTION_KEEP][0]
1029 actions[f] = bids[mergestatemod.ACTION_KEEP][0]
1015 continue
1030 continue
1016 # If there are gets and they all agree [how could they not?], do it.
1031 # If there are gets and they all agree [how could they not?], do it.
1017 if mergestatemod.ACTION_GET in bids:
1032 if mergestatemod.ACTION_GET in bids:
1018 ga0 = bids[mergestatemod.ACTION_GET][0]
1033 ga0 = bids[mergestatemod.ACTION_GET][0]
1019 if all(a == ga0 for a in bids[mergestatemod.ACTION_GET][1:]):
1034 if all(a == ga0 for a in bids[mergestatemod.ACTION_GET][1:]):
1020 repo.ui.note(_(b" %s: picking 'get' action\n") % f)
1035 repo.ui.note(_(b" %s: picking 'get' action\n") % f)
1021 actions[f] = ga0
1036 actions[f] = ga0
1022 continue
1037 continue
1023 # TODO: Consider other simple actions such as mode changes
1038 # TODO: Consider other simple actions such as mode changes
1024 # Handle inefficient democrazy.
1039 # Handle inefficient democrazy.
1025 repo.ui.note(_(b' %s: multiple bids for merge action:\n') % f)
1040 repo.ui.note(_(b' %s: multiple bids for merge action:\n') % f)
1026 for m, l in sorted(bids.items()):
1041 for m, l in sorted(bids.items()):
1027 for _f, args, msg in l:
1042 for _f, args, msg in l:
1028 repo.ui.note(b' %s -> %s\n' % (msg, m))
1043 repo.ui.note(b' %s -> %s\n' % (msg, m))
1029 # Pick random action. TODO: Instead, prompt user when resolving
1044 # Pick random action. TODO: Instead, prompt user when resolving
1030 m, l = list(bids.items())[0]
1045 m, l = list(bids.items())[0]
1031 repo.ui.warn(
1046 repo.ui.warn(
1032 _(b' %s: ambiguous merge - picked %s action\n') % (f, m)
1047 _(b' %s: ambiguous merge - picked %s action\n') % (f, m)
1033 )
1048 )
1034 actions[f] = l[0]
1049 actions[f] = l[0]
1035 continue
1050 continue
1036 repo.ui.note(_(b'end of auction\n\n'))
1051 repo.ui.note(_(b'end of auction\n\n'))
1037 mresult = mergeresult(actions, diverge, renamedelete)
1052 # TODO: think about commitinfo when bid merge is used
1053 mresult = mergeresult(actions, diverge, renamedelete, {})
1038
1054
1039 if wctx.rev() is None:
1055 if wctx.rev() is None:
1040 fractions = _forgetremoved(wctx, mctx, branchmerge)
1056 fractions = _forgetremoved(wctx, mctx, branchmerge)
1041 mresult.actions.update(fractions)
1057 mresult.actions.update(fractions)
1042
1058
1043 prunedactions = sparse.filterupdatesactions(
1059 prunedactions = sparse.filterupdatesactions(
1044 repo, wctx, mctx, branchmerge, mresult.actions
1060 repo, wctx, mctx, branchmerge, mresult.actions
1045 )
1061 )
1046 _resolvetrivial(repo, wctx, mctx, ancestors[0], mresult.actions)
1062 _resolvetrivial(repo, wctx, mctx, ancestors[0], mresult.actions)
1047
1063
1048 mresult.setactions(prunedactions)
1064 mresult.setactions(prunedactions)
1049 return mresult
1065 return mresult
1050
1066
1051
1067
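The bid auction in calculateupdates() above picks, per file: a consensus bid if every ancestor agrees, then a 'keep' bid if one exists, then an unambiguous 'get' bid, and otherwise falls back to an arbitrary bid with a warning. Here is a minimal standalone sketch of that ordering over plain dicts; auction() and the single-letter action kinds are illustrative stand-ins, not the real implementation.

def auction(fbids):
    chosen = {}
    for f, bids in sorted(fbids.items()):
        if len(bids) == 1:                       # all bids use the same action kind
            kind, bidlist = next(iter(bids.items()))
            if all(b == bidlist[0] for b in bidlist[1:]):
                chosen[f] = bidlist[0]           # full consensus
                continue
        if 'k' in bids:                          # keeping the local file is always safe
            chosen[f] = bids['k'][0]
            continue
        if 'g' in bids and all(b == bids['g'][0] for b in bids['g'][1:]):
            chosen[f] = bids['g'][0]             # all 'get' bids agree
            continue
        chosen[f] = next(iter(bids.values()))[0] # ambiguous: pick arbitrarily
    return chosen

fbids = {'a': {'g': [('g', 1), ('g', 1)]}, 'b': {'k': [('k', None)], 'm': [('m', 2)]}}
print(auction(fbids))  # {'a': ('g', 1), 'b': ('k', None)}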
1052 def _getcwd():
1068 def _getcwd():
1053 try:
1069 try:
1054 return encoding.getcwd()
1070 return encoding.getcwd()
1055 except OSError as err:
1071 except OSError as err:
1056 if err.errno == errno.ENOENT:
1072 if err.errno == errno.ENOENT:
1057 return None
1073 return None
1058 raise
1074 raise
1059
1075
1060
1076
1061 def batchremove(repo, wctx, actions):
1077 def batchremove(repo, wctx, actions):
1062 """apply removes to the working directory
1078 """apply removes to the working directory
1063
1079
1064 yields tuples for progress updates
1080 yields tuples for progress updates
1065 """
1081 """
1066 verbose = repo.ui.verbose
1082 verbose = repo.ui.verbose
1067 cwd = _getcwd()
1083 cwd = _getcwd()
1068 i = 0
1084 i = 0
1069 for f, args, msg in actions:
1085 for f, args, msg in actions:
1070 repo.ui.debug(b" %s: %s -> r\n" % (f, msg))
1086 repo.ui.debug(b" %s: %s -> r\n" % (f, msg))
1071 if verbose:
1087 if verbose:
1072 repo.ui.note(_(b"removing %s\n") % f)
1088 repo.ui.note(_(b"removing %s\n") % f)
1073 wctx[f].audit()
1089 wctx[f].audit()
1074 try:
1090 try:
1075 wctx[f].remove(ignoremissing=True)
1091 wctx[f].remove(ignoremissing=True)
1076 except OSError as inst:
1092 except OSError as inst:
1077 repo.ui.warn(
1093 repo.ui.warn(
1078 _(b"update failed to remove %s: %s!\n") % (f, inst.strerror)
1094 _(b"update failed to remove %s: %s!\n") % (f, inst.strerror)
1079 )
1095 )
1080 if i == 100:
1096 if i == 100:
1081 yield i, f
1097 yield i, f
1082 i = 0
1098 i = 0
1083 i += 1
1099 i += 1
1084 if i > 0:
1100 if i > 0:
1085 yield i, f
1101 yield i, f
1086
1102
1087 if cwd and not _getcwd():
1103 if cwd and not _getcwd():
1088 # cwd was removed in the course of removing files; print a helpful
1104 # cwd was removed in the course of removing files; print a helpful
1089 # warning.
1105 # warning.
1090 repo.ui.warn(
1106 repo.ui.warn(
1091 _(
1107 _(
1092 b"current directory was removed\n"
1108 b"current directory was removed\n"
1093 b"(consider changing to repo root: %s)\n"
1109 b"(consider changing to repo root: %s)\n"
1094 )
1110 )
1095 % repo.root
1111 % repo.root
1096 )
1112 )
1097
1113
1098
1114
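batchremove() above (and batchget() below) report progress with the same pattern: process items one by one but only yield a count roughly every 100 items, plus a final partial count, so the caller can update a progress bar cheaply. A minimal standalone sketch of that pattern, with batch_progress() as a hypothetical name:

def batch_progress(items, process):
    i = 0
    last = None
    for item in items:
        process(item)
        if i == 100:
            yield i, item   # periodic progress update
            i = 0
        i += 1
        last = item
    if i > 0:
        yield i, last       # final partial batch

counts = [n for n, _item in batch_progress(range(250), lambda x: None)]
print(counts)  # [100, 100, 50]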
1099 def batchget(repo, mctx, wctx, wantfiledata, actions):
1115 def batchget(repo, mctx, wctx, wantfiledata, actions):
1100 """apply gets to the working directory
1116 """apply gets to the working directory
1101
1117
1102 mctx is the context to get from
1118 mctx is the context to get from
1103
1119
1104 Yields arbitrarily many (False, tuple) for progress updates, followed by
1120 Yields arbitrarily many (False, tuple) for progress updates, followed by
1105 exactly one (True, filedata). When wantfiledata is false, filedata is an
1121 exactly one (True, filedata). When wantfiledata is false, filedata is an
1106 empty dict. When wantfiledata is true, filedata[f] is a triple (mode, size,
1122 empty dict. When wantfiledata is true, filedata[f] is a triple (mode, size,
1107 mtime) of the file f written for each action.
1123 mtime) of the file f written for each action.
1108 """
1124 """
1109 filedata = {}
1125 filedata = {}
1110 verbose = repo.ui.verbose
1126 verbose = repo.ui.verbose
1111 fctx = mctx.filectx
1127 fctx = mctx.filectx
1112 ui = repo.ui
1128 ui = repo.ui
1113 i = 0
1129 i = 0
1114 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1130 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1115 for f, (flags, backup), msg in actions:
1131 for f, (flags, backup), msg in actions:
1116 repo.ui.debug(b" %s: %s -> g\n" % (f, msg))
1132 repo.ui.debug(b" %s: %s -> g\n" % (f, msg))
1117 if verbose:
1133 if verbose:
1118 repo.ui.note(_(b"getting %s\n") % f)
1134 repo.ui.note(_(b"getting %s\n") % f)
1119
1135
1120 if backup:
1136 if backup:
1121 # If a file or directory exists with the same name, back that
1137 # If a file or directory exists with the same name, back that
1122 # up. Otherwise, look to see if there is a file that conflicts
1138 # up. Otherwise, look to see if there is a file that conflicts
1123 # with a directory this file is in, and if so, back that up.
1139 # with a directory this file is in, and if so, back that up.
1124 conflicting = f
1140 conflicting = f
1125 if not repo.wvfs.lexists(f):
1141 if not repo.wvfs.lexists(f):
1126 for p in pathutil.finddirs(f):
1142 for p in pathutil.finddirs(f):
1127 if repo.wvfs.isfileorlink(p):
1143 if repo.wvfs.isfileorlink(p):
1128 conflicting = p
1144 conflicting = p
1129 break
1145 break
1130 if repo.wvfs.lexists(conflicting):
1146 if repo.wvfs.lexists(conflicting):
1131 orig = scmutil.backuppath(ui, repo, conflicting)
1147 orig = scmutil.backuppath(ui, repo, conflicting)
1132 util.rename(repo.wjoin(conflicting), orig)
1148 util.rename(repo.wjoin(conflicting), orig)
1133 wfctx = wctx[f]
1149 wfctx = wctx[f]
1134 wfctx.clearunknown()
1150 wfctx.clearunknown()
1135 atomictemp = ui.configbool(b"experimental", b"update.atomic-file")
1151 atomictemp = ui.configbool(b"experimental", b"update.atomic-file")
1136 size = wfctx.write(
1152 size = wfctx.write(
1137 fctx(f).data(),
1153 fctx(f).data(),
1138 flags,
1154 flags,
1139 backgroundclose=True,
1155 backgroundclose=True,
1140 atomictemp=atomictemp,
1156 atomictemp=atomictemp,
1141 )
1157 )
1142 if wantfiledata:
1158 if wantfiledata:
1143 s = wfctx.lstat()
1159 s = wfctx.lstat()
1144 mode = s.st_mode
1160 mode = s.st_mode
1145 mtime = s[stat.ST_MTIME]
1161 mtime = s[stat.ST_MTIME]
1146 filedata[f] = (mode, size, mtime) # for dirstate.normal
1162 filedata[f] = (mode, size, mtime) # for dirstate.normal
1147 if i == 100:
1163 if i == 100:
1148 yield False, (i, f)
1164 yield False, (i, f)
1149 i = 0
1165 i = 0
1150 i += 1
1166 i += 1
1151 if i > 0:
1167 if i > 0:
1152 yield False, (i, f)
1168 yield False, (i, f)
1153 yield True, filedata
1169 yield True, filedata
1154
1170
1155
1171
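On the consuming side, the batchget() protocol documented above is: any number of (False, progress) tuples followed by exactly one (True, filedata). A minimal standalone sketch of a consumer; fake_batchget() and drain() are hypothetical illustrations, not Mercurial APIs.

def fake_batchget():
    yield False, (100, 'a.txt')
    yield False, (23, 'z.txt')
    yield True, {'a.txt': (0o644, 12, 1700000000)}

def drain(gen):
    total = 0
    filedata = {}
    for done, payload in gen:
        if done:
            filedata = payload          # the single final (True, filedata) item
        else:
            count, _lastfile = payload
            total += count              # progress update
    return total, filedata

print(drain(fake_batchget()))  # (123, {'a.txt': (420, 12, 1700000000)})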
1156 def _prefetchfiles(repo, ctx, actions):
1172 def _prefetchfiles(repo, ctx, actions):
1157 """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
1173 """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
1158 of merge actions. ``ctx`` is the context being merged in."""
1174 of merge actions. ``ctx`` is the context being merged in."""
1159
1175
1160 # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
1176 # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
1161 # don't touch the context to be merged in. 'cd' is skipped, because
1177 # don't touch the context to be merged in. 'cd' is skipped, because
1162 # changed/deleted never resolves to something from the remote side.
1178 # changed/deleted never resolves to something from the remote side.
1163 oplist = [
1179 oplist = [
1164 actions[a]
1180 actions[a]
1165 for a in (
1181 for a in (
1166 mergestatemod.ACTION_GET,
1182 mergestatemod.ACTION_GET,
1167 mergestatemod.ACTION_DELETED_CHANGED,
1183 mergestatemod.ACTION_DELETED_CHANGED,
1168 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
1184 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
1169 mergestatemod.ACTION_MERGE,
1185 mergestatemod.ACTION_MERGE,
1170 )
1186 )
1171 ]
1187 ]
1172 prefetch = scmutil.prefetchfiles
1188 prefetch = scmutil.prefetchfiles
1173 matchfiles = scmutil.matchfiles
1189 matchfiles = scmutil.matchfiles
1174 prefetch(
1190 prefetch(
1175 repo,
1191 repo,
1176 [
1192 [
1177 (
1193 (
1178 ctx.rev(),
1194 ctx.rev(),
1179 matchfiles(
1195 matchfiles(
1180 repo, [f for sublist in oplist for f, args, msg in sublist]
1196 repo, [f for sublist in oplist for f, args, msg in sublist]
1181 ),
1197 ),
1182 )
1198 )
1183 ],
1199 ],
1184 )
1200 )
1185
1201
1186
1202
1187 @attr.s(frozen=True)
1203 @attr.s(frozen=True)
1188 class updateresult(object):
1204 class updateresult(object):
1189 updatedcount = attr.ib()
1205 updatedcount = attr.ib()
1190 mergedcount = attr.ib()
1206 mergedcount = attr.ib()
1191 removedcount = attr.ib()
1207 removedcount = attr.ib()
1192 unresolvedcount = attr.ib()
1208 unresolvedcount = attr.ib()
1193
1209
1194 def isempty(self):
1210 def isempty(self):
1195 return not (
1211 return not (
1196 self.updatedcount
1212 self.updatedcount
1197 or self.mergedcount
1213 or self.mergedcount
1198 or self.removedcount
1214 or self.removedcount
1199 or self.unresolvedcount
1215 or self.unresolvedcount
1200 )
1216 )
1201
1217
1202
1218
1203 def emptyactions():
1219 def emptyactions():
1204 """create an actions dict, to be populated and passed to applyupdates()"""
1220 """create an actions dict, to be populated and passed to applyupdates()"""
1205 return {
1221 return {
1206 m: []
1222 m: []
1207 for m in (
1223 for m in (
1208 mergestatemod.ACTION_ADD,
1224 mergestatemod.ACTION_ADD,
1209 mergestatemod.ACTION_ADD_MODIFIED,
1225 mergestatemod.ACTION_ADD_MODIFIED,
1210 mergestatemod.ACTION_FORGET,
1226 mergestatemod.ACTION_FORGET,
1211 mergestatemod.ACTION_GET,
1227 mergestatemod.ACTION_GET,
1212 mergestatemod.ACTION_CHANGED_DELETED,
1228 mergestatemod.ACTION_CHANGED_DELETED,
1213 mergestatemod.ACTION_DELETED_CHANGED,
1229 mergestatemod.ACTION_DELETED_CHANGED,
1214 mergestatemod.ACTION_REMOVE,
1230 mergestatemod.ACTION_REMOVE,
1215 mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
1231 mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
1216 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
1232 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
1217 mergestatemod.ACTION_MERGE,
1233 mergestatemod.ACTION_MERGE,
1218 mergestatemod.ACTION_EXEC,
1234 mergestatemod.ACTION_EXEC,
1219 mergestatemod.ACTION_KEEP,
1235 mergestatemod.ACTION_KEEP,
1220 mergestatemod.ACTION_PATH_CONFLICT,
1236 mergestatemod.ACTION_PATH_CONFLICT,
1221 mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
1237 mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
1222 mergestatemod.ACTION_GET_OTHER_AND_STORE,
1238 mergestatemod.ACTION_GET_OTHER_AND_STORE,
1223 )
1239 )
1224 }
1240 }
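The dict maps each action constant to a list of (filename, args, message) tuples, which is the shape applyupdates() below iterates over. A hypothetical population, with the file name and args invented for illustration:

    actions = emptyactions()
    actions[mergestatemod.ACTION_GET].append((b'foo.txt', (b'', False), b'remote created'))
    actions[mergestatemod.ACTION_REMOVE].append((b'bar.txt', None, b'other deleted'))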
1225
1241
1226
1242
1227 def applyupdates(
1243 def applyupdates(
1228 repo, actions, wctx, mctx, overwrite, wantfiledata, labels=None
1244 repo, actions, wctx, mctx, overwrite, wantfiledata, labels=None
1229 ):
1245 ):
1230 """apply the merge action list to the working directory
1246 """apply the merge action list to the working directory
1231
1247
1232 wctx is the working copy context
1248 wctx is the working copy context
1233 mctx is the context to be merged into the working copy
1249 mctx is the context to be merged into the working copy
1234
1250
1235 Return a tuple of (counts, filedata), where counts is an updateresult
1251 Return a tuple of (counts, filedata), where counts is an updateresult
1236 whose fields (updated, merged, removed, unresolved) describe how many
1252 whose fields (updated, merged, removed, unresolved) describe how many
1237 files were affected by the update, and filedata is as described in
1253 files were affected by the update, and filedata is as described in
1238 batchget.
1254 batchget.
1239 """
1255 """
1240
1256
1241 _prefetchfiles(repo, mctx, actions)
1257 _prefetchfiles(repo, mctx, actions)
1242
1258
1243 updated, merged, removed = 0, 0, 0
1259 updated, merged, removed = 0, 0, 0
1244 ms = mergestatemod.mergestate.clean(
1260 ms = mergestatemod.mergestate.clean(
1245 repo, wctx.p1().node(), mctx.node(), labels
1261 repo, wctx.p1().node(), mctx.node(), labels
1246 )
1262 )
1247
1263
1248 # add ACTION_GET_OTHER_AND_STORE to mergestate
1264 # add ACTION_GET_OTHER_AND_STORE to mergestate
1249 for e in actions[mergestatemod.ACTION_GET_OTHER_AND_STORE]:
1265 for e in actions[mergestatemod.ACTION_GET_OTHER_AND_STORE]:
1250 ms.addmergedother(e[0])
1266 ms.addmergedother(e[0])
1251
1267
1252 moves = []
1268 moves = []
1253 for m, l in actions.items():
1269 for m, l in actions.items():
1254 l.sort()
1270 l.sort()
1255
1271
1256 # 'cd' and 'dc' actions are treated like other merge conflicts
1272 # 'cd' and 'dc' actions are treated like other merge conflicts
1257 mergeactions = sorted(actions[mergestatemod.ACTION_CHANGED_DELETED])
1273 mergeactions = sorted(actions[mergestatemod.ACTION_CHANGED_DELETED])
1258 mergeactions.extend(sorted(actions[mergestatemod.ACTION_DELETED_CHANGED]))
1274 mergeactions.extend(sorted(actions[mergestatemod.ACTION_DELETED_CHANGED]))
1259 mergeactions.extend(actions[mergestatemod.ACTION_MERGE])
1275 mergeactions.extend(actions[mergestatemod.ACTION_MERGE])
1260 for f, args, msg in mergeactions:
1276 for f, args, msg in mergeactions:
1261 f1, f2, fa, move, anc = args
1277 f1, f2, fa, move, anc = args
1262 if f == b'.hgsubstate': # merged internally
1278 if f == b'.hgsubstate': # merged internally
1263 continue
1279 continue
1264 if f1 is None:
1280 if f1 is None:
1265 fcl = filemerge.absentfilectx(wctx, fa)
1281 fcl = filemerge.absentfilectx(wctx, fa)
1266 else:
1282 else:
1267 repo.ui.debug(b" preserving %s for resolve of %s\n" % (f1, f))
1283 repo.ui.debug(b" preserving %s for resolve of %s\n" % (f1, f))
1268 fcl = wctx[f1]
1284 fcl = wctx[f1]
1269 if f2 is None:
1285 if f2 is None:
1270 fco = filemerge.absentfilectx(mctx, fa)
1286 fco = filemerge.absentfilectx(mctx, fa)
1271 else:
1287 else:
1272 fco = mctx[f2]
1288 fco = mctx[f2]
1273 actx = repo[anc]
1289 actx = repo[anc]
1274 if fa in actx:
1290 if fa in actx:
1275 fca = actx[fa]
1291 fca = actx[fa]
1276 else:
1292 else:
1277 # TODO: move to absentfilectx
1293 # TODO: move to absentfilectx
1278 fca = repo.filectx(f1, fileid=nullrev)
1294 fca = repo.filectx(f1, fileid=nullrev)
1279 ms.add(fcl, fco, fca, f)
1295 ms.add(fcl, fco, fca, f)
1280 if f1 != f and move:
1296 if f1 != f and move:
1281 moves.append(f1)
1297 moves.append(f1)
1282
1298
1283 # remove renamed files after safely stored
1299 # remove renamed files after safely stored
1284 for f in moves:
1300 for f in moves:
1285 if wctx[f].lexists():
1301 if wctx[f].lexists():
1286 repo.ui.debug(b"removing %s\n" % f)
1302 repo.ui.debug(b"removing %s\n" % f)
1287 wctx[f].audit()
1303 wctx[f].audit()
1288 wctx[f].remove()
1304 wctx[f].remove()
1289
1305
1290 numupdates = sum(
1306 numupdates = sum(
1291 len(l) for m, l in actions.items() if m != mergestatemod.ACTION_KEEP
1307 len(l) for m, l in actions.items() if m != mergestatemod.ACTION_KEEP
1292 )
1308 )
1293 progress = repo.ui.makeprogress(
1309 progress = repo.ui.makeprogress(
1294 _(b'updating'), unit=_(b'files'), total=numupdates
1310 _(b'updating'), unit=_(b'files'), total=numupdates
1295 )
1311 )
1296
1312
1297 if [
1313 if [
1298 a
1314 a
1299 for a in actions[mergestatemod.ACTION_REMOVE]
1315 for a in actions[mergestatemod.ACTION_REMOVE]
1300 if a[0] == b'.hgsubstate'
1316 if a[0] == b'.hgsubstate'
1301 ]:
1317 ]:
1302 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1318 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1303
1319
1304 # record path conflicts
1320 # record path conflicts
1305 for f, args, msg in actions[mergestatemod.ACTION_PATH_CONFLICT]:
1321 for f, args, msg in actions[mergestatemod.ACTION_PATH_CONFLICT]:
1306 f1, fo = args
1322 f1, fo = args
1307 s = repo.ui.status
1323 s = repo.ui.status
1308 s(
1324 s(
1309 _(
1325 _(
1310 b"%s: path conflict - a file or link has the same name as a "
1326 b"%s: path conflict - a file or link has the same name as a "
1311 b"directory\n"
1327 b"directory\n"
1312 )
1328 )
1313 % f
1329 % f
1314 )
1330 )
1315 if fo == b'l':
1331 if fo == b'l':
1316 s(_(b"the local file has been renamed to %s\n") % f1)
1332 s(_(b"the local file has been renamed to %s\n") % f1)
1317 else:
1333 else:
1318 s(_(b"the remote file has been renamed to %s\n") % f1)
1334 s(_(b"the remote file has been renamed to %s\n") % f1)
1319 s(_(b"resolve manually then use 'hg resolve --mark %s'\n") % f)
1335 s(_(b"resolve manually then use 'hg resolve --mark %s'\n") % f)
1320 ms.addpathconflict(f, f1, fo)
1336 ms.addpathconflict(f, f1, fo)
1321 progress.increment(item=f)
1337 progress.increment(item=f)
1322
1338
1323 # When merging in-memory, we can't support worker processes, so set the
1339 # When merging in-memory, we can't support worker processes, so set the
1324 # per-item cost at 0 in that case.
1340 # per-item cost at 0 in that case.
1325 cost = 0 if wctx.isinmemory() else 0.001
1341 cost = 0 if wctx.isinmemory() else 0.001
1326
1342
1327 # remove in parallel (must come before resolving path conflicts and getting)
1343 # remove in parallel (must come before resolving path conflicts and getting)
1328 prog = worker.worker(
1344 prog = worker.worker(
1329 repo.ui,
1345 repo.ui,
1330 cost,
1346 cost,
1331 batchremove,
1347 batchremove,
1332 (repo, wctx),
1348 (repo, wctx),
1333 actions[mergestatemod.ACTION_REMOVE],
1349 actions[mergestatemod.ACTION_REMOVE],
1334 )
1350 )
1335 for i, item in prog:
1351 for i, item in prog:
1336 progress.increment(step=i, item=item)
1352 progress.increment(step=i, item=item)
1337 removed = len(actions[mergestatemod.ACTION_REMOVE])
1353 removed = len(actions[mergestatemod.ACTION_REMOVE])
1338
1354
1339 # resolve path conflicts (must come before getting)
1355 # resolve path conflicts (must come before getting)
1340 for f, args, msg in actions[mergestatemod.ACTION_PATH_CONFLICT_RESOLVE]:
1356 for f, args, msg in actions[mergestatemod.ACTION_PATH_CONFLICT_RESOLVE]:
1341 repo.ui.debug(b" %s: %s -> pr\n" % (f, msg))
1357 repo.ui.debug(b" %s: %s -> pr\n" % (f, msg))
1342 (f0, origf0) = args
1358 (f0, origf0) = args
1343 if wctx[f0].lexists():
1359 if wctx[f0].lexists():
1344 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1360 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1345 wctx[f].audit()
1361 wctx[f].audit()
1346 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1362 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1347 wctx[f0].remove()
1363 wctx[f0].remove()
1348 progress.increment(item=f)
1364 progress.increment(item=f)
1349
1365
1350 # get in parallel.
1366 # get in parallel.
1351 threadsafe = repo.ui.configbool(
1367 threadsafe = repo.ui.configbool(
1352 b'experimental', b'worker.wdir-get-thread-safe'
1368 b'experimental', b'worker.wdir-get-thread-safe'
1353 )
1369 )
1354 prog = worker.worker(
1370 prog = worker.worker(
1355 repo.ui,
1371 repo.ui,
1356 cost,
1372 cost,
1357 batchget,
1373 batchget,
1358 (repo, mctx, wctx, wantfiledata),
1374 (repo, mctx, wctx, wantfiledata),
1359 actions[mergestatemod.ACTION_GET],
1375 actions[mergestatemod.ACTION_GET],
1360 threadsafe=threadsafe,
1376 threadsafe=threadsafe,
1361 hasretval=True,
1377 hasretval=True,
1362 )
1378 )
1363 getfiledata = {}
1379 getfiledata = {}
1364 for final, res in prog:
1380 for final, res in prog:
1365 if final:
1381 if final:
1366 getfiledata = res
1382 getfiledata = res
1367 else:
1383 else:
1368 i, item = res
1384 i, item = res
1369 progress.increment(step=i, item=item)
1385 progress.increment(step=i, item=item)
1370 updated = len(actions[mergestatemod.ACTION_GET])
1386 updated = len(actions[mergestatemod.ACTION_GET])
1371
1387
1372 if [a for a in actions[mergestatemod.ACTION_GET] if a[0] == b'.hgsubstate']:
1388 if [a for a in actions[mergestatemod.ACTION_GET] if a[0] == b'.hgsubstate']:
1373 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1389 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1374
1390
1375 # forget (manifest only, just log it) (must come first)
1391 # forget (manifest only, just log it) (must come first)
1376 for f, args, msg in actions[mergestatemod.ACTION_FORGET]:
1392 for f, args, msg in actions[mergestatemod.ACTION_FORGET]:
1377 repo.ui.debug(b" %s: %s -> f\n" % (f, msg))
1393 repo.ui.debug(b" %s: %s -> f\n" % (f, msg))
1378 progress.increment(item=f)
1394 progress.increment(item=f)
1379
1395
1380 # re-add (manifest only, just log it)
1396 # re-add (manifest only, just log it)
1381 for f, args, msg in actions[mergestatemod.ACTION_ADD]:
1397 for f, args, msg in actions[mergestatemod.ACTION_ADD]:
1382 repo.ui.debug(b" %s: %s -> a\n" % (f, msg))
1398 repo.ui.debug(b" %s: %s -> a\n" % (f, msg))
1383 progress.increment(item=f)
1399 progress.increment(item=f)
1384
1400
1385 # re-add/mark as modified (manifest only, just log it)
1401 # re-add/mark as modified (manifest only, just log it)
1386 for f, args, msg in actions[mergestatemod.ACTION_ADD_MODIFIED]:
1402 for f, args, msg in actions[mergestatemod.ACTION_ADD_MODIFIED]:
1387 repo.ui.debug(b" %s: %s -> am\n" % (f, msg))
1403 repo.ui.debug(b" %s: %s -> am\n" % (f, msg))
1388 progress.increment(item=f)
1404 progress.increment(item=f)
1389
1405
1390 # keep (noop, just log it)
1406 # keep (noop, just log it)
1391 for f, args, msg in actions[mergestatemod.ACTION_KEEP]:
1407 for f, args, msg in actions[mergestatemod.ACTION_KEEP]:
1392 repo.ui.debug(b" %s: %s -> k\n" % (f, msg))
1408 repo.ui.debug(b" %s: %s -> k\n" % (f, msg))
1393 # no progress
1409 # no progress
1394
1410
1395 # directory rename, move local
1411 # directory rename, move local
1396 for f, args, msg in actions[mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL]:
1412 for f, args, msg in actions[mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL]:
1397 repo.ui.debug(b" %s: %s -> dm\n" % (f, msg))
1413 repo.ui.debug(b" %s: %s -> dm\n" % (f, msg))
1398 progress.increment(item=f)
1414 progress.increment(item=f)
1399 f0, flags = args
1415 f0, flags = args
1400 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1416 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1401 wctx[f].audit()
1417 wctx[f].audit()
1402 wctx[f].write(wctx.filectx(f0).data(), flags)
1418 wctx[f].write(wctx.filectx(f0).data(), flags)
1403 wctx[f0].remove()
1419 wctx[f0].remove()
1404 updated += 1
1420 updated += 1
1405
1421
1406 # local directory rename, get
1422 # local directory rename, get
1407 for f, args, msg in actions[mergestatemod.ACTION_LOCAL_DIR_RENAME_GET]:
1423 for f, args, msg in actions[mergestatemod.ACTION_LOCAL_DIR_RENAME_GET]:
1408 repo.ui.debug(b" %s: %s -> dg\n" % (f, msg))
1424 repo.ui.debug(b" %s: %s -> dg\n" % (f, msg))
1409 progress.increment(item=f)
1425 progress.increment(item=f)
1410 f0, flags = args
1426 f0, flags = args
1411 repo.ui.note(_(b"getting %s to %s\n") % (f0, f))
1427 repo.ui.note(_(b"getting %s to %s\n") % (f0, f))
1412 wctx[f].write(mctx.filectx(f0).data(), flags)
1428 wctx[f].write(mctx.filectx(f0).data(), flags)
1413 updated += 1
1429 updated += 1
1414
1430
1415 # exec
1431 # exec
1416 for f, args, msg in actions[mergestatemod.ACTION_EXEC]:
1432 for f, args, msg in actions[mergestatemod.ACTION_EXEC]:
1417 repo.ui.debug(b" %s: %s -> e\n" % (f, msg))
1433 repo.ui.debug(b" %s: %s -> e\n" % (f, msg))
1418 progress.increment(item=f)
1434 progress.increment(item=f)
1419 (flags,) = args
1435 (flags,) = args
1420 wctx[f].audit()
1436 wctx[f].audit()
1421 wctx[f].setflags(b'l' in flags, b'x' in flags)
1437 wctx[f].setflags(b'l' in flags, b'x' in flags)
1422 updated += 1
1438 updated += 1
1423
1439
1424 # the ordering is important here -- ms.mergedriver will raise if the merge
1440 # the ordering is important here -- ms.mergedriver will raise if the merge
1425 # driver has changed, and we want to be able to bypass it when overwrite is
1441 # driver has changed, and we want to be able to bypass it when overwrite is
1426 # True
1442 # True
1427 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1443 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1428
1444
1429 if usemergedriver:
1445 if usemergedriver:
1430 if wctx.isinmemory():
1446 if wctx.isinmemory():
1431 raise error.InMemoryMergeConflictsError(
1447 raise error.InMemoryMergeConflictsError(
1432 b"in-memory merge does not support mergedriver"
1448 b"in-memory merge does not support mergedriver"
1433 )
1449 )
1434 ms.commit()
1450 ms.commit()
1435 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1451 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1436 # the driver might leave some files unresolved
1452 # the driver might leave some files unresolved
1437 unresolvedf = set(ms.unresolved())
1453 unresolvedf = set(ms.unresolved())
1438 if not proceed:
1454 if not proceed:
1439 # XXX setting unresolved to at least 1 is a hack to make sure we
1455 # XXX setting unresolved to at least 1 is a hack to make sure we
1440 # error out
1456 # error out
1441 return updateresult(
1457 return updateresult(
1442 updated, merged, removed, max(len(unresolvedf), 1)
1458 updated, merged, removed, max(len(unresolvedf), 1)
1443 )
1459 )
1444 newactions = []
1460 newactions = []
1445 for f, args, msg in mergeactions:
1461 for f, args, msg in mergeactions:
1446 if f in unresolvedf:
1462 if f in unresolvedf:
1447 newactions.append((f, args, msg))
1463 newactions.append((f, args, msg))
1448 mergeactions = newactions
1464 mergeactions = newactions
1449
1465
1450 try:
1466 try:
1451 # premerge
1467 # premerge
1452 tocomplete = []
1468 tocomplete = []
1453 for f, args, msg in mergeactions:
1469 for f, args, msg in mergeactions:
1454 repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg))
1470 repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg))
1455 progress.increment(item=f)
1471 progress.increment(item=f)
1456 if f == b'.hgsubstate': # subrepo states need updating
1472 if f == b'.hgsubstate': # subrepo states need updating
1457 subrepoutil.submerge(
1473 subrepoutil.submerge(
1458 repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
1474 repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
1459 )
1475 )
1460 continue
1476 continue
1461 wctx[f].audit()
1477 wctx[f].audit()
1462 complete, r = ms.preresolve(f, wctx)
1478 complete, r = ms.preresolve(f, wctx)
1463 if not complete:
1479 if not complete:
1464 numupdates += 1
1480 numupdates += 1
1465 tocomplete.append((f, args, msg))
1481 tocomplete.append((f, args, msg))
1466
1482
1467 # merge
1483 # merge
1468 for f, args, msg in tocomplete:
1484 for f, args, msg in tocomplete:
1469 repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg))
1485 repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg))
1470 progress.increment(item=f, total=numupdates)
1486 progress.increment(item=f, total=numupdates)
1471 ms.resolve(f, wctx)
1487 ms.resolve(f, wctx)
1472
1488
1473 finally:
1489 finally:
1474 ms.commit()
1490 ms.commit()
1475
1491
1476 unresolved = ms.unresolvedcount()
1492 unresolved = ms.unresolvedcount()
1477
1493
1478 if (
1494 if (
1479 usemergedriver
1495 usemergedriver
1480 and not unresolved
1496 and not unresolved
1481 and ms.mdstate() != mergestatemod.MERGE_DRIVER_STATE_SUCCESS
1497 and ms.mdstate() != mergestatemod.MERGE_DRIVER_STATE_SUCCESS
1482 ):
1498 ):
1483 if not driverconclude(repo, ms, wctx, labels=labels):
1499 if not driverconclude(repo, ms, wctx, labels=labels):
1484 # XXX setting unresolved to at least 1 is a hack to make sure we
1500 # XXX setting unresolved to at least 1 is a hack to make sure we
1485 # error out
1501 # error out
1486 unresolved = max(unresolved, 1)
1502 unresolved = max(unresolved, 1)
1487
1503
1488 ms.commit()
1504 ms.commit()
1489
1505
1490 msupdated, msmerged, msremoved = ms.counts()
1506 msupdated, msmerged, msremoved = ms.counts()
1491 updated += msupdated
1507 updated += msupdated
1492 merged += msmerged
1508 merged += msmerged
1493 removed += msremoved
1509 removed += msremoved
1494
1510
1495 extraactions = ms.actions()
1511 extraactions = ms.actions()
1496 if extraactions:
1512 if extraactions:
1497 mfiles = {a[0] for a in actions[mergestatemod.ACTION_MERGE]}
1513 mfiles = {a[0] for a in actions[mergestatemod.ACTION_MERGE]}
1498 for k, acts in pycompat.iteritems(extraactions):
1514 for k, acts in pycompat.iteritems(extraactions):
1499 actions[k].extend(acts)
1515 actions[k].extend(acts)
1500 if k == mergestatemod.ACTION_GET and wantfiledata:
1516 if k == mergestatemod.ACTION_GET and wantfiledata:
1501 # no filedata until mergestate is updated to provide it
1517 # no filedata until mergestate is updated to provide it
1502 for a in acts:
1518 for a in acts:
1503 getfiledata[a[0]] = None
1519 getfiledata[a[0]] = None
1504 # Remove these files from actions[ACTION_MERGE] as well. This is
1520 # Remove these files from actions[ACTION_MERGE] as well. This is
1505 # important because in recordupdates, files in actions[ACTION_MERGE]
1521 # important because in recordupdates, files in actions[ACTION_MERGE]
1506 # are processed after files in other actions, and the merge driver
1522 # are processed after files in other actions, and the merge driver
1507 # might add files to those actions via extraactions above. This can
1523 # might add files to those actions via extraactions above. This can
1508 # lead to a file being recorded twice, with poor results. This is
1524 # lead to a file being recorded twice, with poor results. This is
1509 # especially problematic for actions[ACTION_REMOVE] (currently only
1525 # especially problematic for actions[ACTION_REMOVE] (currently only
1510 # possible with the merge driver in the initial merge process;
1526 # possible with the merge driver in the initial merge process;
1511 # interrupted merges don't go through this flow).
1527 # interrupted merges don't go through this flow).
1512 #
1528 #
1513 # The real fix here is to have indexes by both file and action so
1529 # The real fix here is to have indexes by both file and action so
1514 # that when the action for a file is changed it is automatically
1530 # that when the action for a file is changed it is automatically
1515 # reflected in the other action lists. But that involves a more
1531 # reflected in the other action lists. But that involves a more
1516 # complex data structure, so this will do for now.
1532 # complex data structure, so this will do for now.
1517 #
1533 #
1518 # We don't need to do the same operation for 'dc' and 'cd' because
1534 # We don't need to do the same operation for 'dc' and 'cd' because
1519 # those lists aren't consulted again.
1535 # those lists aren't consulted again.
1520 mfiles.difference_update(a[0] for a in acts)
1536 mfiles.difference_update(a[0] for a in acts)
1521
1537
1522 actions[mergestatemod.ACTION_MERGE] = [
1538 actions[mergestatemod.ACTION_MERGE] = [
1523 a for a in actions[mergestatemod.ACTION_MERGE] if a[0] in mfiles
1539 a for a in actions[mergestatemod.ACTION_MERGE] if a[0] in mfiles
1524 ]
1540 ]
1525
1541
1526 progress.complete()
1542 progress.complete()
1527 assert len(getfiledata) == (
1543 assert len(getfiledata) == (
1528 len(actions[mergestatemod.ACTION_GET]) if wantfiledata else 0
1544 len(actions[mergestatemod.ACTION_GET]) if wantfiledata else 0
1529 )
1545 )
1530 return updateresult(updated, merged, removed, unresolved), getfiledata
1546 return updateresult(updated, merged, removed, unresolved), getfiledata
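To make that return contract concrete, a hedged sketch of a caller; repo, actions, wc and mctx stand for objects the surrounding code already holds:

    stats, getfiledata = applyupdates(
        repo, actions, wc, mctx, overwrite=False, wantfiledata=True, labels=None
    )
    if stats.unresolvedcount:
        repo.ui.warn(b'merge left unresolved files; run hg resolve\n')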
1531
1547
1532
1548
1533 def _advertisefsmonitor(repo, num_gets, p1node):
1549 def _advertisefsmonitor(repo, num_gets, p1node):
1534 # Advertise fsmonitor when its presence could be useful.
1550 # Advertise fsmonitor when its presence could be useful.
1535 #
1551 #
1536 # We only advertise when performing an update from an empty working
1552 # We only advertise when performing an update from an empty working
1537 # directory. This typically only occurs during initial clone.
1553 # directory. This typically only occurs during initial clone.
1538 #
1554 #
1539 # We give users a mechanism to disable the warning in case it is
1555 # We give users a mechanism to disable the warning in case it is
1540 # annoying.
1556 # annoying.
1541 #
1557 #
1542 # We only advertise it on Linux and macOS because that's where fsmonitor is
1558 # We only advertise it on Linux and macOS because that's where fsmonitor is
1543 # considered stable.
1559 # considered stable.
1544 fsmonitorwarning = repo.ui.configbool(b'fsmonitor', b'warn_when_unused')
1560 fsmonitorwarning = repo.ui.configbool(b'fsmonitor', b'warn_when_unused')
1545 fsmonitorthreshold = repo.ui.configint(
1561 fsmonitorthreshold = repo.ui.configint(
1546 b'fsmonitor', b'warn_update_file_count'
1562 b'fsmonitor', b'warn_update_file_count'
1547 )
1563 )
1548 try:
1564 try:
1549 # avoid cycle: extensions -> cmdutil -> merge
1565 # avoid cycle: extensions -> cmdutil -> merge
1550 from . import extensions
1566 from . import extensions
1551
1567
1552 extensions.find(b'fsmonitor')
1568 extensions.find(b'fsmonitor')
1553 fsmonitorenabled = repo.ui.config(b'fsmonitor', b'mode') != b'off'
1569 fsmonitorenabled = repo.ui.config(b'fsmonitor', b'mode') != b'off'
1554 # We intentionally don't look at whether fsmonitor has disabled
1570 # We intentionally don't look at whether fsmonitor has disabled
1555 # itself because a) fsmonitor may have already printed a warning
1571 # itself because a) fsmonitor may have already printed a warning
1556 # b) we only care about the config state here.
1572 # b) we only care about the config state here.
1557 except KeyError:
1573 except KeyError:
1558 fsmonitorenabled = False
1574 fsmonitorenabled = False
1559
1575
1560 if (
1576 if (
1561 fsmonitorwarning
1577 fsmonitorwarning
1562 and not fsmonitorenabled
1578 and not fsmonitorenabled
1563 and p1node == nullid
1579 and p1node == nullid
1564 and num_gets >= fsmonitorthreshold
1580 and num_gets >= fsmonitorthreshold
1565 and pycompat.sysplatform.startswith((b'linux', b'darwin'))
1581 and pycompat.sysplatform.startswith((b'linux', b'darwin'))
1566 ):
1582 ):
1567 repo.ui.warn(
1583 repo.ui.warn(
1568 _(
1584 _(
1569 b'(warning: large working directory being used without '
1585 b'(warning: large working directory being used without '
1570 b'fsmonitor enabled; enable fsmonitor to improve performance; '
1586 b'fsmonitor enabled; enable fsmonitor to improve performance; '
1571 b'see "hg help -e fsmonitor")\n'
1587 b'see "hg help -e fsmonitor")\n'
1572 )
1588 )
1573 )
1589 )
1574
1590
1575
1591
1576 UPDATECHECK_ABORT = b'abort' # handled at higher layers
1592 UPDATECHECK_ABORT = b'abort' # handled at higher layers
1577 UPDATECHECK_NONE = b'none'
1593 UPDATECHECK_NONE = b'none'
1578 UPDATECHECK_LINEAR = b'linear'
1594 UPDATECHECK_LINEAR = b'linear'
1579 UPDATECHECK_NO_CONFLICT = b'noconflict'
1595 UPDATECHECK_NO_CONFLICT = b'noconflict'
1580
1596
1581
1597
1582 def update(
1598 def update(
1583 repo,
1599 repo,
1584 node,
1600 node,
1585 branchmerge,
1601 branchmerge,
1586 force,
1602 force,
1587 ancestor=None,
1603 ancestor=None,
1588 mergeancestor=False,
1604 mergeancestor=False,
1589 labels=None,
1605 labels=None,
1590 matcher=None,
1606 matcher=None,
1591 mergeforce=False,
1607 mergeforce=False,
1592 updatedirstate=True,
1608 updatedirstate=True,
1593 updatecheck=None,
1609 updatecheck=None,
1594 wc=None,
1610 wc=None,
1595 ):
1611 ):
1596 """
1612 """
1597 Perform a merge between the working directory and the given node
1613 Perform a merge between the working directory and the given node
1598
1614
1599 node = the node to update to
1615 node = the node to update to
1600 branchmerge = whether to merge between branches
1616 branchmerge = whether to merge between branches
1601 force = whether to force branch merging or file overwriting
1617 force = whether to force branch merging or file overwriting
1602 matcher = a matcher to filter file lists (dirstate not updated)
1618 matcher = a matcher to filter file lists (dirstate not updated)
1603 mergeancestor = whether it is merging with an ancestor. If true,
1619 mergeancestor = whether it is merging with an ancestor. If true,
1604 we should accept the incoming changes for any prompts that occur.
1620 we should accept the incoming changes for any prompts that occur.
1605 If false, merging with an ancestor (fast-forward) is only allowed
1621 If false, merging with an ancestor (fast-forward) is only allowed
1606 between different named branches. This flag is used by rebase extension
1622 between different named branches. This flag is used by rebase extension
1607 as a temporary fix and should be avoided in general.
1623 as a temporary fix and should be avoided in general.
1608 labels = labels to use for base, local and other
1624 labels = labels to use for base, local and other
1609 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1625 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1610 this is True, then 'force' should be True as well.
1626 this is True, then 'force' should be True as well.
1611
1627
1612 The table below shows all the behaviors of the update command given the
1628 The table below shows all the behaviors of the update command given the
1613 -c/--check and -C/--clean or no options, whether the working directory is
1629 -c/--check and -C/--clean or no options, whether the working directory is
1614 dirty, whether a revision is specified, and the relationship of the parent
1630 dirty, whether a revision is specified, and the relationship of the parent
1615 rev to the target rev (linear or not). Match from top first. The -n
1631 rev to the target rev (linear or not). Match from top first. The -n
1616 option doesn't exist on the command line, but represents the
1632 option doesn't exist on the command line, but represents the
1617 experimental.updatecheck=noconflict option.
1633 experimental.updatecheck=noconflict option.
1618
1634
1619 This logic is tested by test-update-branches.t.
1635 This logic is tested by test-update-branches.t.
1620
1636
1621 -c -C -n -m dirty rev linear | result
1637 -c -C -n -m dirty rev linear | result
1622 y y * * * * * | (1)
1638 y y * * * * * | (1)
1623 y * y * * * * | (1)
1639 y * y * * * * | (1)
1624 y * * y * * * | (1)
1640 y * * y * * * | (1)
1625 * y y * * * * | (1)
1641 * y y * * * * | (1)
1626 * y * y * * * | (1)
1642 * y * y * * * | (1)
1627 * * y y * * * | (1)
1643 * * y y * * * | (1)
1628 * * * * * n n | x
1644 * * * * * n n | x
1629 * * * * n * * | ok
1645 * * * * n * * | ok
1630 n n n n y * y | merge
1646 n n n n y * y | merge
1631 n n n n y y n | (2)
1647 n n n n y y n | (2)
1632 n n n y y * * | merge
1648 n n n y y * * | merge
1633 n n y n y * * | merge if no conflict
1649 n n y n y * * | merge if no conflict
1634 n y n n y * * | discard
1650 n y n n y * * | discard
1635 y n n n y * * | (3)
1651 y n n n y * * | (3)
1636
1652
1637 x = can't happen
1653 x = can't happen
1638 * = don't-care
1654 * = don't-care
1639 1 = incompatible options (checked in commands.py)
1655 1 = incompatible options (checked in commands.py)
1640 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1656 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1641 3 = abort: uncommitted changes (checked in commands.py)
1657 3 = abort: uncommitted changes (checked in commands.py)
1642
1658
1643 The merge is performed inside ``wc``, a workingctx-like object. It defaults
1659 The merge is performed inside ``wc``, a workingctx-like object. It defaults
1644 to repo[None] if None is passed.
1660 to repo[None] if None is passed.
1645
1661
1646 Return an updateresult object, i.e. the stats part of applyupdates()'s return value.
1662 Return an updateresult object, i.e. the stats part of applyupdates()'s return value.
1647 """
1663 """
1648 # Avoid cycle.
1664 # Avoid cycle.
1649 from . import sparse
1665 from . import sparse
1650
1666
1651 # This function used to find the default destination if node was None, but
1667 # This function used to find the default destination if node was None, but
1652 # that's now in destutil.py.
1668 # that's now in destutil.py.
1653 assert node is not None
1669 assert node is not None
1654 if not branchmerge and not force:
1670 if not branchmerge and not force:
1655 # TODO: remove the default once all callers that pass branchmerge=False
1671 # TODO: remove the default once all callers that pass branchmerge=False
1656 # and force=False pass a value for updatecheck. We may want to allow
1672 # and force=False pass a value for updatecheck. We may want to allow
1657 # updatecheck='abort' to better suppport some of these callers.
1673 # updatecheck='abort' to better suppport some of these callers.
1658 if updatecheck is None:
1674 if updatecheck is None:
1659 updatecheck = UPDATECHECK_LINEAR
1675 updatecheck = UPDATECHECK_LINEAR
1660 if updatecheck not in (
1676 if updatecheck not in (
1661 UPDATECHECK_NONE,
1677 UPDATECHECK_NONE,
1662 UPDATECHECK_LINEAR,
1678 UPDATECHECK_LINEAR,
1663 UPDATECHECK_NO_CONFLICT,
1679 UPDATECHECK_NO_CONFLICT,
1664 ):
1680 ):
1665 raise ValueError(
1681 raise ValueError(
1666 r'Invalid updatecheck %r (can accept %r)'
1682 r'Invalid updatecheck %r (can accept %r)'
1667 % (
1683 % (
1668 updatecheck,
1684 updatecheck,
1669 (
1685 (
1670 UPDATECHECK_NONE,
1686 UPDATECHECK_NONE,
1671 UPDATECHECK_LINEAR,
1687 UPDATECHECK_LINEAR,
1672 UPDATECHECK_NO_CONFLICT,
1688 UPDATECHECK_NO_CONFLICT,
1673 ),
1689 ),
1674 )
1690 )
1675 )
1691 )
1676 if wc is not None and wc.isinmemory():
1692 if wc is not None and wc.isinmemory():
1677 maybe_wlock = util.nullcontextmanager()
1693 maybe_wlock = util.nullcontextmanager()
1678 else:
1694 else:
1679 maybe_wlock = repo.wlock()
1695 maybe_wlock = repo.wlock()
1680 with maybe_wlock:
1696 with maybe_wlock:
1681 if wc is None:
1697 if wc is None:
1682 wc = repo[None]
1698 wc = repo[None]
1683 pl = wc.parents()
1699 pl = wc.parents()
1684 p1 = pl[0]
1700 p1 = pl[0]
1685 p2 = repo[node]
1701 p2 = repo[node]
1686 if ancestor is not None:
1702 if ancestor is not None:
1687 pas = [repo[ancestor]]
1703 pas = [repo[ancestor]]
1688 else:
1704 else:
1689 if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']:
1705 if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']:
1690 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1706 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1691 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1707 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1692 else:
1708 else:
1693 pas = [p1.ancestor(p2, warn=branchmerge)]
1709 pas = [p1.ancestor(p2, warn=branchmerge)]
1694
1710
1695 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)
1711 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)
1696
1712
1697 overwrite = force and not branchmerge
1713 overwrite = force and not branchmerge
1698 ### check phase
1714 ### check phase
1699 if not overwrite:
1715 if not overwrite:
1700 if len(pl) > 1:
1716 if len(pl) > 1:
1701 raise error.Abort(_(b"outstanding uncommitted merge"))
1717 raise error.Abort(_(b"outstanding uncommitted merge"))
1702 ms = mergestatemod.mergestate.read(repo)
1718 ms = mergestatemod.mergestate.read(repo)
1703 if list(ms.unresolved()):
1719 if list(ms.unresolved()):
1704 raise error.Abort(
1720 raise error.Abort(
1705 _(b"outstanding merge conflicts"),
1721 _(b"outstanding merge conflicts"),
1706 hint=_(b"use 'hg resolve' to resolve"),
1722 hint=_(b"use 'hg resolve' to resolve"),
1707 )
1723 )
1708 if branchmerge:
1724 if branchmerge:
1709 if pas == [p2]:
1725 if pas == [p2]:
1710 raise error.Abort(
1726 raise error.Abort(
1711 _(
1727 _(
1712 b"merging with a working directory ancestor"
1728 b"merging with a working directory ancestor"
1713 b" has no effect"
1729 b" has no effect"
1714 )
1730 )
1715 )
1731 )
1716 elif pas == [p1]:
1732 elif pas == [p1]:
1717 if not mergeancestor and wc.branch() == p2.branch():
1733 if not mergeancestor and wc.branch() == p2.branch():
1718 raise error.Abort(
1734 raise error.Abort(
1719 _(b"nothing to merge"),
1735 _(b"nothing to merge"),
1720 hint=_(b"use 'hg update' or check 'hg heads'"),
1736 hint=_(b"use 'hg update' or check 'hg heads'"),
1721 )
1737 )
1722 if not force and (wc.files() or wc.deleted()):
1738 if not force and (wc.files() or wc.deleted()):
1723 raise error.Abort(
1739 raise error.Abort(
1724 _(b"uncommitted changes"),
1740 _(b"uncommitted changes"),
1725 hint=_(b"use 'hg status' to list changes"),
1741 hint=_(b"use 'hg status' to list changes"),
1726 )
1742 )
1727 if not wc.isinmemory():
1743 if not wc.isinmemory():
1728 for s in sorted(wc.substate):
1744 for s in sorted(wc.substate):
1729 wc.sub(s).bailifchanged()
1745 wc.sub(s).bailifchanged()
1730
1746
1731 elif not overwrite:
1747 elif not overwrite:
1732 if p1 == p2: # no-op update
1748 if p1 == p2: # no-op update
1733 # call the hooks and exit early
1749 # call the hooks and exit early
1734 repo.hook(b'preupdate', throw=True, parent1=xp2, parent2=b'')
1750 repo.hook(b'preupdate', throw=True, parent1=xp2, parent2=b'')
1735 repo.hook(b'update', parent1=xp2, parent2=b'', error=0)
1751 repo.hook(b'update', parent1=xp2, parent2=b'', error=0)
1736 return updateresult(0, 0, 0, 0)
1752 return updateresult(0, 0, 0, 0)
1737
1753
1738 if updatecheck == UPDATECHECK_LINEAR and pas not in (
1754 if updatecheck == UPDATECHECK_LINEAR and pas not in (
1739 [p1],
1755 [p1],
1740 [p2],
1756 [p2],
1741 ): # nonlinear
1757 ): # nonlinear
1742 dirty = wc.dirty(missing=True)
1758 dirty = wc.dirty(missing=True)
1743 if dirty:
1759 if dirty:
1744 # Branching is a bit strange to ensure we do the minimal
1760 # Branching is a bit strange to ensure we do the minimal
1745 # number of calls to obsutil.foreground.
1761 # number of calls to obsutil.foreground.
1746 foreground = obsutil.foreground(repo, [p1.node()])
1762 foreground = obsutil.foreground(repo, [p1.node()])
1747 # note: the <node> variable contains a random identifier
1763 # note: the <node> variable contains a random identifier
1748 if repo[node].node() in foreground:
1764 if repo[node].node() in foreground:
1749 pass # allow updating to successors
1765 pass # allow updating to successors
1750 else:
1766 else:
1751 msg = _(b"uncommitted changes")
1767 msg = _(b"uncommitted changes")
1752 hint = _(b"commit or update --clean to discard changes")
1768 hint = _(b"commit or update --clean to discard changes")
1753 raise error.UpdateAbort(msg, hint=hint)
1769 raise error.UpdateAbort(msg, hint=hint)
1754 else:
1770 else:
1755 # Allow jumping branches if clean and specific rev given
1771 # Allow jumping branches if clean and specific rev given
1756 pass
1772 pass
1757
1773
1758 if overwrite:
1774 if overwrite:
1759 pas = [wc]
1775 pas = [wc]
1760 elif not branchmerge:
1776 elif not branchmerge:
1761 pas = [p1]
1777 pas = [p1]
1762
1778
1763 # deprecated config: merge.followcopies
1779 # deprecated config: merge.followcopies
1764 followcopies = repo.ui.configbool(b'merge', b'followcopies')
1780 followcopies = repo.ui.configbool(b'merge', b'followcopies')
1765 if overwrite:
1781 if overwrite:
1766 followcopies = False
1782 followcopies = False
1767 elif not pas[0]:
1783 elif not pas[0]:
1768 followcopies = False
1784 followcopies = False
1769 if not branchmerge and not wc.dirty(missing=True):
1785 if not branchmerge and not wc.dirty(missing=True):
1770 followcopies = False
1786 followcopies = False
1771
1787
1772 ### calculate phase
1788 ### calculate phase
1773 mresult = calculateupdates(
1789 mresult = calculateupdates(
1774 repo,
1790 repo,
1775 wc,
1791 wc,
1776 p2,
1792 p2,
1777 pas,
1793 pas,
1778 branchmerge,
1794 branchmerge,
1779 force,
1795 force,
1780 mergeancestor,
1796 mergeancestor,
1781 followcopies,
1797 followcopies,
1782 matcher=matcher,
1798 matcher=matcher,
1783 mergeforce=mergeforce,
1799 mergeforce=mergeforce,
1784 )
1800 )
1785
1801
1786 actionbyfile = mresult.actions
1802 actionbyfile = mresult.actions
1787
1803
1788 if updatecheck == UPDATECHECK_NO_CONFLICT:
1804 if updatecheck == UPDATECHECK_NO_CONFLICT:
1789 for f, (m, args, msg) in pycompat.iteritems(actionbyfile):
1805 for f, (m, args, msg) in pycompat.iteritems(actionbyfile):
1790 if m not in (
1806 if m not in (
1791 mergestatemod.ACTION_GET,
1807 mergestatemod.ACTION_GET,
1792 mergestatemod.ACTION_KEEP,
1808 mergestatemod.ACTION_KEEP,
1793 mergestatemod.ACTION_EXEC,
1809 mergestatemod.ACTION_EXEC,
1794 mergestatemod.ACTION_REMOVE,
1810 mergestatemod.ACTION_REMOVE,
1795 mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
1811 mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
1796 mergestatemod.ACTION_GET_OTHER_AND_STORE,
1812 mergestatemod.ACTION_GET_OTHER_AND_STORE,
1797 ):
1813 ):
1798 msg = _(b"conflicting changes")
1814 msg = _(b"conflicting changes")
1799 hint = _(b"commit or update --clean to discard changes")
1815 hint = _(b"commit or update --clean to discard changes")
1800 raise error.Abort(msg, hint=hint)
1816 raise error.Abort(msg, hint=hint)
1801
1817
1802 # Prompt and create actions. Most of this is in the resolve phase
1818 # Prompt and create actions. Most of this is in the resolve phase
1803 # already, but we can't handle .hgsubstate in filemerge or
1819 # already, but we can't handle .hgsubstate in filemerge or
1804 # subrepoutil.submerge yet so we have to keep prompting for it.
1820 # subrepoutil.submerge yet so we have to keep prompting for it.
1805 if b'.hgsubstate' in actionbyfile:
1821 if b'.hgsubstate' in actionbyfile:
1806 f = b'.hgsubstate'
1822 f = b'.hgsubstate'
1807 m, args, msg = actionbyfile[f]
1823 m, args, msg = actionbyfile[f]
1808 prompts = filemerge.partextras(labels)
1824 prompts = filemerge.partextras(labels)
1809 prompts[b'f'] = f
1825 prompts[b'f'] = f
1810 if m == mergestatemod.ACTION_CHANGED_DELETED:
1826 if m == mergestatemod.ACTION_CHANGED_DELETED:
1811 if repo.ui.promptchoice(
1827 if repo.ui.promptchoice(
1812 _(
1828 _(
1813 b"local%(l)s changed %(f)s which other%(o)s deleted\n"
1829 b"local%(l)s changed %(f)s which other%(o)s deleted\n"
1814 b"use (c)hanged version or (d)elete?"
1830 b"use (c)hanged version or (d)elete?"
1815 b"$$ &Changed $$ &Delete"
1831 b"$$ &Changed $$ &Delete"
1816 )
1832 )
1817 % prompts,
1833 % prompts,
1818 0,
1834 0,
1819 ):
1835 ):
1820 actionbyfile[f] = (
1836 actionbyfile[f] = (
1821 mergestatemod.ACTION_REMOVE,
1837 mergestatemod.ACTION_REMOVE,
1822 None,
1838 None,
1823 b'prompt delete',
1839 b'prompt delete',
1824 )
1840 )
1825 elif f in p1:
1841 elif f in p1:
1826 actionbyfile[f] = (
1842 actionbyfile[f] = (
1827 mergestatemod.ACTION_ADD_MODIFIED,
1843 mergestatemod.ACTION_ADD_MODIFIED,
1828 None,
1844 None,
1829 b'prompt keep',
1845 b'prompt keep',
1830 )
1846 )
1831 else:
1847 else:
1832 actionbyfile[f] = (
1848 actionbyfile[f] = (
1833 mergestatemod.ACTION_ADD,
1849 mergestatemod.ACTION_ADD,
1834 None,
1850 None,
1835 b'prompt keep',
1851 b'prompt keep',
1836 )
1852 )
1837 elif m == mergestatemod.ACTION_DELETED_CHANGED:
1853 elif m == mergestatemod.ACTION_DELETED_CHANGED:
1838 f1, f2, fa, move, anc = args
1854 f1, f2, fa, move, anc = args
1839 flags = p2[f2].flags()
1855 flags = p2[f2].flags()
1840 if (
1856 if (
1841 repo.ui.promptchoice(
1857 repo.ui.promptchoice(
1842 _(
1858 _(
1843 b"other%(o)s changed %(f)s which local%(l)s deleted\n"
1859 b"other%(o)s changed %(f)s which local%(l)s deleted\n"
1844 b"use (c)hanged version or leave (d)eleted?"
1860 b"use (c)hanged version or leave (d)eleted?"
1845 b"$$ &Changed $$ &Deleted"
1861 b"$$ &Changed $$ &Deleted"
1846 )
1862 )
1847 % prompts,
1863 % prompts,
1848 0,
1864 0,
1849 )
1865 )
1850 == 0
1866 == 0
1851 ):
1867 ):
1852 actionbyfile[f] = (
1868 actionbyfile[f] = (
1853 mergestatemod.ACTION_GET,
1869 mergestatemod.ACTION_GET,
1854 (flags, False),
1870 (flags, False),
1855 b'prompt recreating',
1871 b'prompt recreating',
1856 )
1872 )
1857 else:
1873 else:
1858 del actionbyfile[f]
1874 del actionbyfile[f]
1859
1875
1860 # Convert to dictionary-of-lists format
1876 # Convert to dictionary-of-lists format
1861 actions = emptyactions()
1877 actions = emptyactions()
1862 for f, (m, args, msg) in pycompat.iteritems(actionbyfile):
1878 for f, (m, args, msg) in pycompat.iteritems(actionbyfile):
1863 if m not in actions:
1879 if m not in actions:
1864 actions[m] = []
1880 actions[m] = []
1865 actions[m].append((f, args, msg))
1881 actions[m].append((f, args, msg))
1866
1882
1867 # ACTION_GET_OTHER_AND_STORE is a mergestatemod.ACTION_GET + store in mergestate
1883 # ACTION_GET_OTHER_AND_STORE is a mergestatemod.ACTION_GET + store in mergestate
1868 for e in actions[mergestatemod.ACTION_GET_OTHER_AND_STORE]:
1884 for e in actions[mergestatemod.ACTION_GET_OTHER_AND_STORE]:
1869 actions[mergestatemod.ACTION_GET].append(e)
1885 actions[mergestatemod.ACTION_GET].append(e)
1870
1886
1871 if not util.fscasesensitive(repo.path):
1887 if not util.fscasesensitive(repo.path):
1872 # check collision between files only in p2 for clean update
1888 # check collision between files only in p2 for clean update
1873 if not branchmerge and (
1889 if not branchmerge and (
1874 force or not wc.dirty(missing=True, branch=False)
1890 force or not wc.dirty(missing=True, branch=False)
1875 ):
1891 ):
1876 _checkcollision(repo, p2.manifest(), None)
1892 _checkcollision(repo, p2.manifest(), None)
1877 else:
1893 else:
1878 _checkcollision(repo, wc.manifest(), actions)
1894 _checkcollision(repo, wc.manifest(), actions)
1879
1895
1880 # divergent renames
1896 # divergent renames
1881 for f, fl in sorted(pycompat.iteritems(mresult.diverge)):
1897 for f, fl in sorted(pycompat.iteritems(mresult.diverge)):
1882 repo.ui.warn(
1898 repo.ui.warn(
1883 _(
1899 _(
1884 b"note: possible conflict - %s was renamed "
1900 b"note: possible conflict - %s was renamed "
1885 b"multiple times to:\n"
1901 b"multiple times to:\n"
1886 )
1902 )
1887 % f
1903 % f
1888 )
1904 )
1889 for nf in sorted(fl):
1905 for nf in sorted(fl):
1890 repo.ui.warn(b" %s\n" % nf)
1906 repo.ui.warn(b" %s\n" % nf)
1891
1907
1892 # rename and delete
1908 # rename and delete
1893 for f, fl in sorted(pycompat.iteritems(mresult.renamedelete)):
1909 for f, fl in sorted(pycompat.iteritems(mresult.renamedelete)):
1894 repo.ui.warn(
1910 repo.ui.warn(
1895 _(
1911 _(
1896 b"note: possible conflict - %s was deleted "
1912 b"note: possible conflict - %s was deleted "
1897 b"and renamed to:\n"
1913 b"and renamed to:\n"
1898 )
1914 )
1899 % f
1915 % f
1900 )
1916 )
1901 for nf in sorted(fl):
1917 for nf in sorted(fl):
1902 repo.ui.warn(b" %s\n" % nf)
1918 repo.ui.warn(b" %s\n" % nf)
1903
1919
1904 ### apply phase
1920 ### apply phase
1905 if not branchmerge: # just jump to the new rev
1921 if not branchmerge: # just jump to the new rev
1906 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, b''
1922 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, b''
1907 # If we're doing a partial update, we need to skip updating
1923 # If we're doing a partial update, we need to skip updating
1908 # the dirstate.
1924 # the dirstate.
1909 always = matcher is None or matcher.always()
1925 always = matcher is None or matcher.always()
1910 updatedirstate = updatedirstate and always and not wc.isinmemory()
1926 updatedirstate = updatedirstate and always and not wc.isinmemory()
1911 if updatedirstate:
1927 if updatedirstate:
1912 repo.hook(b'preupdate', throw=True, parent1=xp1, parent2=xp2)
1928 repo.hook(b'preupdate', throw=True, parent1=xp1, parent2=xp2)
1913 # note that we're in the middle of an update
1929 # note that we're in the middle of an update
1914 repo.vfs.write(b'updatestate', p2.hex())
1930 repo.vfs.write(b'updatestate', p2.hex())
1915
1931
1916 _advertisefsmonitor(
1932 _advertisefsmonitor(
1917 repo, len(actions[mergestatemod.ACTION_GET]), p1.node()
1933 repo, len(actions[mergestatemod.ACTION_GET]), p1.node()
1918 )
1934 )
1919
1935
1920 wantfiledata = updatedirstate and not branchmerge
1936 wantfiledata = updatedirstate and not branchmerge
1921 stats, getfiledata = applyupdates(
1937 stats, getfiledata = applyupdates(
1922 repo, actions, wc, p2, overwrite, wantfiledata, labels=labels
1938 repo, actions, wc, p2, overwrite, wantfiledata, labels=labels
1923 )
1939 )
1924
1940
1925 if updatedirstate:
1941 if updatedirstate:
1926 with repo.dirstate.parentchange():
1942 with repo.dirstate.parentchange():
1927 repo.setparents(fp1, fp2)
1943 repo.setparents(fp1, fp2)
1928 mergestatemod.recordupdates(
1944 mergestatemod.recordupdates(
1929 repo, actions, branchmerge, getfiledata
1945 repo, actions, branchmerge, getfiledata
1930 )
1946 )
1931 # update completed, clear state
1947 # update completed, clear state
1932 util.unlink(repo.vfs.join(b'updatestate'))
1948 util.unlink(repo.vfs.join(b'updatestate'))
1933
1949
1934 if not branchmerge:
1950 if not branchmerge:
1935 repo.dirstate.setbranch(p2.branch())
1951 repo.dirstate.setbranch(p2.branch())
1936
1952
1937 # If we're updating to a location, clean up any stale temporary includes
1953 # If we're updating to a location, clean up any stale temporary includes
1938 # (ex: this happens during hg rebase --abort).
1954 # (ex: this happens during hg rebase --abort).
1939 if not branchmerge:
1955 if not branchmerge:
1940 sparse.prunetemporaryincludes(repo)
1956 sparse.prunetemporaryincludes(repo)
1941
1957
1942 if updatedirstate:
1958 if updatedirstate:
1943 repo.hook(
1959 repo.hook(
1944 b'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
1960 b'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
1945 )
1961 )
1946 return stats
1962 return stats
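An illustrative call for the plain 'just jump to the new rev' case described in the docstring, assuming node is a binary changeset id the caller has already resolved:

    stats = update(repo, node, branchmerge=False, force=False)
    if not stats.unresolvedcount:
        repo.ui.status(b'working directory now at %s\n' % repo[node].hex()[:12])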
1947
1963
1948
1964
1949 def merge(ctx, labels=None, force=False, wc=None):
1965 def merge(ctx, labels=None, force=False, wc=None):
1950 """Merge another topological branch into the working copy.
1966 """Merge another topological branch into the working copy.
1951
1967
1952 force = whether the merge was run with 'merge --force' (deprecated)
1968 force = whether the merge was run with 'merge --force' (deprecated)
1953 """
1969 """
1954
1970
1955 return update(
1971 return update(
1956 ctx.repo(),
1972 ctx.repo(),
1957 ctx.rev(),
1973 ctx.rev(),
1958 labels=labels,
1974 labels=labels,
1959 branchmerge=True,
1975 branchmerge=True,
1960 force=force,
1976 force=force,
1961 mergeforce=force,
1977 mergeforce=force,
1962 wc=wc,
1978 wc=wc,
1963 )
1979 )
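A hypothetical caller of the wrapper above, where other_ctx is assumed to be the changectx of the head being merged in and the labels match the usual command-line defaults:

    stats = merge(other_ctx, labels=[b'working copy', b'merge rev'])
    if stats.unresolvedcount:
        raise error.Abort(b'unresolved conflicts (see hg resolve)')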
1964
1980
1965
1981
1966 def clean_update(ctx, wc=None):
1982 def clean_update(ctx, wc=None):
1967 """Do a clean update to the given commit.
1983 """Do a clean update to the given commit.
1968
1984
1969 This involves updating to the commit and discarding any changes in the
1985 This involves updating to the commit and discarding any changes in the
1970 working copy.
1986 working copy.
1971 """
1987 """
1972 return update(ctx.repo(), ctx.rev(), branchmerge=False, force=True, wc=wc)
1988 return update(ctx.repo(), ctx.rev(), branchmerge=False, force=True, wc=wc)
1973
1989
1974
1990
1975 def revert_to(ctx, matcher=None, wc=None):
1991 def revert_to(ctx, matcher=None, wc=None):
1976 """Revert the working copy to the given commit.
1992 """Revert the working copy to the given commit.
1977
1993
1978 The working copy will keep its current parent(s) but its content will
1994 The working copy will keep its current parent(s) but its content will
1979 be the same as in the given commit.
1995 be the same as in the given commit.
1980 """
1996 """
1981
1997
1982 return update(
1998 return update(
1983 ctx.repo(),
1999 ctx.repo(),
1984 ctx.rev(),
2000 ctx.rev(),
1985 branchmerge=False,
2001 branchmerge=False,
1986 force=True,
2002 force=True,
1987 updatedirstate=False,
2003 updatedirstate=False,
1988 matcher=matcher,
2004 matcher=matcher,
1989 wc=wc,
2005 wc=wc,
1990 )
2006 )
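The two helpers above differ only in whether the working copy parents move; an illustrative contrast, with ctx standing for any target changeset:

    clean_update(ctx)   # parent becomes ctx; uncommitted changes are discarded
    revert_to(ctx)      # parents stay put; file contents are made to match ctx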
1991
2007
1992
2008
1993 def graft(
2009 def graft(
1994 repo,
2010 repo,
1995 ctx,
2011 ctx,
1996 base=None,
2012 base=None,
1997 labels=None,
2013 labels=None,
1998 keepparent=False,
2014 keepparent=False,
1999 keepconflictparent=False,
2015 keepconflictparent=False,
2000 wctx=None,
2016 wctx=None,
2001 ):
2017 ):
2002 """Do a graft-like merge.
2018 """Do a graft-like merge.
2003
2019
2004 This is a merge where the merge ancestor is chosen such that one
2020 This is a merge where the merge ancestor is chosen such that one
2005 or more changesets are grafted onto the current changeset. In
2021 or more changesets are grafted onto the current changeset. In
2006 addition to the merge, this fixes up the dirstate to include only
2022 addition to the merge, this fixes up the dirstate to include only
2007 a single parent (if keepparent is False) and tries to duplicate any
2023 a single parent (if keepparent is False) and tries to duplicate any
2008 renames/copies appropriately.
2024 renames/copies appropriately.
2009
2025
2010 ctx - changeset to rebase
2026 ctx - changeset to rebase
2011 base - merge base, or ctx.p1() if not specified
2027 base - merge base, or ctx.p1() if not specified
2012 labels - merge labels eg ['local', 'graft']
2028 labels - merge labels eg ['local', 'graft']
2013 keepparent - keep second parent if any
2029 keepparent - keep second parent if any
2014 keepconflictparent - if unresolved, keep parent used for the merge
2030 keepconflictparent - if unresolved, keep parent used for the merge
2015
2031
2016 """
2032 """
2017 # If we're grafting a descendant onto an ancestor, be sure to pass
2033 # If we're grafting a descendant onto an ancestor, be sure to pass
2018 # mergeancestor=True to update. This does two things: 1) allows the merge if
2034 # mergeancestor=True to update. This does two things: 1) allows the merge if
2019 # the destination is the same as the parent of the ctx (so we can use graft
2035 # the destination is the same as the parent of the ctx (so we can use graft
2020 # to copy commits), and 2) informs update that the incoming changes are
2036 # to copy commits), and 2) informs update that the incoming changes are
2021 # newer than the destination so it doesn't prompt about "remote changed foo
2037 # newer than the destination so it doesn't prompt about "remote changed foo
2022 # which local deleted".
2038 # which local deleted".
2023 # We also pass mergeancestor=True when base is the same revision as p1. 2)
2039 # We also pass mergeancestor=True when base is the same revision as p1. 2)
2024 # doesn't matter as there can't possibly be conflicts, but 1) is necessary.
2040 # doesn't matter as there can't possibly be conflicts, but 1) is necessary.
2025 wctx = wctx or repo[None]
2041 wctx = wctx or repo[None]
2026 pctx = wctx.p1()
2042 pctx = wctx.p1()
2027 base = base or ctx.p1()
2043 base = base or ctx.p1()
2028 mergeancestor = (
2044 mergeancestor = (
2029 repo.changelog.isancestor(pctx.node(), ctx.node())
2045 repo.changelog.isancestor(pctx.node(), ctx.node())
2030 or pctx.rev() == base.rev()
2046 or pctx.rev() == base.rev()
2031 )
2047 )
2032
2048
2033 stats = update(
2049 stats = update(
2034 repo,
2050 repo,
2035 ctx.node(),
2051 ctx.node(),
2036 True,
2052 True,
2037 True,
2053 True,
2038 base.node(),
2054 base.node(),
2039 mergeancestor=mergeancestor,
2055 mergeancestor=mergeancestor,
2040 labels=labels,
2056 labels=labels,
2041 wc=wctx,
2057 wc=wctx,
2042 )
2058 )
2043
2059
2044 if keepconflictparent and stats.unresolvedcount:
2060 if keepconflictparent and stats.unresolvedcount:
2045 pother = ctx.node()
2061 pother = ctx.node()
2046 else:
2062 else:
2047 pother = nullid
2063 pother = nullid
2048 parents = ctx.parents()
2064 parents = ctx.parents()
2049 if keepparent and len(parents) == 2 and base in parents:
2065 if keepparent and len(parents) == 2 and base in parents:
2050 parents.remove(base)
2066 parents.remove(base)
2051 pother = parents[0].node()
2067 pother = parents[0].node()
2052 # Never set both parents equal to each other
2068 # Never set both parents equal to each other
2053 if pother == pctx.node():
2069 if pother == pctx.node():
2054 pother = nullid
2070 pother = nullid
2055
2071
2056 if wctx.isinmemory():
2072 if wctx.isinmemory():
2057 wctx.setparents(pctx.node(), pother)
2073 wctx.setparents(pctx.node(), pother)
2058 # fix up dirstate for copies and renames
2074 # fix up dirstate for copies and renames
2059 copies.graftcopies(wctx, ctx, base)
2075 copies.graftcopies(wctx, ctx, base)
2060 else:
2076 else:
2061 with repo.dirstate.parentchange():
2077 with repo.dirstate.parentchange():
2062 repo.setparents(pctx.node(), pother)
2078 repo.setparents(pctx.node(), pother)
2063 repo.dirstate.write(repo.currenttransaction())
2079 repo.dirstate.write(repo.currenttransaction())
2064 # fix up dirstate for copies and renames
2080 # fix up dirstate for copies and renames
2065 copies.graftcopies(wctx, ctx, base)
2081 copies.graftcopies(wctx, ctx, base)
2066 return stats
2082 return stats
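A hedged sketch of grafting a single changeset ctx onto the working copy parent, relying on the default base of ctx.p1():

    stats = graft(repo, ctx, labels=[b'local', b'graft'])
    if stats.unresolvedcount:
        repo.ui.warn(b'graft left unresolved conflicts\n')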
2067
2083
2068
2084
2069 def purge(
2085 def purge(
2070 repo,
2086 repo,
2071 matcher,
2087 matcher,
2072 unknown=True,
2088 unknown=True,
2073 ignored=False,
2089 ignored=False,
2074 removeemptydirs=True,
2090 removeemptydirs=True,
2075 removefiles=True,
2091 removefiles=True,
2076 abortonerror=False,
2092 abortonerror=False,
2077 noop=False,
2093 noop=False,
2078 ):
2094 ):
2079 """Purge the working directory of untracked files.
2095 """Purge the working directory of untracked files.
2080
2096
2081 ``matcher`` is a matcher configured to scan the working directory -
2097 ``matcher`` is a matcher configured to scan the working directory -
2082 potentially a subset.
2098 potentially a subset.
2083
2099
2084 ``unknown`` controls whether unknown files should be purged.
2100 ``unknown`` controls whether unknown files should be purged.
2085
2101
2086 ``ignored`` controls whether ignored files should be purged.
2102 ``ignored`` controls whether ignored files should be purged.
2087
2103
2088 ``removeemptydirs`` controls whether empty directories should be removed.
2104 ``removeemptydirs`` controls whether empty directories should be removed.
2089
2105
2090 ``removefiles`` controls whether files are removed.
2106 ``removefiles`` controls whether files are removed.
2091
2107
2092 ``abortonerror`` causes an exception to be raised if an error occurs
2108 ``abortonerror`` causes an exception to be raised if an error occurs
2093 deleting a file or directory.
2109 deleting a file or directory.
2094
2110
2095 ``noop`` controls whether to actually remove files. If true, files are only
2111 ``noop`` controls whether to actually remove files. If true, files are only
2096 reported, not removed.
2112 reported, not removed.
2097
2113
2098 Returns an iterable of relative paths in the working directory that were
2114 Returns an iterable of relative paths in the working directory that were
2099 or would be removed.
2115 or would be removed.
2100 """
2116 """
2101
2117
2102 def remove(removefn, path):
2118 def remove(removefn, path):
2103 try:
2119 try:
2104 removefn(path)
2120 removefn(path)
2105 except OSError:
2121 except OSError:
2106 m = _(b'%s cannot be removed') % path
2122 m = _(b'%s cannot be removed') % path
2107 if abortonerror:
2123 if abortonerror:
2108 raise error.Abort(m)
2124 raise error.Abort(m)
2109 else:
2125 else:
2110 repo.ui.warn(_(b'warning: %s\n') % m)
2126 repo.ui.warn(_(b'warning: %s\n') % m)
2111
2127
2112 # There's no API to copy a matcher. So mutate the passed matcher and
2128 # There's no API to copy a matcher. So mutate the passed matcher and
2113 # restore it when we're done.
2129 # restore it when we're done.
2114 oldtraversedir = matcher.traversedir
2130 oldtraversedir = matcher.traversedir
2115
2131
2116 res = []
2132 res = []
2117
2133
2118 try:
2134 try:
2119 if removeemptydirs:
2135 if removeemptydirs:
2120 directories = []
2136 directories = []
2121 matcher.traversedir = directories.append
2137 matcher.traversedir = directories.append
2122
2138
2123 status = repo.status(match=matcher, ignored=ignored, unknown=unknown)
2139 status = repo.status(match=matcher, ignored=ignored, unknown=unknown)
2124
2140
2125 if removefiles:
2141 if removefiles:
2126 for f in sorted(status.unknown + status.ignored):
2142 for f in sorted(status.unknown + status.ignored):
2127 if not noop:
2143 if not noop:
2128 repo.ui.note(_(b'removing file %s\n') % f)
2144 repo.ui.note(_(b'removing file %s\n') % f)
2129 remove(repo.wvfs.unlink, f)
2145 remove(repo.wvfs.unlink, f)
2130 res.append(f)
2146 res.append(f)
2131
2147
2132 if removeemptydirs:
2148 if removeemptydirs:
2133 for f in sorted(directories, reverse=True):
2149 for f in sorted(directories, reverse=True):
2134 if matcher(f) and not repo.wvfs.listdir(f):
2150 if matcher(f) and not repo.wvfs.listdir(f):
2135 if not noop:
2151 if not noop:
2136 repo.ui.note(_(b'removing directory %s\n') % f)
2152 repo.ui.note(_(b'removing directory %s\n') % f)
2137 remove(repo.wvfs.rmdir, f)
2153 remove(repo.wvfs.rmdir, f)
2138 res.append(f)
2154 res.append(f)
2139
2155
2140 return res
2156 return res
2141
2157
2142 finally:
2158 finally:
2143 matcher.traversedir = oldtraversedir
2159 matcher.traversedir = oldtraversedir
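An illustrative dry run of the function above; matchmod.always() is assumed to be the usual way to build a match-everything matcher here:

    from mercurial import match as matchmod  # assumed import, for illustration only
    would_remove = purge(repo, matchmod.always(), unknown=True, ignored=False, noop=True)
    for path in would_remove:
        repo.ui.status(b'would remove %s\n' % path)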