##// END OF EJS Templates
errors: raise InputError from revsingle() iff revset provided by the user...
Martin von Zweigbergk -
r48930:5105a997 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,119 +1,120 b''
1 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
1 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
2 #
2 #
3 # This extension enables removal of file content at a given revision,
3 # This extension enables removal of file content at a given revision,
4 # rewriting the data/metadata of successive revisions to preserve revision log
4 # rewriting the data/metadata of successive revisions to preserve revision log
5 # integrity.
5 # integrity.
6
6
7 """erase file content at a given revision
7 """erase file content at a given revision
8
8
9 The censor command instructs Mercurial to erase all content of a file at a given
9 The censor command instructs Mercurial to erase all content of a file at a given
10 revision *without updating the changeset hash.* This allows existing history to
10 revision *without updating the changeset hash.* This allows existing history to
11 remain valid while preventing future clones/pulls from receiving the erased
11 remain valid while preventing future clones/pulls from receiving the erased
12 data.
12 data.
13
13
14 Typical uses for censor are due to security or legal requirements, including::
14 Typical uses for censor are due to security or legal requirements, including::
15
15
16 * Passwords, private keys, cryptographic material
16 * Passwords, private keys, cryptographic material
17 * Licensed data/code/libraries for which the license has expired
17 * Licensed data/code/libraries for which the license has expired
18 * Personally Identifiable Information or other private data
18 * Personally Identifiable Information or other private data
19
19
20 Censored nodes can interrupt mercurial's typical operation whenever the excised
20 Censored nodes can interrupt mercurial's typical operation whenever the excised
21 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
21 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
22 simply fail when asked to produce censored data. Others, like ``hg verify`` and
22 simply fail when asked to produce censored data. Others, like ``hg verify`` and
23 ``hg update``, must be capable of tolerating censored data to continue to
23 ``hg update``, must be capable of tolerating censored data to continue to
24 function in a meaningful way. Such commands only tolerate censored file
24 function in a meaningful way. Such commands only tolerate censored file
25 revisions if they are allowed by the "censor.policy=ignore" config option.
25 revisions if they are allowed by the "censor.policy=ignore" config option.
26
26
27 A few informative commands such as ``hg grep`` will unconditionally
27 A few informative commands such as ``hg grep`` will unconditionally
28 ignore censored data and merely report that it was encountered.
28 ignore censored data and merely report that it was encountered.
29 """
29 """
30
30
31 from __future__ import absolute_import
31 from __future__ import absolute_import
32
32
33 from mercurial.i18n import _
33 from mercurial.i18n import _
34 from mercurial.node import short
34 from mercurial.node import short
35
35
36 from mercurial import (
36 from mercurial import (
37 error,
37 error,
38 logcmdutil,
38 registrar,
39 registrar,
39 scmutil,
40 scmutil,
40 )
41 )
41
42
42 cmdtable = {}
43 cmdtable = {}
43 command = registrar.command(cmdtable)
44 command = registrar.command(cmdtable)
44 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # be specifying the version(s) of Mercurial they are tested with, or
47 # be specifying the version(s) of Mercurial they are tested with, or
47 # leave the attribute unspecified.
48 # leave the attribute unspecified.
48 testedwith = b'ships-with-hg-core'
49 testedwith = b'ships-with-hg-core'
49
50
50
51
51 @command(
52 @command(
52 b'censor',
53 b'censor',
53 [
54 [
54 (
55 (
55 b'r',
56 b'r',
56 b'rev',
57 b'rev',
57 b'',
58 b'',
58 _(b'censor file from specified revision'),
59 _(b'censor file from specified revision'),
59 _(b'REV'),
60 _(b'REV'),
60 ),
61 ),
61 (b't', b'tombstone', b'', _(b'replacement tombstone data'), _(b'TEXT')),
62 (b't', b'tombstone', b'', _(b'replacement tombstone data'), _(b'TEXT')),
62 ],
63 ],
63 _(b'-r REV [-t TEXT] [FILE]'),
64 _(b'-r REV [-t TEXT] [FILE]'),
64 helpcategory=command.CATEGORY_MAINTENANCE,
65 helpcategory=command.CATEGORY_MAINTENANCE,
65 )
66 )
66 def censor(ui, repo, path, rev=b'', tombstone=b'', **opts):
67 def censor(ui, repo, path, rev=b'', tombstone=b'', **opts):
67 with repo.wlock(), repo.lock():
68 with repo.wlock(), repo.lock():
68 return _docensor(ui, repo, path, rev, tombstone, **opts)
69 return _docensor(ui, repo, path, rev, tombstone, **opts)
69
70
70
71
71 def _docensor(ui, repo, path, rev=b'', tombstone=b'', **opts):
72 def _docensor(ui, repo, path, rev=b'', tombstone=b'', **opts):
72 if not path:
73 if not path:
73 raise error.Abort(_(b'must specify file path to censor'))
74 raise error.Abort(_(b'must specify file path to censor'))
74 if not rev:
75 if not rev:
75 raise error.Abort(_(b'must specify revision to censor'))
76 raise error.Abort(_(b'must specify revision to censor'))
76
77
77 wctx = repo[None]
78 wctx = repo[None]
78
79
79 m = scmutil.match(wctx, (path,))
80 m = scmutil.match(wctx, (path,))
80 if m.anypats() or len(m.files()) != 1:
81 if m.anypats() or len(m.files()) != 1:
81 raise error.Abort(_(b'can only specify an explicit filename'))
82 raise error.Abort(_(b'can only specify an explicit filename'))
82 path = m.files()[0]
83 path = m.files()[0]
83 flog = repo.file(path)
84 flog = repo.file(path)
84 if not len(flog):
85 if not len(flog):
85 raise error.Abort(_(b'cannot censor file with no history'))
86 raise error.Abort(_(b'cannot censor file with no history'))
86
87
87 rev = scmutil.revsingle(repo, rev, rev).rev()
88 rev = logcmdutil.revsingle(repo, rev, rev).rev()
88 try:
89 try:
89 ctx = repo[rev]
90 ctx = repo[rev]
90 except KeyError:
91 except KeyError:
91 raise error.Abort(_(b'invalid revision identifier %s') % rev)
92 raise error.Abort(_(b'invalid revision identifier %s') % rev)
92
93
93 try:
94 try:
94 fctx = ctx.filectx(path)
95 fctx = ctx.filectx(path)
95 except error.LookupError:
96 except error.LookupError:
96 raise error.Abort(_(b'file does not exist at revision %s') % rev)
97 raise error.Abort(_(b'file does not exist at revision %s') % rev)
97
98
98 fnode = fctx.filenode()
99 fnode = fctx.filenode()
99 heads = []
100 heads = []
100 for headnode in repo.heads():
101 for headnode in repo.heads():
101 hc = repo[headnode]
102 hc = repo[headnode]
102 if path in hc and hc.filenode(path) == fnode:
103 if path in hc and hc.filenode(path) == fnode:
103 heads.append(hc)
104 heads.append(hc)
104 if heads:
105 if heads:
105 headlist = b', '.join([short(c.node()) for c in heads])
106 headlist = b', '.join([short(c.node()) for c in heads])
106 raise error.Abort(
107 raise error.Abort(
107 _(b'cannot censor file in heads (%s)') % headlist,
108 _(b'cannot censor file in heads (%s)') % headlist,
108 hint=_(b'clean/delete and commit first'),
109 hint=_(b'clean/delete and commit first'),
109 )
110 )
110
111
111 wp = wctx.parents()
112 wp = wctx.parents()
112 if ctx.node() in [p.node() for p in wp]:
113 if ctx.node() in [p.node() for p in wp]:
113 raise error.Abort(
114 raise error.Abort(
114 _(b'cannot censor working directory'),
115 _(b'cannot censor working directory'),
115 hint=_(b'clean/delete/update first'),
116 hint=_(b'clean/delete/update first'),
116 )
117 )
117
118
118 with repo.transaction(b'censor') as tr:
119 with repo.transaction(b'censor') as tr:
119 flog.censorrevision(tr, fnode, tombstone=tombstone)
120 flog.censorrevision(tr, fnode, tombstone=tombstone)
@@ -1,84 +1,83 b''
1 # Mercurial extension to provide the 'hg children' command
1 # Mercurial extension to provide the 'hg children' command
2 #
2 #
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 #
4 #
5 # Author(s):
5 # Author(s):
6 # Thomas Arendsen Hein <thomas@intevation.de>
6 # Thomas Arendsen Hein <thomas@intevation.de>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2 or any later version.
9 # GNU General Public License version 2 or any later version.
10
10
11 '''command to display child changesets (DEPRECATED)
11 '''command to display child changesets (DEPRECATED)
12
12
13 This extension is deprecated. You should use :hg:`log -r
13 This extension is deprecated. You should use :hg:`log -r
14 "children(REV)"` instead.
14 "children(REV)"` instead.
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 from mercurial.i18n import _
19 from mercurial.i18n import _
20 from mercurial import (
20 from mercurial import (
21 cmdutil,
21 cmdutil,
22 logcmdutil,
22 logcmdutil,
23 pycompat,
23 pycompat,
24 registrar,
24 registrar,
25 scmutil,
26 )
25 )
27
26
28 templateopts = cmdutil.templateopts
27 templateopts = cmdutil.templateopts
29
28
30 cmdtable = {}
29 cmdtable = {}
31 command = registrar.command(cmdtable)
30 command = registrar.command(cmdtable)
32 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
33 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
34 # be specifying the version(s) of Mercurial they are tested with, or
33 # be specifying the version(s) of Mercurial they are tested with, or
35 # leave the attribute unspecified.
34 # leave the attribute unspecified.
36 testedwith = b'ships-with-hg-core'
35 testedwith = b'ships-with-hg-core'
37
36
38
37
39 @command(
38 @command(
40 b'children',
39 b'children',
41 [
40 [
42 (
41 (
43 b'r',
42 b'r',
44 b'rev',
43 b'rev',
45 b'.',
44 b'.',
46 _(b'show children of the specified revision'),
45 _(b'show children of the specified revision'),
47 _(b'REV'),
46 _(b'REV'),
48 ),
47 ),
49 ]
48 ]
50 + templateopts,
49 + templateopts,
51 _(b'hg children [-r REV] [FILE]'),
50 _(b'hg children [-r REV] [FILE]'),
52 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
51 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
53 inferrepo=True,
52 inferrepo=True,
54 )
53 )
55 def children(ui, repo, file_=None, **opts):
54 def children(ui, repo, file_=None, **opts):
56 """show the children of the given or working directory revision
55 """show the children of the given or working directory revision
57
56
58 Print the children of the working directory's revisions. If a
57 Print the children of the working directory's revisions. If a
59 revision is given via -r/--rev, the children of that revision will
58 revision is given via -r/--rev, the children of that revision will
60 be printed. If a file argument is given, revision in which the
59 be printed. If a file argument is given, revision in which the
61 file was last changed (after the working directory revision or the
60 file was last changed (after the working directory revision or the
62 argument to --rev if given) is printed.
61 argument to --rev if given) is printed.
63
62
64 Please use :hg:`log` instead::
63 Please use :hg:`log` instead::
65
64
66 hg children => hg log -r "children(.)"
65 hg children => hg log -r "children(.)"
67 hg children -r REV => hg log -r "children(REV)"
66 hg children -r REV => hg log -r "children(REV)"
68
67
69 See :hg:`help log` and :hg:`help revsets.children`.
68 See :hg:`help log` and :hg:`help revsets.children`.
70
69
71 """
70 """
72 opts = pycompat.byteskwargs(opts)
71 opts = pycompat.byteskwargs(opts)
73 rev = opts.get(b'rev')
72 rev = opts.get(b'rev')
74 ctx = scmutil.revsingle(repo, rev)
73 ctx = logcmdutil.revsingle(repo, rev)
75 if file_:
74 if file_:
76 fctx = repo.filectx(file_, changeid=ctx.rev())
75 fctx = repo.filectx(file_, changeid=ctx.rev())
77 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
76 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
78 else:
77 else:
79 childctxs = ctx.children()
78 childctxs = ctx.children()
80
79
81 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
80 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
82 for cctx in childctxs:
81 for cctx in childctxs:
83 displayer.show(cctx)
82 displayer.show(cctx)
84 displayer.close()
83 displayer.close()
@@ -1,95 +1,95 b''
1 # closehead.py - Close arbitrary heads without checking them out first
1 # closehead.py - Close arbitrary heads without checking them out first
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''close arbitrary heads without checking them out first'''
6 '''close arbitrary heads without checking them out first'''
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import (
11 from mercurial import (
12 bookmarks,
12 bookmarks,
13 cmdutil,
13 cmdutil,
14 context,
14 context,
15 error,
15 error,
16 logcmdutil,
16 pycompat,
17 pycompat,
17 registrar,
18 registrar,
18 logcmdutil,
19 )
19 )
20
20
21 cmdtable = {}
21 cmdtable = {}
22 command = registrar.command(cmdtable)
22 command = registrar.command(cmdtable)
23 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
23 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
24 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
24 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
25 # be specifying the version(s) of Mercurial they are tested with, or
25 # be specifying the version(s) of Mercurial they are tested with, or
26 # leave the attribute unspecified.
26 # leave the attribute unspecified.
27 testedwith = b'ships-with-hg-core'
27 testedwith = b'ships-with-hg-core'
28
28
29 commitopts = cmdutil.commitopts
29 commitopts = cmdutil.commitopts
30 commitopts2 = cmdutil.commitopts2
30 commitopts2 = cmdutil.commitopts2
31 commitopts3 = [(b'r', b'rev', [], _(b'revision to check'), _(b'REV'))]
31 commitopts3 = [(b'r', b'rev', [], _(b'revision to check'), _(b'REV'))]
32
32
33
33
34 @command(
34 @command(
35 b'close-head|close-heads',
35 b'close-head|close-heads',
36 commitopts + commitopts2 + commitopts3,
36 commitopts + commitopts2 + commitopts3,
37 _(b'[OPTION]... [REV]...'),
37 _(b'[OPTION]... [REV]...'),
38 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
38 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
39 inferrepo=True,
39 inferrepo=True,
40 )
40 )
41 def close_branch(ui, repo, *revs, **opts):
41 def close_branch(ui, repo, *revs, **opts):
42 """close the given head revisions
42 """close the given head revisions
43
43
44 This is equivalent to checking out each revision in a clean tree and running
44 This is equivalent to checking out each revision in a clean tree and running
45 ``hg commit --close-branch``, except that it doesn't change the working
45 ``hg commit --close-branch``, except that it doesn't change the working
46 directory.
46 directory.
47
47
48 The commit message must be specified with -l or -m.
48 The commit message must be specified with -l or -m.
49 """
49 """
50
50
51 def docommit(rev):
51 def docommit(rev):
52 cctx = context.memctx(
52 cctx = context.memctx(
53 repo,
53 repo,
54 parents=[rev, None],
54 parents=[rev, None],
55 text=message,
55 text=message,
56 files=[],
56 files=[],
57 filectxfn=None,
57 filectxfn=None,
58 user=opts.get(b'user'),
58 user=opts.get(b'user'),
59 date=opts.get(b'date'),
59 date=opts.get(b'date'),
60 extra=extra,
60 extra=extra,
61 )
61 )
62 tr = repo.transaction(b'commit')
62 tr = repo.transaction(b'commit')
63 ret = repo.commitctx(cctx, True)
63 ret = repo.commitctx(cctx, True)
64 bookmarks.update(repo, [rev, None], ret)
64 bookmarks.update(repo, [rev, None], ret)
65 cctx.markcommitted(ret)
65 cctx.markcommitted(ret)
66 tr.close()
66 tr.close()
67
67
68 opts = pycompat.byteskwargs(opts)
68 opts = pycompat.byteskwargs(opts)
69
69
70 revs += tuple(opts.get(b'rev', []))
70 revs += tuple(opts.get(b'rev', []))
71 revs = logcmdutil.revrange(repo, revs)
71 revs = logcmdutil.revrange(repo, revs)
72
72
73 if not revs:
73 if not revs:
74 raise error.Abort(_(b'no revisions specified'))
74 raise error.Abort(_(b'no revisions specified'))
75
75
76 heads = []
76 heads = []
77 for branch in repo.branchmap():
77 for branch in repo.branchmap():
78 heads.extend(repo.branchheads(branch))
78 heads.extend(repo.branchheads(branch))
79 heads = {repo[h].rev() for h in heads}
79 heads = {repo[h].rev() for h in heads}
80 for rev in revs:
80 for rev in revs:
81 if rev not in heads:
81 if rev not in heads:
82 raise error.Abort(_(b'revision is not an open head: %d') % rev)
82 raise error.Abort(_(b'revision is not an open head: %d') % rev)
83
83
84 message = cmdutil.logmessage(ui, opts)
84 message = cmdutil.logmessage(ui, opts)
85 if not message:
85 if not message:
86 raise error.Abort(_(b"no commit message specified with -l or -m"))
86 raise error.Abort(_(b"no commit message specified with -l or -m"))
87 extra = {b'close': b'1'}
87 extra = {b'close': b'1'}
88
88
89 with repo.wlock(), repo.lock():
89 with repo.wlock(), repo.lock():
90 for rev in revs:
90 for rev in revs:
91 r = repo[rev]
91 r = repo[rev]
92 branch = r.branch()
92 branch = r.branch()
93 extra[b'branch'] = branch
93 extra[b'branch'] = branch
94 docommit(r)
94 docommit(r)
95 return 0
95 return 0
@@ -1,804 +1,804 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or revision with working directory. The external
11 to compare revisions, or revision with working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 If there is more than one file being compared and the "child" revision
16 If there is more than one file being compared and the "child" revision
17 is the working directory, any modifications made in the external diff
17 is the working directory, any modifications made in the external diff
18 program will be copied back to the working directory from the temporary
18 program will be copied back to the working directory from the temporary
19 directory.
19 directory.
20
20
21 The extdiff extension also allows you to configure new diff commands, so
21 The extdiff extension also allows you to configure new diff commands, so
22 you do not need to type :hg:`extdiff -p kdiff3` always. ::
22 you do not need to type :hg:`extdiff -p kdiff3` always. ::
23
23
24 [extdiff]
24 [extdiff]
25 # add new command that runs GNU diff(1) in 'context diff' mode
25 # add new command that runs GNU diff(1) in 'context diff' mode
26 cdiff = gdiff -Nprc5
26 cdiff = gdiff -Nprc5
27 ## or the old way:
27 ## or the old way:
28 #cmd.cdiff = gdiff
28 #cmd.cdiff = gdiff
29 #opts.cdiff = -Nprc5
29 #opts.cdiff = -Nprc5
30
30
31 # add new command called meld, runs meld (no need to name twice). If
31 # add new command called meld, runs meld (no need to name twice). If
32 # the meld executable is not available, the meld tool in [merge-tools]
32 # the meld executable is not available, the meld tool in [merge-tools]
33 # will be used, if available
33 # will be used, if available
34 meld =
34 meld =
35
35
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
37 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
38 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
38 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 # your .vimrc
39 # your .vimrc
40 vimdiff = gvim -f "+next" \\
40 vimdiff = gvim -f "+next" \\
41 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
41 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
42
42
43 Tool arguments can include variables that are expanded at runtime::
43 Tool arguments can include variables that are expanded at runtime::
44
44
45 $parent1, $plabel1 - filename, descriptive label of first parent
45 $parent1, $plabel1 - filename, descriptive label of first parent
46 $child, $clabel - filename, descriptive label of child revision
46 $child, $clabel - filename, descriptive label of child revision
47 $parent2, $plabel2 - filename, descriptive label of second parent
47 $parent2, $plabel2 - filename, descriptive label of second parent
48 $root - repository root
48 $root - repository root
49 $parent is an alias for $parent1.
49 $parent is an alias for $parent1.
50
50
51 The extdiff extension will look in your [diff-tools] and [merge-tools]
51 The extdiff extension will look in your [diff-tools] and [merge-tools]
52 sections for diff tool arguments, when none are specified in [extdiff].
52 sections for diff tool arguments, when none are specified in [extdiff].
53
53
54 ::
54 ::
55
55
56 [extdiff]
56 [extdiff]
57 kdiff3 =
57 kdiff3 =
58
58
59 [diff-tools]
59 [diff-tools]
60 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
60 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
61
61
62 If a program has a graphical interface, it might be interesting to tell
62 If a program has a graphical interface, it might be interesting to tell
63 Mercurial about it. It will prevent the program from being mistakenly
63 Mercurial about it. It will prevent the program from being mistakenly
64 used in a terminal-only environment (such as an SSH terminal session),
64 used in a terminal-only environment (such as an SSH terminal session),
65 and will make :hg:`extdiff --per-file` open multiple file diffs at once
65 and will make :hg:`extdiff --per-file` open multiple file diffs at once
66 instead of one by one (if you still want to open file diffs one by one,
66 instead of one by one (if you still want to open file diffs one by one,
67 you can use the --confirm option).
67 you can use the --confirm option).
68
68
69 Declaring that a tool has a graphical interface can be done with the
69 Declaring that a tool has a graphical interface can be done with the
70 ``gui`` flag next to where ``diffargs`` are specified:
70 ``gui`` flag next to where ``diffargs`` are specified:
71
71
72 ::
72 ::
73
73
74 [diff-tools]
74 [diff-tools]
75 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
75 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
76 kdiff3.gui = true
76 kdiff3.gui = true
77
77
78 You can use -I/-X and list of file or directory names like normal
78 You can use -I/-X and list of file or directory names like normal
79 :hg:`diff` command. The extdiff extension makes snapshots of only
79 :hg:`diff` command. The extdiff extension makes snapshots of only
80 needed files, so running the external diff program will actually be
80 needed files, so running the external diff program will actually be
81 pretty fast (at least faster than having to compare the entire tree).
81 pretty fast (at least faster than having to compare the entire tree).
82 '''
82 '''
83
83
84 from __future__ import absolute_import
84 from __future__ import absolute_import
85
85
86 import os
86 import os
87 import re
87 import re
88 import shutil
88 import shutil
89 import stat
89 import stat
90 import subprocess
90 import subprocess
91
91
92 from mercurial.i18n import _
92 from mercurial.i18n import _
93 from mercurial.node import (
93 from mercurial.node import (
94 nullrev,
94 nullrev,
95 short,
95 short,
96 )
96 )
97 from mercurial import (
97 from mercurial import (
98 archival,
98 archival,
99 cmdutil,
99 cmdutil,
100 encoding,
100 encoding,
101 error,
101 error,
102 filemerge,
102 filemerge,
103 formatter,
103 formatter,
104 logcmdutil,
104 logcmdutil,
105 pycompat,
105 pycompat,
106 registrar,
106 registrar,
107 scmutil,
107 scmutil,
108 util,
108 util,
109 )
109 )
110 from mercurial.utils import (
110 from mercurial.utils import (
111 procutil,
111 procutil,
112 stringutil,
112 stringutil,
113 )
113 )
114
114
115 cmdtable = {}
115 cmdtable = {}
116 command = registrar.command(cmdtable)
116 command = registrar.command(cmdtable)
117
117
118 configtable = {}
118 configtable = {}
119 configitem = registrar.configitem(configtable)
119 configitem = registrar.configitem(configtable)
120
120
121 configitem(
121 configitem(
122 b'extdiff',
122 b'extdiff',
123 br'opts\..*',
123 br'opts\..*',
124 default=b'',
124 default=b'',
125 generic=True,
125 generic=True,
126 )
126 )
127
127
128 configitem(
128 configitem(
129 b'extdiff',
129 b'extdiff',
130 br'gui\..*',
130 br'gui\..*',
131 generic=True,
131 generic=True,
132 )
132 )
133
133
134 configitem(
134 configitem(
135 b'diff-tools',
135 b'diff-tools',
136 br'.*\.diffargs$',
136 br'.*\.diffargs$',
137 default=None,
137 default=None,
138 generic=True,
138 generic=True,
139 )
139 )
140
140
141 configitem(
141 configitem(
142 b'diff-tools',
142 b'diff-tools',
143 br'.*\.gui$',
143 br'.*\.gui$',
144 generic=True,
144 generic=True,
145 )
145 )
146
146
147 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
147 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
148 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
148 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
149 # be specifying the version(s) of Mercurial they are tested with, or
149 # be specifying the version(s) of Mercurial they are tested with, or
150 # leave the attribute unspecified.
150 # leave the attribute unspecified.
151 testedwith = b'ships-with-hg-core'
151 testedwith = b'ships-with-hg-core'
152
152
153
153
154 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
154 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
155 """snapshot files as of some revision
155 """snapshot files as of some revision
156 if not using snapshot, -I/-X does not work and recursive diff
156 if not using snapshot, -I/-X does not work and recursive diff
157 in tools like kdiff3 and meld displays too many files."""
157 in tools like kdiff3 and meld displays too many files."""
158 dirname = os.path.basename(repo.root)
158 dirname = os.path.basename(repo.root)
159 if dirname == b"":
159 if dirname == b"":
160 dirname = b"root"
160 dirname = b"root"
161 if node is not None:
161 if node is not None:
162 dirname = b'%s.%s' % (dirname, short(node))
162 dirname = b'%s.%s' % (dirname, short(node))
163 base = os.path.join(tmproot, dirname)
163 base = os.path.join(tmproot, dirname)
164 os.mkdir(base)
164 os.mkdir(base)
165 fnsandstat = []
165 fnsandstat = []
166
166
167 if node is not None:
167 if node is not None:
168 ui.note(
168 ui.note(
169 _(b'making snapshot of %d files from rev %s\n')
169 _(b'making snapshot of %d files from rev %s\n')
170 % (len(files), short(node))
170 % (len(files), short(node))
171 )
171 )
172 else:
172 else:
173 ui.note(
173 ui.note(
174 _(b'making snapshot of %d files from working directory\n')
174 _(b'making snapshot of %d files from working directory\n')
175 % (len(files))
175 % (len(files))
176 )
176 )
177
177
178 if files:
178 if files:
179 repo.ui.setconfig(b"ui", b"archivemeta", False)
179 repo.ui.setconfig(b"ui", b"archivemeta", False)
180
180
181 archival.archive(
181 archival.archive(
182 repo,
182 repo,
183 base,
183 base,
184 node,
184 node,
185 b'files',
185 b'files',
186 match=scmutil.matchfiles(repo, files),
186 match=scmutil.matchfiles(repo, files),
187 subrepos=listsubrepos,
187 subrepos=listsubrepos,
188 )
188 )
189
189
190 for fn in sorted(files):
190 for fn in sorted(files):
191 wfn = util.pconvert(fn)
191 wfn = util.pconvert(fn)
192 ui.note(b' %s\n' % wfn)
192 ui.note(b' %s\n' % wfn)
193
193
194 if node is None:
194 if node is None:
195 dest = os.path.join(base, wfn)
195 dest = os.path.join(base, wfn)
196
196
197 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
197 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
198 return dirname, fnsandstat
198 return dirname, fnsandstat
199
199
200
200
201 def formatcmdline(
201 def formatcmdline(
202 cmdline,
202 cmdline,
203 repo_root,
203 repo_root,
204 do3way,
204 do3way,
205 parent1,
205 parent1,
206 plabel1,
206 plabel1,
207 parent2,
207 parent2,
208 plabel2,
208 plabel2,
209 child,
209 child,
210 clabel,
210 clabel,
211 ):
211 ):
212 # Function to quote file/dir names in the argument string.
212 # Function to quote file/dir names in the argument string.
213 # When not operating in 3-way mode, an empty string is
213 # When not operating in 3-way mode, an empty string is
214 # returned for parent2
214 # returned for parent2
215 replace = {
215 replace = {
216 b'parent': parent1,
216 b'parent': parent1,
217 b'parent1': parent1,
217 b'parent1': parent1,
218 b'parent2': parent2,
218 b'parent2': parent2,
219 b'plabel1': plabel1,
219 b'plabel1': plabel1,
220 b'plabel2': plabel2,
220 b'plabel2': plabel2,
221 b'child': child,
221 b'child': child,
222 b'clabel': clabel,
222 b'clabel': clabel,
223 b'root': repo_root,
223 b'root': repo_root,
224 }
224 }
225
225
226 def quote(match):
226 def quote(match):
227 pre = match.group(2)
227 pre = match.group(2)
228 key = match.group(3)
228 key = match.group(3)
229 if not do3way and key == b'parent2':
229 if not do3way and key == b'parent2':
230 return pre
230 return pre
231 return pre + procutil.shellquote(replace[key])
231 return pre + procutil.shellquote(replace[key])
232
232
233 # Match parent2 first, so 'parent1?' will match both parent1 and parent
233 # Match parent2 first, so 'parent1?' will match both parent1 and parent
234 regex = (
234 regex = (
235 br'''(['"]?)([^\s'"$]*)'''
235 br'''(['"]?)([^\s'"$]*)'''
236 br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1'
236 br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1'
237 )
237 )
238 if not do3way and not re.search(regex, cmdline):
238 if not do3way and not re.search(regex, cmdline):
239 cmdline += b' $parent1 $child'
239 cmdline += b' $parent1 $child'
240 return re.sub(regex, quote, cmdline)
240 return re.sub(regex, quote, cmdline)
241
241
242
242
def _systembackground(cmd, environ=None, cwd=None):
    """like 'procutil.system', but returns the Popen object directly
    so we don't have to wait on it.

    ``cmd`` is run through the shell; ``environ`` (optional mapping) is
    merged into the process environment by procutil.shellenviron.
    """
    env = procutil.shellenviron(environ)
    proc = subprocess.Popen(
        procutil.tonativestr(cmd),
        shell=True,
        close_fds=procutil.closefds,
        env=procutil.tonativeenv(env),
        cwd=pycompat.rapply(procutil.tonativestr, cwd),
    )
    return proc
256
256
257
257
def _runperfilediff(
    cmdline,
    repo_root,
    ui,
    guitool,
    do3way,
    confirm,
    commonfiles,
    tmproot,
    dir1a,
    dir1b,
    dir2,
    rev1a,
    rev1b,
    rev2,
):
    """Run the external diff tool once per file in ``commonfiles``.

    If ``confirm`` is set, the user is prompted before each invocation.
    If the tool is a GUI tool (``guitool``) and confirmation is off, all
    invocations are launched in the background and waited on at the end;
    otherwise each invocation runs to completion before the next starts.
    """
    # Note that we need to sort the list of files because it was
    # built in an "unstable" way and it's annoying to get files in a
    # random order, especially when "confirm" mode is enabled.
    waitprocs = []
    totalfiles = len(commonfiles)
    for idx, commonfile in enumerate(sorted(commonfiles)):
        path1a = os.path.join(dir1a, commonfile)
        label1a = commonfile + rev1a
        if not os.path.isfile(path1a):
            # file absent on this side: diff against the null device
            path1a = pycompat.osdevnull

        path1b = b''
        label1b = b''
        if do3way:
            path1b = os.path.join(dir1b, commonfile)
            label1b = commonfile + rev1b
            if not os.path.isfile(path1b):
                path1b = pycompat.osdevnull

        path2 = os.path.join(dir2, commonfile)
        label2 = commonfile + rev2

        if confirm:
            # Prompt before showing this diff
            difffiles = _(b'diff %s (%d of %d)') % (
                commonfile,
                idx + 1,
                totalfiles,
            )
            responses = _(
                b'[Yns?]'
                b'$$ &Yes, show diff'
                b'$$ &No, skip this diff'
                b'$$ &Skip remaining diffs'
                b'$$ &? (display help)'
            )
            r = ui.promptchoice(b'%s %s' % (difffiles, responses))
            if r == 3:  # ?
                while r == 3:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(b'%s %s' % (difffiles, responses))
            if r == 0:  # yes
                pass
            elif r == 1:  # no
                continue
            elif r == 2:  # skip
                break

        curcmdline = formatcmdline(
            cmdline,
            repo_root,
            do3way=do3way,
            parent1=path1a,
            plabel1=label1a,
            parent2=path1b,
            plabel2=label1b,
            child=path2,
            clabel=label2,
        )

        if confirm or not guitool:
            # Run the comparison program and wait for it to exit
            # before we show the next file.
            # This is because either we need to wait for confirmation
            # from the user between each invocation, or because, as far
            # as we know, the tool doesn't have a GUI, in which case
            # we can't run multiple CLI programs at the same time.
            ui.debug(
                b'running %r in %s\n' % (pycompat.bytestr(curcmdline), tmproot)
            )
            ui.system(curcmdline, cwd=tmproot, blockedtag=b'extdiff')
        else:
            # Run the comparison program but don't wait, as we're
            # going to rapid-fire each file diff and then wait on
            # the whole group.
            ui.debug(
                b'running %r in %s (backgrounded)\n'
                % (pycompat.bytestr(curcmdline), tmproot)
            )
            proc = _systembackground(curcmdline, cwd=tmproot)
            waitprocs.append(proc)

    if waitprocs:
        with ui.timeblockedsection(b'extdiff'):
            for proc in waitprocs:
                proc.wait()
361
361
362
362
def diffpatch(ui, repo, node1, node2, tmproot, matcher, cmdline):
    """Export both revisions as patch files and diff the two patches.

    Used for ``--patch`` mode: instead of comparing snapshots of the
    trees, the external tool compares 'hg export' output for each node.
    Returns 1 (the extdiff convention for "tool was run").
    """
    template = b'hg-%h.patch'
    # write patches to temporary files
    with formatter.nullformatter(ui, b'extdiff', {}) as fm:
        cmdutil.export(
            repo,
            [repo[node1].rev(), repo[node2].rev()],
            fm,
            fntemplate=repo.vfs.reljoin(tmproot, template),
            match=matcher,
        )
    label1 = cmdutil.makefilename(repo[node1], template)
    label2 = cmdutil.makefilename(repo[node2], template)
    file1 = repo.vfs.reljoin(tmproot, label1)
    file2 = repo.vfs.reljoin(tmproot, label2)
    cmdline = formatcmdline(
        cmdline,
        repo.root,
        # no 3way while comparing patches
        do3way=False,
        parent1=file1,
        plabel1=label1,
        # while comparing patches, there is no second parent
        parent2=None,
        plabel2=None,
        child=file2,
        clabel=label2,
    )
    ui.debug(b'running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot))
    ui.system(cmdline, cwd=tmproot, blockedtag=b'extdiff')
    return 1
394
394
395
395
def diffrevs(
    ui,
    repo,
    ctx1a,
    ctx1b,
    ctx2,
    matcher,
    tmproot,
    cmdline,
    do3way,
    guitool,
    opts,
):
    """Snapshot the revisions into ``tmproot`` and run the external tool.

    Compares ``ctx1a`` (and ``ctx1b`` in 3-way mode) against ``ctx2``.
    Returns 0 when nothing changed, 1 otherwise. When the working
    directory was snapshotted, files the tool modified are copied back.
    """

    subrepos = opts.get(b'subrepos')

    # calculate list of files changed between both revs
    st = ctx1a.status(ctx2, matcher, listsubrepos=subrepos)
    mod_a, add_a, rem_a = set(st.modified), set(st.added), set(st.removed)
    if do3way:
        stb = ctx1b.status(ctx2, matcher, listsubrepos=subrepos)
        mod_b, add_b, rem_b = (
            set(stb.modified),
            set(stb.added),
            set(stb.removed),
        )
    else:
        mod_b, add_b, rem_b = set(), set(), set()
    modadd = mod_a | add_a | mod_b | add_b
    common = modadd | rem_a | rem_b
    if not common:
        return 0

    # Always make a copy of ctx1a (and ctx1b, if applicable)
    # dir1a should contain files which are:
    #   * modified or removed from ctx1a to ctx2
    #   * modified or added from ctx1b to ctx2
    # (except file added from ctx1a to ctx2 as they were not present in
    # ctx1a)
    dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
    dir1a = snapshot(ui, repo, dir1a_files, ctx1a.node(), tmproot, subrepos)[0]
    rev1a = b'' if ctx1a.rev() is None else b'@%d' % ctx1a.rev()
    if do3way:
        # file calculation criteria same as dir1a
        dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
        dir1b = snapshot(
            ui, repo, dir1b_files, ctx1b.node(), tmproot, subrepos
        )[0]
        rev1b = b'@%d' % ctx1b.rev()
    else:
        dir1b = None
        rev1b = b''

    fnsandstat = []

    # If ctx2 is not the wc or there is >1 change, copy it
    dir2root = b''
    rev2 = b''
    if ctx2.node() is not None:
        dir2 = snapshot(ui, repo, modadd, ctx2.node(), tmproot, subrepos)[0]
        rev2 = b'@%d' % ctx2.rev()
    elif len(common) > 1:
        # we only actually need to get the files to copy back to
        # the working dir in this case (because the other cases
        # are: diffing 2 revisions or single file -- in which case
        # the file is already directly passed to the diff tool).
        dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot, subrepos)
    else:
        # This lets the diff tool open the changed file directly
        dir2 = b''
        dir2root = repo.root

    label1a = rev1a
    label1b = rev1b
    label2 = rev2

    if not opts.get(b'per_file'):
        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(common) == 1:
            common_file = util.localpath(common.pop())
            dir1a = os.path.join(tmproot, dir1a, common_file)
            label1a = common_file + rev1a
            if not os.path.isfile(dir1a):
                dir1a = pycompat.osdevnull
            if do3way:
                dir1b = os.path.join(tmproot, dir1b, common_file)
                label1b = common_file + rev1b
                if not os.path.isfile(dir1b):
                    dir1b = pycompat.osdevnull
            dir2 = os.path.join(dir2root, dir2, common_file)
            label2 = common_file + rev2

        # Run the external tool on the 2 temp directories or the patches
        cmdline = formatcmdline(
            cmdline,
            repo.root,
            do3way=do3way,
            parent1=dir1a,
            plabel1=label1a,
            parent2=dir1b,
            plabel2=label1b,
            child=dir2,
            clabel=label2,
        )
        ui.debug(b'running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot))
        ui.system(cmdline, cwd=tmproot, blockedtag=b'extdiff')
    else:
        # Run the external tool once for each pair of files
        _runperfilediff(
            cmdline,
            repo.root,
            ui,
            guitool=guitool,
            do3way=do3way,
            confirm=opts.get(b'confirm'),
            commonfiles=common,
            tmproot=tmproot,
            dir1a=os.path.join(tmproot, dir1a),
            dir1b=os.path.join(tmproot, dir1b) if do3way else None,
            dir2=os.path.join(dir2root, dir2),
            rev1a=rev1a,
            rev1b=rev1b,
            rev2=rev2,
        )

    for copy_fn, working_fn, st in fnsandstat:
        cpstat = os.lstat(copy_fn)
        # Some tools copy the file and attributes, so mtime may not detect
        # all changes. A size check will detect more cases, but not all.
        # The only certain way to detect every case is to diff all files,
        # which could be expensive.
        # copyfile() carries over the permission, so the mode check could
        # be in an 'elif' branch, but for the case where the file has
        # changed without affecting mtime or size.
        if (
            cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
            or cpstat.st_size != st.st_size
            or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)
        ):
            ui.debug(
                b'file changed while diffing. '
                b'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn)
            )
            util.copyfile(copy_fn, working_fn)

    return 1
543
543
544
544
def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
    """Do the actual diff:

    - copy to a temp structure if diffing 2 internal revisions
    - copy to a temp structure if diffing working revision with
      another one and more than 1 file is changed
    - just invoke the diff for a single file in the working dir
    """

    cmdutil.check_at_most_one_arg(opts, b'rev', b'change')
    revs = opts.get(b'rev')
    from_rev = opts.get(b'from')
    to_rev = opts.get(b'to')
    change = opts.get(b'change')
    do3way = b'$parent2' in cmdline

    if change:
        # logcmdutil.revsingle raises InputError for a bad user-supplied
        # revset (vs. scmutil.revsingle for internally-generated revsets)
        ctx2 = logcmdutil.revsingle(repo, change, None)
        ctx1a, ctx1b = ctx2.p1(), ctx2.p2()
    elif from_rev or to_rev:
        repo = scmutil.unhidehashlikerevs(
            repo, [from_rev] + [to_rev], b'nowarn'
        )
        ctx1a = logcmdutil.revsingle(repo, from_rev, None)
        ctx1b = repo[nullrev]
        ctx2 = logcmdutil.revsingle(repo, to_rev, None)
    else:
        ctx1a, ctx2 = logcmdutil.revpair(repo, revs)
        if not revs:
            ctx1b = repo[None].p2()
        else:
            ctx1b = repo[nullrev]

    # Disable 3-way merge if there is only one parent
    if do3way:
        if ctx1b.rev() == nullrev:
            do3way = False

    matcher = scmutil.match(ctx2, pats, opts)

    if opts.get(b'patch'):
        if opts.get(b'subrepos'):
            raise error.Abort(_(b'--patch cannot be used with --subrepos'))
        if opts.get(b'per_file'):
            raise error.Abort(_(b'--patch cannot be used with --per-file'))
        if ctx2.node() is None:
            raise error.Abort(_(b'--patch requires two revisions'))

    tmproot = pycompat.mkdtemp(prefix=b'extdiff.')
    try:
        if opts.get(b'patch'):
            return diffpatch(
                ui, repo, ctx1a.node(), ctx2.node(), tmproot, matcher, cmdline
            )

        return diffrevs(
            ui,
            repo,
            ctx1a,
            ctx1b,
            ctx2,
            matcher,
            tmproot,
            cmdline,
            do3way,
            guitool,
            opts,
        )

    finally:
        ui.note(_(b'cleaning up temp directory\n'))
        shutil.rmtree(tmproot)
617
617
618
618
# Options shared by 'hg extdiff' and every saved [extdiff] command.
extdiffopts = (
    [
        (
            b'o',
            b'option',
            [],
            _(b'pass option to comparison program'),
            _(b'OPT'),
        ),
        (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
        (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
        (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
        (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
        (
            b'',
            b'per-file',
            False,
            _(b'compare each file instead of revision snapshots'),
        ),
        (
            b'',
            b'confirm',
            False,
            _(b'prompt user before each external program invocation'),
        ),
        (b'', b'patch', None, _(b'compare patches for two revisions')),
    ]
    + cmdutil.walkopts
    + cmdutil.subrepoopts
)
649
649
650
650
@command(
    b'extdiff',
    [
        (b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),
    ]
    + extdiffopts,
    _(b'hg extdiff [OPT]... [FILE]...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    inferrepo=True,
)
def extdiff(ui, repo, *pats, **opts):
    """use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    an external program. The default program used is diff, with
    default options "-Npru".

    To select a different program, use the -p/--program option. The
    program will be passed the names of two directories to compare,
    unless the --per-file option is specified (see below). To pass
    additional options to the program, use -o/--option. These will be
    passed before the names of the directories or files to compare.

    The --from, --to, and --change options work the same way they do for
    :hg:`diff`.

    The --per-file option runs the external program repeatedly on each
    file to diff, instead of once on two directories. By default,
    this happens one by one, where the next file diff is open in the
    external program only once the previous external program (for the
    previous file diff) has exited. If the external program has a
    graphical interface, it can open all the file diffs at once instead
    of one by one. See :hg:`help -e extdiff` for information about how
    to tell Mercurial that a given program has a graphical interface.

    The --confirm option will prompt the user before each invocation of
    the external program. It is ignored if --per-file isn't specified.
    """
    opts = pycompat.byteskwargs(opts)
    program = opts.get(b'program')
    option = opts.get(b'option')
    if not program:
        # default tool: plain 'diff -Npru'
        program = b'diff'
        option = option or [b'-Npru']
    cmdline = b' '.join(map(procutil.shellquote, [program] + option))
    return dodiff(ui, repo, cmdline, pats, opts)
697
697
698
698
class savedcmd(object):
    """use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    the following program::

        %(path)s

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.
    """

    def __init__(self, path, cmdline, isgui):
        # We can't pass non-ASCII through docstrings (and path is
        # in an unknown encoding anyway), but avoid double separators on
        # Windows
        docpath = stringutil.escapestr(path).replace(b'\\\\', b'\\')
        self.__doc__ %= {'path': pycompat.sysstr(stringutil.uirepr(docpath))}
        # _cmdline: the shell command to run for this saved tool
        self._cmdline = cmdline
        # _isgui: whether the tool has a GUI (allows backgrounded runs)
        self._isgui = isgui

    def __call__(self, ui, repo, *pats, **opts):
        opts = pycompat.byteskwargs(opts)
        options = b' '.join(map(procutil.shellquote, opts[b'option']))
        if options:
            options = b' ' + options
        return dodiff(
            ui, repo, self._cmdline + options, pats, opts, guitool=self._isgui
        )
731
731
732
732
def _gettooldetails(ui, cmd, path):
    """
    returns following things for a
    ```
    [extdiff]
    <cmd> = <path>
    ```
    entry:

    cmd: command/tool name
    path: path to the tool
    cmdline: the command which should be run
    isgui: whether the tool uses GUI or not

    Reads all external tools related configs, whether it be extdiff section,
    diff-tools or merge-tools section, or its specified in an old format or
    the latest format.
    """
    path = util.expandpath(path)
    if cmd.startswith(b'cmd.'):
        # new-style config: "cmd.<name> = <path>" with "opts.<name>"
        cmd = cmd[4:]
        if not path:
            path = procutil.findexe(cmd)
            if path is None:
                path = filemerge.findexternaltool(ui, cmd) or cmd
        diffopts = ui.config(b'extdiff', b'opts.' + cmd)
        cmdline = procutil.shellquote(path)
        if diffopts:
            cmdline += b' ' + diffopts
        isgui = ui.configbool(b'extdiff', b'gui.' + cmd)
    else:
        if path:
            # case "cmd = path opts"
            cmdline = path
            diffopts = len(pycompat.shlexsplit(cmdline)) > 1
        else:
            # case "cmd ="
            path = procutil.findexe(cmd)
            if path is None:
                path = filemerge.findexternaltool(ui, cmd) or cmd
            cmdline = procutil.shellquote(path)
            diffopts = False
        isgui = ui.configbool(b'extdiff', b'gui.' + cmd)
    # look for diff arguments in [diff-tools] then [merge-tools]
    if not diffopts:
        key = cmd + b'.diffargs'
        for section in (b'diff-tools', b'merge-tools'):
            args = ui.config(section, key)
            if args:
                cmdline += b' ' + args
                if isgui is None:
                    isgui = ui.configbool(section, cmd + b'.gui') or False
                break
    return cmd, path, cmdline, isgui
787
787
788
788
def uisetup(ui):
    """Register one command per [extdiff] config entry."""
    for cmd, path in ui.configitems(b'extdiff'):
        if cmd.startswith(b'opts.') or cmd.startswith(b'gui.'):
            # sub-keys of a tool entry, not tools themselves
            continue
        cmd, path, cmdline, isgui = _gettooldetails(ui, cmd, path)
        command(
            cmd,
            extdiffopts[:],
            _(b'hg %s [OPTION]... [FILE]...') % cmd,
            helpcategory=command.CATEGORY_FILE_CONTENTS,
            inferrepo=True,
        )(savedcmd(path, cmdline, isgui))
801
801
802
802
# tell hggettext to extract docstrings from these functions:
i18nfunctions = [savedcmd]
@@ -1,357 +1,358 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # commands: fastannotate commands
3 # commands: fastannotate commands
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import (
13 from mercurial import (
14 commands,
14 commands,
15 encoding,
15 encoding,
16 error,
16 error,
17 extensions,
17 extensions,
18 logcmdutil,
18 patch,
19 patch,
19 pycompat,
20 pycompat,
20 registrar,
21 registrar,
21 scmutil,
22 scmutil,
22 util,
23 util,
23 )
24 )
24
25
25 from . import (
26 from . import (
26 context as facontext,
27 context as facontext,
27 error as faerror,
28 error as faerror,
28 formatter as faformatter,
29 formatter as faformatter,
29 )
30 )
30
31
31 cmdtable = {}
32 cmdtable = {}
32 command = registrar.command(cmdtable)
33 command = registrar.command(cmdtable)
33
34
34
35
35 def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
36 def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
36 """generate paths matching given patterns"""
37 """generate paths matching given patterns"""
37 perfhack = repo.ui.configbool(b'fastannotate', b'perfhack')
38 perfhack = repo.ui.configbool(b'fastannotate', b'perfhack')
38
39
39 # disable perfhack if:
40 # disable perfhack if:
40 # a) any walkopt is used
41 # a) any walkopt is used
41 # b) if we treat pats as plain file names, some of them do not have
42 # b) if we treat pats as plain file names, some of them do not have
42 # corresponding linelog files
43 # corresponding linelog files
43 if perfhack:
44 if perfhack:
44 # cwd related to reporoot
45 # cwd related to reporoot
45 reporoot = os.path.dirname(repo.path)
46 reporoot = os.path.dirname(repo.path)
46 reldir = os.path.relpath(encoding.getcwd(), reporoot)
47 reldir = os.path.relpath(encoding.getcwd(), reporoot)
47 if reldir == b'.':
48 if reldir == b'.':
48 reldir = b''
49 reldir = b''
49 if any(opts.get(o[1]) for o in commands.walkopts): # a)
50 if any(opts.get(o[1]) for o in commands.walkopts): # a)
50 perfhack = False
51 perfhack = False
51 else: # b)
52 else: # b)
52 relpats = [
53 relpats = [
53 os.path.relpath(p, reporoot) if os.path.isabs(p) else p
54 os.path.relpath(p, reporoot) if os.path.isabs(p) else p
54 for p in pats
55 for p in pats
55 ]
56 ]
56 # disable perfhack on '..' since it allows escaping from the repo
57 # disable perfhack on '..' since it allows escaping from the repo
57 if any(
58 if any(
58 (
59 (
59 b'..' in f
60 b'..' in f
60 or not os.path.isfile(
61 or not os.path.isfile(
61 facontext.pathhelper(repo, f, aopts).linelogpath
62 facontext.pathhelper(repo, f, aopts).linelogpath
62 )
63 )
63 )
64 )
64 for f in relpats
65 for f in relpats
65 ):
66 ):
66 perfhack = False
67 perfhack = False
67
68
68 # perfhack: emit paths directory without checking with manifest
69 # perfhack: emit paths directory without checking with manifest
69 # this can be incorrect if the rev dos not have file.
70 # this can be incorrect if the rev dos not have file.
70 if perfhack:
71 if perfhack:
71 for p in relpats:
72 for p in relpats:
72 yield os.path.join(reldir, p)
73 yield os.path.join(reldir, p)
73 else:
74 else:
74
75
75 def bad(x, y):
76 def bad(x, y):
76 raise error.Abort(b"%s: %s" % (x, y))
77 raise error.Abort(b"%s: %s" % (x, y))
77
78
78 ctx = scmutil.revsingle(repo, rev)
79 ctx = logcmdutil.revsingle(repo, rev)
79 m = scmutil.match(ctx, pats, opts, badfn=bad)
80 m = scmutil.match(ctx, pats, opts, badfn=bad)
80 for p in ctx.walk(m):
81 for p in ctx.walk(m):
81 yield p
82 yield p
82
83
83
84
84 fastannotatecommandargs = {
85 fastannotatecommandargs = {
85 'options': [
86 'options': [
86 (b'r', b'rev', b'.', _(b'annotate the specified revision'), _(b'REV')),
87 (b'r', b'rev', b'.', _(b'annotate the specified revision'), _(b'REV')),
87 (b'u', b'user', None, _(b'list the author (long with -v)')),
88 (b'u', b'user', None, _(b'list the author (long with -v)')),
88 (b'f', b'file', None, _(b'list the filename')),
89 (b'f', b'file', None, _(b'list the filename')),
89 (b'd', b'date', None, _(b'list the date (short with -q)')),
90 (b'd', b'date', None, _(b'list the date (short with -q)')),
90 (b'n', b'number', None, _(b'list the revision number (default)')),
91 (b'n', b'number', None, _(b'list the revision number (default)')),
91 (b'c', b'changeset', None, _(b'list the changeset')),
92 (b'c', b'changeset', None, _(b'list the changeset')),
92 (
93 (
93 b'l',
94 b'l',
94 b'line-number',
95 b'line-number',
95 None,
96 None,
96 _(b'show line number at the first appearance'),
97 _(b'show line number at the first appearance'),
97 ),
98 ),
98 (
99 (
99 b'e',
100 b'e',
100 b'deleted',
101 b'deleted',
101 None,
102 None,
102 _(b'show deleted lines (slow) (EXPERIMENTAL)'),
103 _(b'show deleted lines (slow) (EXPERIMENTAL)'),
103 ),
104 ),
104 (
105 (
105 b'',
106 b'',
106 b'no-content',
107 b'no-content',
107 None,
108 None,
108 _(b'do not show file content (EXPERIMENTAL)'),
109 _(b'do not show file content (EXPERIMENTAL)'),
109 ),
110 ),
110 (b'', b'no-follow', None, _(b"don't follow copies and renames")),
111 (b'', b'no-follow', None, _(b"don't follow copies and renames")),
111 (
112 (
112 b'',
113 b'',
113 b'linear',
114 b'linear',
114 None,
115 None,
115 _(
116 _(
116 b'enforce linear history, ignore second parent '
117 b'enforce linear history, ignore second parent '
117 b'of merges (EXPERIMENTAL)'
118 b'of merges (EXPERIMENTAL)'
118 ),
119 ),
119 ),
120 ),
120 (
121 (
121 b'',
122 b'',
122 b'long-hash',
123 b'long-hash',
123 None,
124 None,
124 _(b'show long changeset hash (EXPERIMENTAL)'),
125 _(b'show long changeset hash (EXPERIMENTAL)'),
125 ),
126 ),
126 (
127 (
127 b'',
128 b'',
128 b'rebuild',
129 b'rebuild',
129 None,
130 None,
130 _(b'rebuild cache even if it exists (EXPERIMENTAL)'),
131 _(b'rebuild cache even if it exists (EXPERIMENTAL)'),
131 ),
132 ),
132 ]
133 ]
133 + commands.diffwsopts
134 + commands.diffwsopts
134 + commands.walkopts
135 + commands.walkopts
135 + commands.formatteropts,
136 + commands.formatteropts,
136 'synopsis': _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
137 'synopsis': _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
137 'inferrepo': True,
138 'inferrepo': True,
138 }
139 }
139
140
140
141
141 def fastannotate(ui, repo, *pats, **opts):
142 def fastannotate(ui, repo, *pats, **opts):
142 """show changeset information by line for each file
143 """show changeset information by line for each file
143
144
144 List changes in files, showing the revision id responsible for each line.
145 List changes in files, showing the revision id responsible for each line.
145
146
146 This command is useful for discovering when a change was made and by whom.
147 This command is useful for discovering when a change was made and by whom.
147
148
148 By default this command prints revision numbers. If you include --file,
149 By default this command prints revision numbers. If you include --file,
149 --user, or --date, the revision number is suppressed unless you also
150 --user, or --date, the revision number is suppressed unless you also
150 include --number. The default format can also be customized by setting
151 include --number. The default format can also be customized by setting
151 fastannotate.defaultformat.
152 fastannotate.defaultformat.
152
153
153 Returns 0 on success.
154 Returns 0 on success.
154
155
155 .. container:: verbose
156 .. container:: verbose
156
157
157 This command uses an implementation different from the vanilla annotate
158 This command uses an implementation different from the vanilla annotate
158 command, which may produce slightly different (while still reasonable)
159 command, which may produce slightly different (while still reasonable)
159 outputs for some cases.
160 outputs for some cases.
160
161
161 Unlike the vanilla anootate, fastannotate follows rename regardless of
162 Unlike the vanilla anootate, fastannotate follows rename regardless of
162 the existence of --file.
163 the existence of --file.
163
164
164 For the best performance when running on a full repo, use -c, -l,
165 For the best performance when running on a full repo, use -c, -l,
165 avoid -u, -d, -n. Use --linear and --no-content to make it even faster.
166 avoid -u, -d, -n. Use --linear and --no-content to make it even faster.
166
167
167 For the best performance when running on a shallow (remotefilelog)
168 For the best performance when running on a shallow (remotefilelog)
168 repo, avoid --linear, --no-follow, or any diff options. As the server
169 repo, avoid --linear, --no-follow, or any diff options. As the server
169 won't be able to populate annotate cache when non-default options
170 won't be able to populate annotate cache when non-default options
170 affecting results are used.
171 affecting results are used.
171 """
172 """
172 if not pats:
173 if not pats:
173 raise error.Abort(_(b'at least one filename or pattern is required'))
174 raise error.Abort(_(b'at least one filename or pattern is required'))
174
175
175 # performance hack: filtered repo can be slow. unfilter by default.
176 # performance hack: filtered repo can be slow. unfilter by default.
176 if ui.configbool(b'fastannotate', b'unfilteredrepo'):
177 if ui.configbool(b'fastannotate', b'unfilteredrepo'):
177 repo = repo.unfiltered()
178 repo = repo.unfiltered()
178
179
179 opts = pycompat.byteskwargs(opts)
180 opts = pycompat.byteskwargs(opts)
180
181
181 rev = opts.get(b'rev', b'.')
182 rev = opts.get(b'rev', b'.')
182 rebuild = opts.get(b'rebuild', False)
183 rebuild = opts.get(b'rebuild', False)
183
184
184 diffopts = patch.difffeatureopts(
185 diffopts = patch.difffeatureopts(
185 ui, opts, section=b'annotate', whitespace=True
186 ui, opts, section=b'annotate', whitespace=True
186 )
187 )
187 aopts = facontext.annotateopts(
188 aopts = facontext.annotateopts(
188 diffopts=diffopts,
189 diffopts=diffopts,
189 followmerge=not opts.get(b'linear', False),
190 followmerge=not opts.get(b'linear', False),
190 followrename=not opts.get(b'no_follow', False),
191 followrename=not opts.get(b'no_follow', False),
191 )
192 )
192
193
193 if not any(
194 if not any(
194 opts.get(s)
195 opts.get(s)
195 for s in [b'user', b'date', b'file', b'number', b'changeset']
196 for s in [b'user', b'date', b'file', b'number', b'changeset']
196 ):
197 ):
197 # default 'number' for compatibility. but fastannotate is more
198 # default 'number' for compatibility. but fastannotate is more
198 # efficient with "changeset", "line-number" and "no-content".
199 # efficient with "changeset", "line-number" and "no-content".
199 for name in ui.configlist(
200 for name in ui.configlist(
200 b'fastannotate', b'defaultformat', [b'number']
201 b'fastannotate', b'defaultformat', [b'number']
201 ):
202 ):
202 opts[name] = True
203 opts[name] = True
203
204
204 ui.pager(b'fastannotate')
205 ui.pager(b'fastannotate')
205 template = opts.get(b'template')
206 template = opts.get(b'template')
206 if template == b'json':
207 if template == b'json':
207 formatter = faformatter.jsonformatter(ui, repo, opts)
208 formatter = faformatter.jsonformatter(ui, repo, opts)
208 else:
209 else:
209 formatter = faformatter.defaultformatter(ui, repo, opts)
210 formatter = faformatter.defaultformatter(ui, repo, opts)
210 showdeleted = opts.get(b'deleted', False)
211 showdeleted = opts.get(b'deleted', False)
211 showlines = not bool(opts.get(b'no_content'))
212 showlines = not bool(opts.get(b'no_content'))
212 showpath = opts.get(b'file', False)
213 showpath = opts.get(b'file', False)
213
214
214 # find the head of the main (master) branch
215 # find the head of the main (master) branch
215 master = ui.config(b'fastannotate', b'mainbranch') or rev
216 master = ui.config(b'fastannotate', b'mainbranch') or rev
216
217
217 # paths will be used for prefetching and the real annotating
218 # paths will be used for prefetching and the real annotating
218 paths = list(_matchpaths(repo, rev, pats, opts, aopts))
219 paths = list(_matchpaths(repo, rev, pats, opts, aopts))
219
220
220 # for client, prefetch from the server
221 # for client, prefetch from the server
221 if util.safehasattr(repo, 'prefetchfastannotate'):
222 if util.safehasattr(repo, 'prefetchfastannotate'):
222 repo.prefetchfastannotate(paths)
223 repo.prefetchfastannotate(paths)
223
224
224 for path in paths:
225 for path in paths:
225 result = lines = existinglines = None
226 result = lines = existinglines = None
226 while True:
227 while True:
227 try:
228 try:
228 with facontext.annotatecontext(repo, path, aopts, rebuild) as a:
229 with facontext.annotatecontext(repo, path, aopts, rebuild) as a:
229 result = a.annotate(
230 result = a.annotate(
230 rev,
231 rev,
231 master=master,
232 master=master,
232 showpath=showpath,
233 showpath=showpath,
233 showlines=(showlines and not showdeleted),
234 showlines=(showlines and not showdeleted),
234 )
235 )
235 if showdeleted:
236 if showdeleted:
236 existinglines = {(l[0], l[1]) for l in result}
237 existinglines = {(l[0], l[1]) for l in result}
237 result = a.annotatealllines(
238 result = a.annotatealllines(
238 rev, showpath=showpath, showlines=showlines
239 rev, showpath=showpath, showlines=showlines
239 )
240 )
240 break
241 break
241 except (faerror.CannotReuseError, faerror.CorruptedFileError):
242 except (faerror.CannotReuseError, faerror.CorruptedFileError):
242 # happens if master moves backwards, or the file was deleted
243 # happens if master moves backwards, or the file was deleted
243 # and readded, or renamed to an existing name, or corrupted.
244 # and readded, or renamed to an existing name, or corrupted.
244 if rebuild: # give up since we have tried rebuild already
245 if rebuild: # give up since we have tried rebuild already
245 raise
246 raise
246 else: # try a second time rebuilding the cache (slow)
247 else: # try a second time rebuilding the cache (slow)
247 rebuild = True
248 rebuild = True
248 continue
249 continue
249
250
250 if showlines:
251 if showlines:
251 result, lines = result
252 result, lines = result
252
253
253 formatter.write(result, lines, existinglines=existinglines)
254 formatter.write(result, lines, existinglines=existinglines)
254 formatter.end()
255 formatter.end()
255
256
256
257
257 _newopts = set()
258 _newopts = set()
258 _knownopts = {
259 _knownopts = {
259 opt[1].replace(b'-', b'_')
260 opt[1].replace(b'-', b'_')
260 for opt in (fastannotatecommandargs['options'] + commands.globalopts)
261 for opt in (fastannotatecommandargs['options'] + commands.globalopts)
261 }
262 }
262
263
263
264
264 def _annotatewrapper(orig, ui, repo, *pats, **opts):
265 def _annotatewrapper(orig, ui, repo, *pats, **opts):
265 """used by wrapdefault"""
266 """used by wrapdefault"""
266 # we need this hack until the obsstore has 0.0 seconds perf impact
267 # we need this hack until the obsstore has 0.0 seconds perf impact
267 if ui.configbool(b'fastannotate', b'unfilteredrepo'):
268 if ui.configbool(b'fastannotate', b'unfilteredrepo'):
268 repo = repo.unfiltered()
269 repo = repo.unfiltered()
269
270
270 # treat the file as text (skip the isbinary check)
271 # treat the file as text (skip the isbinary check)
271 if ui.configbool(b'fastannotate', b'forcetext'):
272 if ui.configbool(b'fastannotate', b'forcetext'):
272 opts['text'] = True
273 opts['text'] = True
273
274
274 # check if we need to do prefetch (client-side)
275 # check if we need to do prefetch (client-side)
275 rev = opts.get('rev')
276 rev = opts.get('rev')
276 if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
277 if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
277 paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
278 paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
278 repo.prefetchfastannotate(paths)
279 repo.prefetchfastannotate(paths)
279
280
280 return orig(ui, repo, *pats, **opts)
281 return orig(ui, repo, *pats, **opts)
281
282
282
283
283 def registercommand():
284 def registercommand():
284 """register the fastannotate command"""
285 """register the fastannotate command"""
285 name = b'fastannotate|fastblame|fa'
286 name = b'fastannotate|fastblame|fa'
286 command(name, helpbasic=True, **fastannotatecommandargs)(fastannotate)
287 command(name, helpbasic=True, **fastannotatecommandargs)(fastannotate)
287
288
288
289
289 def wrapdefault():
290 def wrapdefault():
290 """wrap the default annotate command, to be aware of the protocol"""
291 """wrap the default annotate command, to be aware of the protocol"""
291 extensions.wrapcommand(commands.table, b'annotate', _annotatewrapper)
292 extensions.wrapcommand(commands.table, b'annotate', _annotatewrapper)
292
293
293
294
294 @command(
295 @command(
295 b'debugbuildannotatecache',
296 b'debugbuildannotatecache',
296 [(b'r', b'rev', b'', _(b'build up to the specific revision'), _(b'REV'))]
297 [(b'r', b'rev', b'', _(b'build up to the specific revision'), _(b'REV'))]
297 + commands.walkopts,
298 + commands.walkopts,
298 _(b'[-r REV] FILE...'),
299 _(b'[-r REV] FILE...'),
299 )
300 )
300 def debugbuildannotatecache(ui, repo, *pats, **opts):
301 def debugbuildannotatecache(ui, repo, *pats, **opts):
301 """incrementally build fastannotate cache up to REV for specified files
302 """incrementally build fastannotate cache up to REV for specified files
302
303
303 If REV is not specified, use the config 'fastannotate.mainbranch'.
304 If REV is not specified, use the config 'fastannotate.mainbranch'.
304
305
305 If fastannotate.client is True, download the annotate cache from the
306 If fastannotate.client is True, download the annotate cache from the
306 server. Otherwise, build the annotate cache locally.
307 server. Otherwise, build the annotate cache locally.
307
308
308 The annotate cache will be built using the default diff and follow
309 The annotate cache will be built using the default diff and follow
309 options and lives in '.hg/fastannotate/default'.
310 options and lives in '.hg/fastannotate/default'.
310 """
311 """
311 opts = pycompat.byteskwargs(opts)
312 opts = pycompat.byteskwargs(opts)
312 rev = opts.get(b'REV') or ui.config(b'fastannotate', b'mainbranch')
313 rev = opts.get(b'REV') or ui.config(b'fastannotate', b'mainbranch')
313 if not rev:
314 if not rev:
314 raise error.Abort(
315 raise error.Abort(
315 _(b'you need to provide a revision'),
316 _(b'you need to provide a revision'),
316 hint=_(b'set fastannotate.mainbranch or use --rev'),
317 hint=_(b'set fastannotate.mainbranch or use --rev'),
317 )
318 )
318 if ui.configbool(b'fastannotate', b'unfilteredrepo'):
319 if ui.configbool(b'fastannotate', b'unfilteredrepo'):
319 repo = repo.unfiltered()
320 repo = repo.unfiltered()
320 ctx = scmutil.revsingle(repo, rev)
321 ctx = logcmdutil.revsingle(repo, rev)
321 m = scmutil.match(ctx, pats, opts)
322 m = scmutil.match(ctx, pats, opts)
322 paths = list(ctx.walk(m))
323 paths = list(ctx.walk(m))
323 if util.safehasattr(repo, 'prefetchfastannotate'):
324 if util.safehasattr(repo, 'prefetchfastannotate'):
324 # client
325 # client
325 if opts.get(b'REV'):
326 if opts.get(b'REV'):
326 raise error.Abort(_(b'--rev cannot be used for client'))
327 raise error.Abort(_(b'--rev cannot be used for client'))
327 repo.prefetchfastannotate(paths)
328 repo.prefetchfastannotate(paths)
328 else:
329 else:
329 # server, or full repo
330 # server, or full repo
330 progress = ui.makeprogress(_(b'building'), total=len(paths))
331 progress = ui.makeprogress(_(b'building'), total=len(paths))
331 for i, path in enumerate(paths):
332 for i, path in enumerate(paths):
332 progress.update(i)
333 progress.update(i)
333 with facontext.annotatecontext(repo, path) as actx:
334 with facontext.annotatecontext(repo, path) as actx:
334 try:
335 try:
335 if actx.isuptodate(rev):
336 if actx.isuptodate(rev):
336 continue
337 continue
337 actx.annotate(rev, rev)
338 actx.annotate(rev, rev)
338 except (faerror.CannotReuseError, faerror.CorruptedFileError):
339 except (faerror.CannotReuseError, faerror.CorruptedFileError):
339 # the cache is broken (could happen with renaming so the
340 # the cache is broken (could happen with renaming so the
340 # file history gets invalidated). rebuild and try again.
341 # file history gets invalidated). rebuild and try again.
341 ui.debug(
342 ui.debug(
342 b'fastannotate: %s: rebuilding broken cache\n' % path
343 b'fastannotate: %s: rebuilding broken cache\n' % path
343 )
344 )
344 actx.rebuild()
345 actx.rebuild()
345 try:
346 try:
346 actx.annotate(rev, rev)
347 actx.annotate(rev, rev)
347 except Exception as ex:
348 except Exception as ex:
348 # possibly a bug, but should not stop us from building
349 # possibly a bug, but should not stop us from building
349 # cache for other files.
350 # cache for other files.
350 ui.warn(
351 ui.warn(
351 _(
352 _(
352 b'fastannotate: %s: failed to '
353 b'fastannotate: %s: failed to '
353 b'build cache: %r\n'
354 b'build cache: %r\n'
354 )
355 )
355 % (path, ex)
356 % (path, ex)
356 )
357 )
357 progress.complete()
358 progress.complete()
@@ -1,1869 +1,1869 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import copy
12 import copy
13 import os
13 import os
14
14
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16
16
17 from mercurial.pycompat import open
17 from mercurial.pycompat import open
18
18
19 from mercurial.hgweb import webcommands
19 from mercurial.hgweb import webcommands
20
20
21 from mercurial import (
21 from mercurial import (
22 archival,
22 archival,
23 cmdutil,
23 cmdutil,
24 copies as copiesmod,
24 copies as copiesmod,
25 error,
25 error,
26 exchange,
26 exchange,
27 extensions,
27 extensions,
28 exthelper,
28 exthelper,
29 filemerge,
29 filemerge,
30 hg,
30 hg,
31 logcmdutil,
31 logcmdutil,
32 match as matchmod,
32 match as matchmod,
33 merge,
33 merge,
34 mergestate as mergestatemod,
34 mergestate as mergestatemod,
35 pathutil,
35 pathutil,
36 pycompat,
36 pycompat,
37 scmutil,
37 scmutil,
38 smartset,
38 smartset,
39 subrepo,
39 subrepo,
40 url as urlmod,
40 url as urlmod,
41 util,
41 util,
42 )
42 )
43
43
44 from mercurial.upgrade_utils import (
44 from mercurial.upgrade_utils import (
45 actions as upgrade_actions,
45 actions as upgrade_actions,
46 )
46 )
47
47
48 from . import (
48 from . import (
49 lfcommands,
49 lfcommands,
50 lfutil,
50 lfutil,
51 storefactory,
51 storefactory,
52 )
52 )
53
53
54 eh = exthelper.exthelper()
54 eh = exthelper.exthelper()
55
55
56 lfstatus = lfutil.lfstatus
56 lfstatus = lfutil.lfstatus
57
57
58 MERGE_ACTION_LARGEFILE_MARK_REMOVED = b'lfmr'
58 MERGE_ACTION_LARGEFILE_MARK_REMOVED = b'lfmr'
59
59
60 # -- Utility functions: commonly/repeatedly needed functionality ---------------
60 # -- Utility functions: commonly/repeatedly needed functionality ---------------
61
61
62
62
63 def composelargefilematcher(match, manifest):
63 def composelargefilematcher(match, manifest):
64 """create a matcher that matches only the largefiles in the original
64 """create a matcher that matches only the largefiles in the original
65 matcher"""
65 matcher"""
66 m = copy.copy(match)
66 m = copy.copy(match)
67 lfile = lambda f: lfutil.standin(f) in manifest
67 lfile = lambda f: lfutil.standin(f) in manifest
68 m._files = [lf for lf in m._files if lfile(lf)]
68 m._files = [lf for lf in m._files if lfile(lf)]
69 m._fileset = set(m._files)
69 m._fileset = set(m._files)
70 m.always = lambda: False
70 m.always = lambda: False
71 origmatchfn = m.matchfn
71 origmatchfn = m.matchfn
72 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
72 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
73 return m
73 return m
74
74
75
75
76 def composenormalfilematcher(match, manifest, exclude=None):
76 def composenormalfilematcher(match, manifest, exclude=None):
77 excluded = set()
77 excluded = set()
78 if exclude is not None:
78 if exclude is not None:
79 excluded.update(exclude)
79 excluded.update(exclude)
80
80
81 m = copy.copy(match)
81 m = copy.copy(match)
82 notlfile = lambda f: not (
82 notlfile = lambda f: not (
83 lfutil.isstandin(f) or lfutil.standin(f) in manifest or f in excluded
83 lfutil.isstandin(f) or lfutil.standin(f) in manifest or f in excluded
84 )
84 )
85 m._files = [lf for lf in m._files if notlfile(lf)]
85 m._files = [lf for lf in m._files if notlfile(lf)]
86 m._fileset = set(m._files)
86 m._fileset = set(m._files)
87 m.always = lambda: False
87 m.always = lambda: False
88 origmatchfn = m.matchfn
88 origmatchfn = m.matchfn
89 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
89 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
90 return m
90 return m
91
91
92
92
93 def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts):
93 def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts):
94 large = opts.get('large')
94 large = opts.get('large')
95 lfsize = lfutil.getminsize(
95 lfsize = lfutil.getminsize(
96 ui, lfutil.islfilesrepo(repo), opts.get('lfsize')
96 ui, lfutil.islfilesrepo(repo), opts.get('lfsize')
97 )
97 )
98
98
99 lfmatcher = None
99 lfmatcher = None
100 if lfutil.islfilesrepo(repo):
100 if lfutil.islfilesrepo(repo):
101 lfpats = ui.configlist(lfutil.longname, b'patterns')
101 lfpats = ui.configlist(lfutil.longname, b'patterns')
102 if lfpats:
102 if lfpats:
103 lfmatcher = matchmod.match(repo.root, b'', list(lfpats))
103 lfmatcher = matchmod.match(repo.root, b'', list(lfpats))
104
104
105 lfnames = []
105 lfnames = []
106 m = matcher
106 m = matcher
107
107
108 wctx = repo[None]
108 wctx = repo[None]
109 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
109 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
110 exact = m.exact(f)
110 exact = m.exact(f)
111 lfile = lfutil.standin(f) in wctx
111 lfile = lfutil.standin(f) in wctx
112 nfile = f in wctx
112 nfile = f in wctx
113 exists = lfile or nfile
113 exists = lfile or nfile
114
114
115 # Don't warn the user when they attempt to add a normal tracked file.
115 # Don't warn the user when they attempt to add a normal tracked file.
116 # The normal add code will do that for us.
116 # The normal add code will do that for us.
117 if exact and exists:
117 if exact and exists:
118 if lfile:
118 if lfile:
119 ui.warn(_(b'%s already a largefile\n') % uipathfn(f))
119 ui.warn(_(b'%s already a largefile\n') % uipathfn(f))
120 continue
120 continue
121
121
122 if (exact or not exists) and not lfutil.isstandin(f):
122 if (exact or not exists) and not lfutil.isstandin(f):
123 # In case the file was removed previously, but not committed
123 # In case the file was removed previously, but not committed
124 # (issue3507)
124 # (issue3507)
125 if not repo.wvfs.exists(f):
125 if not repo.wvfs.exists(f):
126 continue
126 continue
127
127
128 abovemin = (
128 abovemin = (
129 lfsize and repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024
129 lfsize and repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024
130 )
130 )
131 if large or abovemin or (lfmatcher and lfmatcher(f)):
131 if large or abovemin or (lfmatcher and lfmatcher(f)):
132 lfnames.append(f)
132 lfnames.append(f)
133 if ui.verbose or not exact:
133 if ui.verbose or not exact:
134 ui.status(_(b'adding %s as a largefile\n') % uipathfn(f))
134 ui.status(_(b'adding %s as a largefile\n') % uipathfn(f))
135
135
136 bad = []
136 bad = []
137
137
138 # Need to lock, otherwise there could be a race condition between
138 # Need to lock, otherwise there could be a race condition between
139 # when standins are created and added to the repo.
139 # when standins are created and added to the repo.
140 with repo.wlock():
140 with repo.wlock():
141 if not opts.get('dry_run'):
141 if not opts.get('dry_run'):
142 standins = []
142 standins = []
143 lfdirstate = lfutil.openlfdirstate(ui, repo)
143 lfdirstate = lfutil.openlfdirstate(ui, repo)
144 for f in lfnames:
144 for f in lfnames:
145 standinname = lfutil.standin(f)
145 standinname = lfutil.standin(f)
146 lfutil.writestandin(
146 lfutil.writestandin(
147 repo,
147 repo,
148 standinname,
148 standinname,
149 hash=b'',
149 hash=b'',
150 executable=lfutil.getexecutable(repo.wjoin(f)),
150 executable=lfutil.getexecutable(repo.wjoin(f)),
151 )
151 )
152 standins.append(standinname)
152 standins.append(standinname)
153 lfdirstate.set_tracked(f)
153 lfdirstate.set_tracked(f)
154 lfdirstate.write()
154 lfdirstate.write()
155 bad += [
155 bad += [
156 lfutil.splitstandin(f)
156 lfutil.splitstandin(f)
157 for f in repo[None].add(standins)
157 for f in repo[None].add(standins)
158 if f in m.files()
158 if f in m.files()
159 ]
159 ]
160
160
161 added = [f for f in lfnames if f not in bad]
161 added = [f for f in lfnames if f not in bad]
162 return added, bad
162 return added, bad
163
163
164
164
165 def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts):
165 def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts):
166 after = opts.get('after')
166 after = opts.get('after')
167 m = composelargefilematcher(matcher, repo[None].manifest())
167 m = composelargefilematcher(matcher, repo[None].manifest())
168 with lfstatus(repo):
168 with lfstatus(repo):
169 s = repo.status(match=m, clean=not isaddremove)
169 s = repo.status(match=m, clean=not isaddremove)
170 manifest = repo[None].manifest()
170 manifest = repo[None].manifest()
171 modified, added, deleted, clean = [
171 modified, added, deleted, clean = [
172 [f for f in list if lfutil.standin(f) in manifest]
172 [f for f in list if lfutil.standin(f) in manifest]
173 for list in (s.modified, s.added, s.deleted, s.clean)
173 for list in (s.modified, s.added, s.deleted, s.clean)
174 ]
174 ]
175
175
176 def warn(files, msg):
176 def warn(files, msg):
177 for f in files:
177 for f in files:
178 ui.warn(msg % uipathfn(f))
178 ui.warn(msg % uipathfn(f))
179 return int(len(files) > 0)
179 return int(len(files) > 0)
180
180
181 if after:
181 if after:
182 remove = deleted
182 remove = deleted
183 result = warn(
183 result = warn(
184 modified + added + clean, _(b'not removing %s: file still exists\n')
184 modified + added + clean, _(b'not removing %s: file still exists\n')
185 )
185 )
186 else:
186 else:
187 remove = deleted + clean
187 remove = deleted + clean
188 result = warn(
188 result = warn(
189 modified,
189 modified,
190 _(
190 _(
191 b'not removing %s: file is modified (use -f'
191 b'not removing %s: file is modified (use -f'
192 b' to force removal)\n'
192 b' to force removal)\n'
193 ),
193 ),
194 )
194 )
195 result = (
195 result = (
196 warn(
196 warn(
197 added,
197 added,
198 _(
198 _(
199 b'not removing %s: file has been marked for add'
199 b'not removing %s: file has been marked for add'
200 b' (use forget to undo)\n'
200 b' (use forget to undo)\n'
201 ),
201 ),
202 )
202 )
203 or result
203 or result
204 )
204 )
205
205
206 # Need to lock because standin files are deleted then removed from the
206 # Need to lock because standin files are deleted then removed from the
207 # repository and we could race in-between.
207 # repository and we could race in-between.
208 with repo.wlock():
208 with repo.wlock():
209 lfdirstate = lfutil.openlfdirstate(ui, repo)
209 lfdirstate = lfutil.openlfdirstate(ui, repo)
210 for f in sorted(remove):
210 for f in sorted(remove):
211 if ui.verbose or not m.exact(f):
211 if ui.verbose or not m.exact(f):
212 ui.status(_(b'removing %s\n') % uipathfn(f))
212 ui.status(_(b'removing %s\n') % uipathfn(f))
213
213
214 if not dryrun:
214 if not dryrun:
215 if not after:
215 if not after:
216 repo.wvfs.unlinkpath(f, ignoremissing=True)
216 repo.wvfs.unlinkpath(f, ignoremissing=True)
217
217
218 if dryrun:
218 if dryrun:
219 return result
219 return result
220
220
221 remove = [lfutil.standin(f) for f in remove]
221 remove = [lfutil.standin(f) for f in remove]
222 # If this is being called by addremove, let the original addremove
222 # If this is being called by addremove, let the original addremove
223 # function handle this.
223 # function handle this.
224 if not isaddremove:
224 if not isaddremove:
225 for f in remove:
225 for f in remove:
226 repo.wvfs.unlinkpath(f, ignoremissing=True)
226 repo.wvfs.unlinkpath(f, ignoremissing=True)
227 repo[None].forget(remove)
227 repo[None].forget(remove)
228
228
229 for f in remove:
229 for f in remove:
230 lfdirstate.set_untracked(lfutil.splitstandin(f))
230 lfdirstate.set_untracked(lfutil.splitstandin(f))
231
231
232 lfdirstate.write()
232 lfdirstate.write()
233
233
234 return result
234 return result
235
235
236
236
237 # For overriding mercurial.hgweb.webcommands so that largefiles will
237 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 # appear at their right place in the manifests.
238 # appear at their right place in the manifests.
239 @eh.wrapfunction(webcommands, b'decodepath')
239 @eh.wrapfunction(webcommands, b'decodepath')
240 def decodepath(orig, path):
240 def decodepath(orig, path):
241 return lfutil.splitstandin(path) or path
241 return lfutil.splitstandin(path) or path
242
242
243
243
244 # -- Wrappers: modify existing commands --------------------------------
244 # -- Wrappers: modify existing commands --------------------------------
245
245
246
246
247 @eh.wrapcommand(
247 @eh.wrapcommand(
248 b'add',
248 b'add',
249 opts=[
249 opts=[
250 (b'', b'large', None, _(b'add as largefile')),
250 (b'', b'large', None, _(b'add as largefile')),
251 (b'', b'normal', None, _(b'add as normal file')),
251 (b'', b'normal', None, _(b'add as normal file')),
252 (
252 (
253 b'',
253 b'',
254 b'lfsize',
254 b'lfsize',
255 b'',
255 b'',
256 _(
256 _(
257 b'add all files above this size (in megabytes) '
257 b'add all files above this size (in megabytes) '
258 b'as largefiles (default: 10)'
258 b'as largefiles (default: 10)'
259 ),
259 ),
260 ),
260 ),
261 ],
261 ],
262 )
262 )
263 def overrideadd(orig, ui, repo, *pats, **opts):
263 def overrideadd(orig, ui, repo, *pats, **opts):
264 if opts.get('normal') and opts.get('large'):
264 if opts.get('normal') and opts.get('large'):
265 raise error.Abort(_(b'--normal cannot be used with --large'))
265 raise error.Abort(_(b'--normal cannot be used with --large'))
266 return orig(ui, repo, *pats, **opts)
266 return orig(ui, repo, *pats, **opts)
267
267
268
268
269 @eh.wrapfunction(cmdutil, b'add')
269 @eh.wrapfunction(cmdutil, b'add')
270 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
270 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
271 # The --normal flag short circuits this override
271 # The --normal flag short circuits this override
272 if opts.get('normal'):
272 if opts.get('normal'):
273 return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)
273 return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)
274
274
275 ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
275 ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
276 normalmatcher = composenormalfilematcher(
276 normalmatcher = composenormalfilematcher(
277 matcher, repo[None].manifest(), ladded
277 matcher, repo[None].manifest(), ladded
278 )
278 )
279 bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)
279 bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)
280
280
281 bad.extend(f for f in lbad)
281 bad.extend(f for f in lbad)
282 return bad
282 return bad
283
283
284
284
285 @eh.wrapfunction(cmdutil, b'remove')
285 @eh.wrapfunction(cmdutil, b'remove')
286 def cmdutilremove(
286 def cmdutilremove(
287 orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
287 orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
288 ):
288 ):
289 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
289 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
290 result = orig(
290 result = orig(
291 ui,
291 ui,
292 repo,
292 repo,
293 normalmatcher,
293 normalmatcher,
294 prefix,
294 prefix,
295 uipathfn,
295 uipathfn,
296 after,
296 after,
297 force,
297 force,
298 subrepos,
298 subrepos,
299 dryrun,
299 dryrun,
300 )
300 )
301 return (
301 return (
302 removelargefiles(
302 removelargefiles(
303 ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
303 ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
304 )
304 )
305 or result
305 or result
306 )
306 )
307
307
308
308
309 @eh.wrapfunction(subrepo.hgsubrepo, b'status')
309 @eh.wrapfunction(subrepo.hgsubrepo, b'status')
310 def overridestatusfn(orig, repo, rev2, **opts):
310 def overridestatusfn(orig, repo, rev2, **opts):
311 with lfstatus(repo._repo):
311 with lfstatus(repo._repo):
312 return orig(repo, rev2, **opts)
312 return orig(repo, rev2, **opts)
313
313
314
314
315 @eh.wrapcommand(b'status')
315 @eh.wrapcommand(b'status')
316 def overridestatus(orig, ui, repo, *pats, **opts):
316 def overridestatus(orig, ui, repo, *pats, **opts):
317 with lfstatus(repo):
317 with lfstatus(repo):
318 return orig(ui, repo, *pats, **opts)
318 return orig(ui, repo, *pats, **opts)
319
319
320
320
321 @eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
321 @eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
322 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
322 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
323 with lfstatus(repo._repo):
323 with lfstatus(repo._repo):
324 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
324 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
325
325
326
326
327 @eh.wrapcommand(b'log')
327 @eh.wrapcommand(b'log')
328 def overridelog(orig, ui, repo, *pats, **opts):
328 def overridelog(orig, ui, repo, *pats, **opts):
329 def overridematchandpats(
329 def overridematchandpats(
330 orig,
330 orig,
331 ctx,
331 ctx,
332 pats=(),
332 pats=(),
333 opts=None,
333 opts=None,
334 globbed=False,
334 globbed=False,
335 default=b'relpath',
335 default=b'relpath',
336 badfn=None,
336 badfn=None,
337 ):
337 ):
338 """Matcher that merges root directory with .hglf, suitable for log.
338 """Matcher that merges root directory with .hglf, suitable for log.
339 It is still possible to match .hglf directly.
339 It is still possible to match .hglf directly.
340 For any listed files run log on the standin too.
340 For any listed files run log on the standin too.
341 matchfn tries both the given filename and with .hglf stripped.
341 matchfn tries both the given filename and with .hglf stripped.
342 """
342 """
343 if opts is None:
343 if opts is None:
344 opts = {}
344 opts = {}
345 matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn)
345 matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn)
346 m, p = copy.copy(matchandpats)
346 m, p = copy.copy(matchandpats)
347
347
348 if m.always():
348 if m.always():
349 # We want to match everything anyway, so there's no benefit trying
349 # We want to match everything anyway, so there's no benefit trying
350 # to add standins.
350 # to add standins.
351 return matchandpats
351 return matchandpats
352
352
353 pats = set(p)
353 pats = set(p)
354
354
355 def fixpats(pat, tostandin=lfutil.standin):
355 def fixpats(pat, tostandin=lfutil.standin):
356 if pat.startswith(b'set:'):
356 if pat.startswith(b'set:'):
357 return pat
357 return pat
358
358
359 kindpat = matchmod._patsplit(pat, None)
359 kindpat = matchmod._patsplit(pat, None)
360
360
361 if kindpat[0] is not None:
361 if kindpat[0] is not None:
362 return kindpat[0] + b':' + tostandin(kindpat[1])
362 return kindpat[0] + b':' + tostandin(kindpat[1])
363 return tostandin(kindpat[1])
363 return tostandin(kindpat[1])
364
364
365 cwd = repo.getcwd()
365 cwd = repo.getcwd()
366 if cwd:
366 if cwd:
367 hglf = lfutil.shortname
367 hglf = lfutil.shortname
368 back = util.pconvert(repo.pathto(hglf)[: -len(hglf)])
368 back = util.pconvert(repo.pathto(hglf)[: -len(hglf)])
369
369
370 def tostandin(f):
370 def tostandin(f):
371 # The file may already be a standin, so truncate the back
371 # The file may already be a standin, so truncate the back
372 # prefix and test before mangling it. This avoids turning
372 # prefix and test before mangling it. This avoids turning
373 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
373 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
374 if f.startswith(back) and lfutil.splitstandin(f[len(back) :]):
374 if f.startswith(back) and lfutil.splitstandin(f[len(back) :]):
375 return f
375 return f
376
376
377 # An absolute path is from outside the repo, so truncate the
377 # An absolute path is from outside the repo, so truncate the
378 # path to the root before building the standin. Otherwise cwd
378 # path to the root before building the standin. Otherwise cwd
379 # is somewhere in the repo, relative to root, and needs to be
379 # is somewhere in the repo, relative to root, and needs to be
380 # prepended before building the standin.
380 # prepended before building the standin.
381 if os.path.isabs(cwd):
381 if os.path.isabs(cwd):
382 f = f[len(back) :]
382 f = f[len(back) :]
383 else:
383 else:
384 f = cwd + b'/' + f
384 f = cwd + b'/' + f
385 return back + lfutil.standin(f)
385 return back + lfutil.standin(f)
386
386
387 else:
387 else:
388
388
389 def tostandin(f):
389 def tostandin(f):
390 if lfutil.isstandin(f):
390 if lfutil.isstandin(f):
391 return f
391 return f
392 return lfutil.standin(f)
392 return lfutil.standin(f)
393
393
394 pats.update(fixpats(f, tostandin) for f in p)
394 pats.update(fixpats(f, tostandin) for f in p)
395
395
396 for i in range(0, len(m._files)):
396 for i in range(0, len(m._files)):
397 # Don't add '.hglf' to m.files, since that is already covered by '.'
397 # Don't add '.hglf' to m.files, since that is already covered by '.'
398 if m._files[i] == b'.':
398 if m._files[i] == b'.':
399 continue
399 continue
400 standin = lfutil.standin(m._files[i])
400 standin = lfutil.standin(m._files[i])
401 # If the "standin" is a directory, append instead of replace to
401 # If the "standin" is a directory, append instead of replace to
402 # support naming a directory on the command line with only
402 # support naming a directory on the command line with only
403 # largefiles. The original directory is kept to support normal
403 # largefiles. The original directory is kept to support normal
404 # files.
404 # files.
405 if standin in ctx:
405 if standin in ctx:
406 m._files[i] = standin
406 m._files[i] = standin
407 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
407 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
408 m._files.append(standin)
408 m._files.append(standin)
409
409
410 m._fileset = set(m._files)
410 m._fileset = set(m._files)
411 m.always = lambda: False
411 m.always = lambda: False
412 origmatchfn = m.matchfn
412 origmatchfn = m.matchfn
413
413
414 def lfmatchfn(f):
414 def lfmatchfn(f):
415 lf = lfutil.splitstandin(f)
415 lf = lfutil.splitstandin(f)
416 if lf is not None and origmatchfn(lf):
416 if lf is not None and origmatchfn(lf):
417 return True
417 return True
418 r = origmatchfn(f)
418 r = origmatchfn(f)
419 return r
419 return r
420
420
421 m.matchfn = lfmatchfn
421 m.matchfn = lfmatchfn
422
422
423 ui.debug(b'updated patterns: %s\n' % b', '.join(sorted(pats)))
423 ui.debug(b'updated patterns: %s\n' % b', '.join(sorted(pats)))
424 return m, pats
424 return m, pats
425
425
426 # For hg log --patch, the match object is used in two different senses:
426 # For hg log --patch, the match object is used in two different senses:
427 # (1) to determine what revisions should be printed out, and
427 # (1) to determine what revisions should be printed out, and
428 # (2) to determine what files to print out diffs for.
428 # (2) to determine what files to print out diffs for.
429 # The magic matchandpats override should be used for case (1) but not for
429 # The magic matchandpats override should be used for case (1) but not for
430 # case (2).
430 # case (2).
431 oldmatchandpats = scmutil.matchandpats
431 oldmatchandpats = scmutil.matchandpats
432
432
433 def overridemakefilematcher(orig, repo, pats, opts, badfn=None):
433 def overridemakefilematcher(orig, repo, pats, opts, badfn=None):
434 wctx = repo[None]
434 wctx = repo[None]
435 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
435 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
436 return lambda ctx: match
436 return lambda ctx: match
437
437
438 wrappedmatchandpats = extensions.wrappedfunction(
438 wrappedmatchandpats = extensions.wrappedfunction(
439 scmutil, b'matchandpats', overridematchandpats
439 scmutil, b'matchandpats', overridematchandpats
440 )
440 )
441 wrappedmakefilematcher = extensions.wrappedfunction(
441 wrappedmakefilematcher = extensions.wrappedfunction(
442 logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher
442 logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher
443 )
443 )
444 with wrappedmatchandpats, wrappedmakefilematcher:
444 with wrappedmatchandpats, wrappedmakefilematcher:
445 return orig(ui, repo, *pats, **opts)
445 return orig(ui, repo, *pats, **opts)
446
446
447
447
448 @eh.wrapcommand(
448 @eh.wrapcommand(
449 b'verify',
449 b'verify',
450 opts=[
450 opts=[
451 (
451 (
452 b'',
452 b'',
453 b'large',
453 b'large',
454 None,
454 None,
455 _(b'verify that all largefiles in current revision exists'),
455 _(b'verify that all largefiles in current revision exists'),
456 ),
456 ),
457 (
457 (
458 b'',
458 b'',
459 b'lfa',
459 b'lfa',
460 None,
460 None,
461 _(b'verify largefiles in all revisions, not just current'),
461 _(b'verify largefiles in all revisions, not just current'),
462 ),
462 ),
463 (
463 (
464 b'',
464 b'',
465 b'lfc',
465 b'lfc',
466 None,
466 None,
467 _(b'verify local largefile contents, not just existence'),
467 _(b'verify local largefile contents, not just existence'),
468 ),
468 ),
469 ],
469 ],
470 )
470 )
471 def overrideverify(orig, ui, repo, *pats, **opts):
471 def overrideverify(orig, ui, repo, *pats, **opts):
472 large = opts.pop('large', False)
472 large = opts.pop('large', False)
473 all = opts.pop('lfa', False)
473 all = opts.pop('lfa', False)
474 contents = opts.pop('lfc', False)
474 contents = opts.pop('lfc', False)
475
475
476 result = orig(ui, repo, *pats, **opts)
476 result = orig(ui, repo, *pats, **opts)
477 if large or all or contents:
477 if large or all or contents:
478 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
478 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
479 return result
479 return result
480
480
481
481
482 @eh.wrapcommand(
482 @eh.wrapcommand(
483 b'debugstate',
483 b'debugstate',
484 opts=[(b'', b'large', None, _(b'display largefiles dirstate'))],
484 opts=[(b'', b'large', None, _(b'display largefiles dirstate'))],
485 )
485 )
486 def overridedebugstate(orig, ui, repo, *pats, **opts):
486 def overridedebugstate(orig, ui, repo, *pats, **opts):
487 large = opts.pop('large', False)
487 large = opts.pop('large', False)
488 if large:
488 if large:
489
489
490 class fakerepo(object):
490 class fakerepo(object):
491 dirstate = lfutil.openlfdirstate(ui, repo)
491 dirstate = lfutil.openlfdirstate(ui, repo)
492
492
493 orig(ui, fakerepo, *pats, **opts)
493 orig(ui, fakerepo, *pats, **opts)
494 else:
494 else:
495 orig(ui, repo, *pats, **opts)
495 orig(ui, repo, *pats, **opts)
496
496
497
497
498 # Before starting the manifest merge, merge.updates will call
498 # Before starting the manifest merge, merge.updates will call
499 # _checkunknownfile to check if there are any files in the merged-in
499 # _checkunknownfile to check if there are any files in the merged-in
500 # changeset that collide with unknown files in the working copy.
500 # changeset that collide with unknown files in the working copy.
501 #
501 #
502 # The largefiles are seen as unknown, so this prevents us from merging
502 # The largefiles are seen as unknown, so this prevents us from merging
503 # in a file 'foo' if we already have a largefile with the same name.
503 # in a file 'foo' if we already have a largefile with the same name.
504 #
504 #
505 # The overridden function filters the unknown files by removing any
505 # The overridden function filters the unknown files by removing any
506 # largefiles. This makes the merge proceed and we can then handle this
506 # largefiles. This makes the merge proceed and we can then handle this
507 # case further in the overridden calculateupdates function below.
507 # case further in the overridden calculateupdates function below.
508 @eh.wrapfunction(merge, b'_checkunknownfile')
508 @eh.wrapfunction(merge, b'_checkunknownfile')
509 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
509 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
510 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
510 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
511 return False
511 return False
512 return origfn(repo, wctx, mctx, f, f2)
512 return origfn(repo, wctx, mctx, f, f2)
513
513
514
514
515 # The manifest merge handles conflicts on the manifest level. We want
515 # The manifest merge handles conflicts on the manifest level. We want
516 # to handle changes in largefile-ness of files at this level too.
516 # to handle changes in largefile-ness of files at this level too.
517 #
517 #
518 # The strategy is to run the original calculateupdates and then process
518 # The strategy is to run the original calculateupdates and then process
519 # the action list it outputs. There are two cases we need to deal with:
519 # the action list it outputs. There are two cases we need to deal with:
520 #
520 #
521 # 1. Normal file in p1, largefile in p2. Here the largefile is
521 # 1. Normal file in p1, largefile in p2. Here the largefile is
522 # detected via its standin file, which will enter the working copy
522 # detected via its standin file, which will enter the working copy
523 # with a "get" action. It is not "merge" since the standin is all
523 # with a "get" action. It is not "merge" since the standin is all
524 # Mercurial is concerned with at this level -- the link to the
524 # Mercurial is concerned with at this level -- the link to the
525 # existing normal file is not relevant here.
525 # existing normal file is not relevant here.
526 #
526 #
527 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
527 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
528 # since the largefile will be present in the working copy and
528 # since the largefile will be present in the working copy and
529 # different from the normal file in p2. Mercurial therefore
529 # different from the normal file in p2. Mercurial therefore
530 # triggers a merge action.
530 # triggers a merge action.
531 #
531 #
532 # In both cases, we prompt the user and emit new actions to either
532 # In both cases, we prompt the user and emit new actions to either
533 # remove the standin (if the normal file was kept) or to remove the
533 # remove the standin (if the normal file was kept) or to remove the
534 # normal file and get the standin (if the largefile was kept). The
534 # normal file and get the standin (if the largefile was kept). The
535 # default prompt answer is to use the largefile version since it was
535 # default prompt answer is to use the largefile version since it was
536 # presumably changed on purpose.
536 # presumably changed on purpose.
537 #
537 #
538 # Finally, the merge.applyupdates function will then take care of
538 # Finally, the merge.applyupdates function will then take care of
539 # writing the files into the working copy and lfcommands.updatelfiles
539 # writing the files into the working copy and lfcommands.updatelfiles
540 # will update the largefiles.
540 # will update the largefiles.
541 @eh.wrapfunction(merge, b'calculateupdates')
541 @eh.wrapfunction(merge, b'calculateupdates')
542 def overridecalculateupdates(
542 def overridecalculateupdates(
543 origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
543 origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
544 ):
544 ):
545 overwrite = force and not branchmerge
545 overwrite = force and not branchmerge
546 mresult = origfn(
546 mresult = origfn(
547 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
547 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
548 )
548 )
549
549
550 if overwrite:
550 if overwrite:
551 return mresult
551 return mresult
552
552
553 # Convert to dictionary with filename as key and action as value.
553 # Convert to dictionary with filename as key and action as value.
554 lfiles = set()
554 lfiles = set()
555 for f in mresult.files():
555 for f in mresult.files():
556 splitstandin = lfutil.splitstandin(f)
556 splitstandin = lfutil.splitstandin(f)
557 if splitstandin is not None and splitstandin in p1:
557 if splitstandin is not None and splitstandin in p1:
558 lfiles.add(splitstandin)
558 lfiles.add(splitstandin)
559 elif lfutil.standin(f) in p1:
559 elif lfutil.standin(f) in p1:
560 lfiles.add(f)
560 lfiles.add(f)
561
561
562 for lfile in sorted(lfiles):
562 for lfile in sorted(lfiles):
563 standin = lfutil.standin(lfile)
563 standin = lfutil.standin(lfile)
564 (lm, largs, lmsg) = mresult.getfile(lfile, (None, None, None))
564 (lm, largs, lmsg) = mresult.getfile(lfile, (None, None, None))
565 (sm, sargs, smsg) = mresult.getfile(standin, (None, None, None))
565 (sm, sargs, smsg) = mresult.getfile(standin, (None, None, None))
566 if sm in (b'g', b'dc') and lm != b'r':
566 if sm in (b'g', b'dc') and lm != b'r':
567 if sm == b'dc':
567 if sm == b'dc':
568 f1, f2, fa, move, anc = sargs
568 f1, f2, fa, move, anc = sargs
569 sargs = (p2[f2].flags(), False)
569 sargs = (p2[f2].flags(), False)
570 # Case 1: normal file in the working copy, largefile in
570 # Case 1: normal file in the working copy, largefile in
571 # the second parent
571 # the second parent
572 usermsg = (
572 usermsg = (
573 _(
573 _(
574 b'remote turned local normal file %s into a largefile\n'
574 b'remote turned local normal file %s into a largefile\n'
575 b'use (l)argefile or keep (n)ormal file?'
575 b'use (l)argefile or keep (n)ormal file?'
576 b'$$ &Largefile $$ &Normal file'
576 b'$$ &Largefile $$ &Normal file'
577 )
577 )
578 % lfile
578 % lfile
579 )
579 )
580 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
580 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
581 mresult.addfile(lfile, b'r', None, b'replaced by standin')
581 mresult.addfile(lfile, b'r', None, b'replaced by standin')
582 mresult.addfile(standin, b'g', sargs, b'replaces standin')
582 mresult.addfile(standin, b'g', sargs, b'replaces standin')
583 else: # keep local normal file
583 else: # keep local normal file
584 mresult.addfile(lfile, b'k', None, b'replaces standin')
584 mresult.addfile(lfile, b'k', None, b'replaces standin')
585 if branchmerge:
585 if branchmerge:
586 mresult.addfile(
586 mresult.addfile(
587 standin,
587 standin,
588 b'k',
588 b'k',
589 None,
589 None,
590 b'replaced by non-standin',
590 b'replaced by non-standin',
591 )
591 )
592 else:
592 else:
593 mresult.addfile(
593 mresult.addfile(
594 standin,
594 standin,
595 b'r',
595 b'r',
596 None,
596 None,
597 b'replaced by non-standin',
597 b'replaced by non-standin',
598 )
598 )
599 elif lm in (b'g', b'dc') and sm != b'r':
599 elif lm in (b'g', b'dc') and sm != b'r':
600 if lm == b'dc':
600 if lm == b'dc':
601 f1, f2, fa, move, anc = largs
601 f1, f2, fa, move, anc = largs
602 largs = (p2[f2].flags(), False)
602 largs = (p2[f2].flags(), False)
603 # Case 2: largefile in the working copy, normal file in
603 # Case 2: largefile in the working copy, normal file in
604 # the second parent
604 # the second parent
605 usermsg = (
605 usermsg = (
606 _(
606 _(
607 b'remote turned local largefile %s into a normal file\n'
607 b'remote turned local largefile %s into a normal file\n'
608 b'keep (l)argefile or use (n)ormal file?'
608 b'keep (l)argefile or use (n)ormal file?'
609 b'$$ &Largefile $$ &Normal file'
609 b'$$ &Largefile $$ &Normal file'
610 )
610 )
611 % lfile
611 % lfile
612 )
612 )
613 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
613 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
614 if branchmerge:
614 if branchmerge:
615 # largefile can be restored from standin safely
615 # largefile can be restored from standin safely
616 mresult.addfile(
616 mresult.addfile(
617 lfile,
617 lfile,
618 b'k',
618 b'k',
619 None,
619 None,
620 b'replaced by standin',
620 b'replaced by standin',
621 )
621 )
622 mresult.addfile(standin, b'k', None, b'replaces standin')
622 mresult.addfile(standin, b'k', None, b'replaces standin')
623 else:
623 else:
624 # "lfile" should be marked as "removed" without
624 # "lfile" should be marked as "removed" without
625 # removal of itself
625 # removal of itself
626 mresult.addfile(
626 mresult.addfile(
627 lfile,
627 lfile,
628 MERGE_ACTION_LARGEFILE_MARK_REMOVED,
628 MERGE_ACTION_LARGEFILE_MARK_REMOVED,
629 None,
629 None,
630 b'forget non-standin largefile',
630 b'forget non-standin largefile',
631 )
631 )
632
632
633 # linear-merge should treat this largefile as 're-added'
633 # linear-merge should treat this largefile as 're-added'
634 mresult.addfile(standin, b'a', None, b'keep standin')
634 mresult.addfile(standin, b'a', None, b'keep standin')
635 else: # pick remote normal file
635 else: # pick remote normal file
636 mresult.addfile(lfile, b'g', largs, b'replaces standin')
636 mresult.addfile(lfile, b'g', largs, b'replaces standin')
637 mresult.addfile(
637 mresult.addfile(
638 standin,
638 standin,
639 b'r',
639 b'r',
640 None,
640 None,
641 b'replaced by non-standin',
641 b'replaced by non-standin',
642 )
642 )
643
643
644 return mresult
644 return mresult
645
645
646
646
647 @eh.wrapfunction(mergestatemod, b'recordupdates')
647 @eh.wrapfunction(mergestatemod, b'recordupdates')
648 def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
648 def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
649 if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions:
649 if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions:
650 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
650 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
651 with lfdirstate.parentchange():
651 with lfdirstate.parentchange():
652 for lfile, args, msg in actions[
652 for lfile, args, msg in actions[
653 MERGE_ACTION_LARGEFILE_MARK_REMOVED
653 MERGE_ACTION_LARGEFILE_MARK_REMOVED
654 ]:
654 ]:
655 # this should be executed before 'orig', to execute 'remove'
655 # this should be executed before 'orig', to execute 'remove'
656 # before all other actions
656 # before all other actions
657 repo.dirstate.update_file(
657 repo.dirstate.update_file(
658 lfile, p1_tracked=True, wc_tracked=False
658 lfile, p1_tracked=True, wc_tracked=False
659 )
659 )
660 # make sure lfile doesn't get synclfdirstate'd as normal
660 # make sure lfile doesn't get synclfdirstate'd as normal
661 lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=True)
661 lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=True)
662 lfdirstate.write()
662 lfdirstate.write()
663
663
664 return orig(repo, actions, branchmerge, getfiledata)
664 return orig(repo, actions, branchmerge, getfiledata)
665
665
666
666
667 # Override filemerge to prompt the user about how they wish to merge
667 # Override filemerge to prompt the user about how they wish to merge
668 # largefiles. This will handle identical edits without prompting the user.
668 # largefiles. This will handle identical edits without prompting the user.
669 @eh.wrapfunction(filemerge, b'_filemerge')
669 @eh.wrapfunction(filemerge, b'_filemerge')
670 def overridefilemerge(
670 def overridefilemerge(
671 origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
671 origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
672 ):
672 ):
673 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
673 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
674 return origfn(
674 return origfn(
675 premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
675 premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
676 )
676 )
677
677
678 ahash = lfutil.readasstandin(fca).lower()
678 ahash = lfutil.readasstandin(fca).lower()
679 dhash = lfutil.readasstandin(fcd).lower()
679 dhash = lfutil.readasstandin(fcd).lower()
680 ohash = lfutil.readasstandin(fco).lower()
680 ohash = lfutil.readasstandin(fco).lower()
681 if (
681 if (
682 ohash != ahash
682 ohash != ahash
683 and ohash != dhash
683 and ohash != dhash
684 and (
684 and (
685 dhash == ahash
685 dhash == ahash
686 or repo.ui.promptchoice(
686 or repo.ui.promptchoice(
687 _(
687 _(
688 b'largefile %s has a merge conflict\nancestor was %s\n'
688 b'largefile %s has a merge conflict\nancestor was %s\n'
689 b'you can keep (l)ocal %s or take (o)ther %s.\n'
689 b'you can keep (l)ocal %s or take (o)ther %s.\n'
690 b'what do you want to do?'
690 b'what do you want to do?'
691 b'$$ &Local $$ &Other'
691 b'$$ &Local $$ &Other'
692 )
692 )
693 % (lfutil.splitstandin(orig), ahash, dhash, ohash),
693 % (lfutil.splitstandin(orig), ahash, dhash, ohash),
694 0,
694 0,
695 )
695 )
696 == 1
696 == 1
697 )
697 )
698 ):
698 ):
699 repo.wwrite(fcd.path(), fco.data(), fco.flags())
699 repo.wwrite(fcd.path(), fco.data(), fco.flags())
700 return True, 0, False
700 return True, 0, False
701
701
702
702
703 @eh.wrapfunction(copiesmod, b'pathcopies')
703 @eh.wrapfunction(copiesmod, b'pathcopies')
704 def copiespathcopies(orig, ctx1, ctx2, match=None):
704 def copiespathcopies(orig, ctx1, ctx2, match=None):
705 copies = orig(ctx1, ctx2, match=match)
705 copies = orig(ctx1, ctx2, match=match)
706 updated = {}
706 updated = {}
707
707
708 for k, v in pycompat.iteritems(copies):
708 for k, v in pycompat.iteritems(copies):
709 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
709 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
710
710
711 return updated
711 return updated
712
712
713
713
# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
@eh.wrapfunction(cmdutil, b'copy')
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    """Wrap ``cmdutil.copy`` so copy/rename also handles largefiles.

    The wrapped command is run twice: once restricted to normal files,
    then again restricted to largefile standins.  Afterwards the actual
    largefiles are copied/renamed and the largefile dirstate updated.
    Aborts with 'no files to copy' only if *both* passes found nothing.
    """
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    manifest = repo[None].manifest()

    def normalfilesmatchfn(
        orig,
        ctx,
        pats=(),
        opts=None,
        globbed=False,
        default=b'relpath',
        badfn=None,
    ):
        # First pass: restrict the matcher to normal (non-large) files.
        if opts is None:
            opts = {}
        match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
        return composenormalfilematcher(match, manifest)

    with extensions.wrappedfunction(scmutil, b'match', normalfilesmatchfn):
        try:
            result = orig(ui, repo, pats, opts, rename)
        except error.Abort as e:
            if e.message != _(b'no files to copy'):
                raise e
            else:
                # No normal files matched; remember that and keep going —
                # the largefile pass below may still find something.
                nonormalfiles = True
                result = 0

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    def makestandin(relpath):
        # Absolute working-dir path of the standin for a user-relative path.
        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
        return repo.wvfs.join(lfutil.standin(path))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))

    try:
        # When we call orig below it creates the standins but we don't add
        # them to the dir state until later so lock during that time.
        wlock = repo.wlock()

        manifest = repo[None].manifest()

        def overridematch(
            orig,
            ctx,
            pats=(),
            opts=None,
            globbed=False,
            default=b'relpath',
            badfn=None,
        ):
            # Second pass: restrict the matcher to largefile standins.
            if opts is None:
                opts = {}
            newpats = []
            # The patterns were previously mangled to add the standin
            # directory; we need to remove that now
            for pat in pats:
                if matchmod.patkind(pat) is None and lfutil.shortname in pat:
                    newpats.append(pat.replace(lfutil.shortname, b''))
                else:
                    newpats.append(pat)
            match = orig(ctx, newpats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)
            lfile = lambda f: lfutil.standin(f) in manifest
            m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
            m._fileset = set(m._files)
            origmatchfn = m.matchfn

            def matchfn(f):
                # Only standins of tracked largefiles match; everything
                # else yields None (falsy, i.e. no match).
                lfile = lfutil.splitstandin(f)
                return (
                    lfile is not None
                    and (f in manifest)
                    and origmatchfn(lfile)
                    or None
                )

            m.matchfn = matchfn
            return m

        listpats = []
        for pat in pats:
            if matchmod.patkind(pat) is not None:
                listpats.append(pat)
            else:
                listpats.append(makestandin(pat))

        copiedfiles = []

        def overridecopyfile(orig, src, dest, *args, **kwargs):
            # Refuse to clobber an existing largefile unless --force;
            # record every copy so the largefiles can be mirrored below.
            if lfutil.shortname in src and dest.startswith(
                repo.wjoin(lfutil.shortname)
            ):
                destlfile = dest.replace(lfutil.shortname, b'')
                if not opts[b'force'] and os.path.exists(destlfile):
                    raise IOError(
                        b'', _(b'destination largefile already exists')
                    )
            copiedfiles.append((src, dest))
            orig(src, dest, *args, **kwargs)

        with extensions.wrappedfunction(util, b'copyfile', overridecopyfile):
            with extensions.wrappedfunction(scmutil, b'match', overridematch):
                result += orig(ui, repo, listpats, opts, rename)

        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for (src, dest) in copiedfiles:
            if lfutil.shortname in src and dest.startswith(
                repo.wjoin(lfutil.shortname)
            ):
                # Mirror each standin copy onto the actual largefile.
                srclfile = src.replace(repo.wjoin(lfutil.standin(b'')), b'')
                destlfile = dest.replace(repo.wjoin(lfutil.standin(b'')), b'')
                destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or b'.'
                if not os.path.isdir(destlfiledir):
                    os.makedirs(destlfiledir)
                if rename:
                    os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))

                    # The file is gone, but this deletes any empty parent
                    # directories as a side-effect.
                    repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
                    lfdirstate.set_untracked(srclfile)
                else:
                    util.copyfile(repo.wjoin(srclfile), repo.wjoin(destlfile))

                    lfdirstate.set_tracked(destlfile)
        lfdirstate.write()
    except error.Abort as e:
        if e.message != _(b'no files to copy'):
            raise e
        else:
            nolfiles = True
    finally:
        wlock.release()

    if nolfiles and nonormalfiles:
        raise error.Abort(_(b'no files to copy'))

    return result
880
880
881
881
# When the user calls revert, we have to be careful to not revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins update the largefiles.
@eh.wrapfunction(cmdutil, b'revert')
def overriderevert(orig, ui, repo, ctx, *pats, **opts):
    """Wrap ``cmdutil.revert`` to revert largefiles via their standins."""
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        s = lfutil.lfdirstatestatus(lfdirstate, repo)
        lfdirstate.write()
        # Sync standins with the working copy so revert sees reality:
        # refresh standins of modified largefiles, drop standins of
        # deleted ones.
        for lfile in s.modified:
            lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
        for lfile in s.deleted:
            fstandin = lfutil.standin(lfile)
            if repo.wvfs.exists(fstandin):
                repo.wvfs.unlink(fstandin)

        # Snapshot standin state so we can compute which largefiles the
        # revert actually changed (see getlfilestoupdate below).
        oldstandins = lfutil.getstandinsstate(repo)

        def overridematch(
            orig,
            mctx,
            pats=(),
            opts=None,
            globbed=False,
            default=b'relpath',
            badfn=None,
        ):
            if opts is None:
                opts = {}
            match = orig(mctx, pats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)

            # revert supports recursing into subrepos, and though largefiles
            # currently doesn't work correctly in that case, this match is
            # called, so the lfdirstate above may not be the correct one for
            # this invocation of match.
            lfdirstate = lfutil.openlfdirstate(
                mctx.repo().ui, mctx.repo(), False
            )

            wctx = repo[None]
            matchfiles = []
            for f in m._files:
                standin = lfutil.standin(f)
                if standin in ctx or standin in mctx:
                    # It is a largefile in either context: match its standin.
                    matchfiles.append(standin)
                elif standin in wctx or lfdirstate.get_entry(f).removed:
                    # Largefile only in the working copy (or removed):
                    # nothing to revert.
                    continue
                else:
                    matchfiles.append(f)
            m._files = matchfiles
            m._fileset = set(m._files)
            origmatchfn = m.matchfn

            def matchfn(f):
                lfile = lfutil.splitstandin(f)
                if lfile is not None:
                    return origmatchfn(lfile) and (f in ctx or f in mctx)
                return origmatchfn(f)

            m.matchfn = matchfn
            return m

        with extensions.wrappedfunction(scmutil, b'match', overridematch):
            orig(ui, repo, ctx, *pats, **opts)

        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        # lfdirstate should be 'normallookup'-ed for updated files,
        # because reverting doesn't touch dirstate for 'normal' files
        # when target revision is explicitly specified: in such case,
        # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
        # of target (standin) file.
        lfcommands.updatelfiles(
            ui, repo, filelist, printmessage=False, normallookup=True
        )
967
967
968
968
# after pulling changesets, we need to take some extra care to get
# largefiles updated remotely
@eh.wrapcommand(
    b'pull',
    opts=[
        (
            b'',
            b'all-largefiles',
            None,
            _(b'download all pulled versions of largefiles (DEPRECATED)'),
        ),
        (
            b'',
            b'lfrev',
            [],
            _(b'download largefiles for these revisions'),
            _(b'REV'),
        ),
    ],
)
def overridepull(orig, ui, repo, source=None, **opts):
    """Wrap ``hg pull`` to also cache largefiles for --lfrev revisions."""
    revsprepull = len(repo)
    if not source:
        source = b'default'
    # Remember where to pull largefiles from (read by the store code).
    repo.lfpullsource = source
    result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    lfrevs = opts.get('lfrev', [])
    if opts.get('all_largefiles'):
        # --all-largefiles is sugar for --lfrev "pulled()".
        lfrevs.append(b'pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull  # for pulled() revset expression
        try:
            for rev in logcmdutil.revrange(repo, lfrevs):
                ui.note(_(b'pulling largefiles for revision %d\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            # firstpulled must not outlive this command; pulledrevsetsymbol
            # aborts when the attribute is absent.
            del repo.firstpulled
        ui.status(_(b"%d largefiles cached\n") % numcached)
    return result
1011
1011
1012
1012
@eh.wrapcommand(
    b'push',
    opts=[
        (
            b'',
            b'lfrev',
            [],
            _(b'upload largefiles for these revisions'),
            _(b'REV'),
        )
    ],
)
def overridepush(orig, ui, repo, *args, **kwargs):
    """Override push command and store --lfrev parameters in opargs"""
    lfrevs = kwargs.pop('lfrev', None)
    if lfrevs:
        # Resolve the revsets now and smuggle them to the push operation
        # via opargs (picked up by exchangepushoperation below).
        opargs = kwargs.setdefault('opargs', {})
        opargs[b'lfrevs'] = logcmdutil.revrange(repo, lfrevs)
    return orig(ui, repo, *args, **kwargs)
1032
1032
1033
1033
@eh.wrapfunction(exchange, b'pushoperation')
def exchangepushoperation(orig, *args, **kwargs):
    """Wrap the pushoperation constructor to carry the --lfrev revisions.

    The revisions are popped from ``kwargs`` (the wrapped constructor
    does not accept them) and attached to the resulting operation as
    ``lfrevs``.
    """
    largefile_revs = kwargs.pop('lfrevs', None)
    operation = orig(*args, **kwargs)
    operation.lfrevs = largefile_revs
    return operation
1041
1041
1042
1042
@eh.revsetpredicate(b'pulled()')
def pulledrevsetsymbol(repo, subset, x):
    """Changesets that just has been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

        Some examples:

        - pull largefiles for all new changesets::

            hg pull -lfrev "pulled()"

        - pull largefiles for all new branch heads::

            hg pull -lfrev "head(pulled()) and not closed()"

    """

    # repo.firstpulled is set only while overridepull is resolving
    # --lfrev revsets; outside that window the predicate is meaningless.
    try:
        firstpulled = repo.firstpulled
    except AttributeError:
        raise error.Abort(_(b"pulled() only available in --lfrev"))
    return smartset.baseset([r for r in subset if r >= firstpulled])
1068
1068
1069
1069
@eh.wrapcommand(
    b'clone',
    opts=[
        (
            b'',
            b'all-largefiles',
            None,
            _(b'download all versions of all largefiles'),
        )
    ],
)
def overrideclone(orig, ui, source, dest=None, **opts):
    """Wrap ``hg clone`` to reject --all-largefiles for remote destinations."""
    d = dest
    if d is None:
        d = hg.defaultdest(source)
    # Largefiles can only be downloaded into a local destination.
    if opts.get('all_largefiles') and not hg.islocal(d):
        raise error.Abort(
            _(b'--all-largefiles is incompatible with non-local destination %s')
            % d
        )

    return orig(ui, source, dest, **opts)
1092
1092
1093
1093
@eh.wrapfunction(hg, b'clone')
def hgclone(orig, ui, opts, *args, **kwargs):
    """Wrap ``hg.clone`` to download largefiles after a local clone.

    Returns the wrapped result, or None when --all-largefiles was given
    and some largefiles could not be downloaded (signals failure).
    """
    result = orig(ui, opts, *args, **kwargs)

    if result is not None:
        sourcerepo, destrepo = result
        repo = destrepo.local()

        # When cloning to a remote repo (like through SSH), no repo is available
        # from the peer. Therefore the largefiles can't be downloaded and the
        # hgrc can't be updated.
        if not repo:
            return result

        # Caching is implicitly limited to 'rev' option, since the dest repo was
        # truncated at that point. The user may expect a download count with
        # this option, so attempt whether or not this is a largefile repo.
        if opts.get(b'all_largefiles'):
            success, missing = lfcommands.downloadlfiles(ui, repo)

            if missing != 0:
                return None

    return result
1118
1118
1119
1119
@eh.wrapcommand(b'rebase', extension=b'rebase')
def overriderebasecmd(orig, ui, repo, **opts):
    """Wrap ``hg rebase`` to keep largefile hooks quiet and disable
    in-memory rebase (largefiles need a working directory)."""
    if not util.safehasattr(repo, b'_largefilesenabled'):
        return orig(ui, repo, **opts)

    resuming = opts.get('continue')
    # Suppress per-commit prompts/status output during the automated
    # rebase commits; popped again in the finally block below.
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    repo._lfstatuswriters.append(lambda *msg, **opts: None)
    try:
        with ui.configoverride(
            {(b'rebase', b'experimental.inmemory'): False}, b"largefiles"
        ):
            return orig(ui, repo, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
1136
1136
1137
1137
@eh.extsetup
def overriderebase(ui):
    """Force the rebase extension (if loaded) to avoid in-memory rebasing."""
    try:
        rebase = extensions.find(b'rebase')
    except KeyError:
        # rebase extension not enabled; nothing to do
        pass
    else:

        def _dorebase(orig, *args, **kwargs):
            # Largefiles require an on-disk working directory.
            kwargs['inmemory'] = False
            return orig(*args, **kwargs)

        extensions.wrapfunction(rebase, b'_dorebase', _dorebase)
1151
1151
1152
1152
@eh.wrapcommand(b'archive')
def overridearchivecmd(orig, ui, repo, dest, **opts):
    """Run ``hg archive`` with largefile status tracking enabled."""
    # Enable lfstatus on the unfiltered repo so overridearchive (which
    # checks the unfiltered attr) takes the largefile-aware path.
    unfiltered = repo.unfiltered()
    with lfstatus(unfiltered):
        return orig(ui, unfiltered, dest, **opts)
1157
1157
1158
1158
@eh.wrapfunction(webcommands, b'archive')
def hgwebarchive(orig, web):
    """Serve hgweb archive downloads with largefile status enabled."""
    with lfstatus(web.repo):
        return orig(web)
1163
1163
1164
1164
@eh.wrapfunction(archival, b'archive')
def overridearchive(
    orig,
    repo,
    dest,
    node,
    kind,
    decode=True,
    match=None,
    prefix=b'',
    mtime=None,
    subrepos=None,
):
    """Wrap ``archival.archive`` to place real largefile contents (not
    standins) into the archive.  Falls through to the wrapped function
    unless lfstatus is enabled (see overridearchivecmd/hgwebarchive)."""
    # For some reason setting repo.lfstatus in hgwebarchive only changes the
    # unfiltered repo's attr, so check that as well.
    if not repo.lfstatus and not repo.unfiltered().lfstatus:
        return orig(
            repo, dest, node, kind, decode, match, prefix, mtime, subrepos
        )

    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    if node is not None:
        lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise error.Abort(_(b"unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == b'files':
        if prefix:
            raise error.Abort(_(b'cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # Add one file to the archive, honoring the match filter and
        # optional keyword/eol decoding.  Uses `archiver` from the
        # enclosing scope (bound below, before the first call).
        if match and not match(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool(b"ui", b"archivemeta"):
        write(
            b'.hg_archival.txt',
            0o644,
            False,
            lambda: archival.buildmetadata(ctx),
        )

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        lfile = lfutil.splitstandin(f)
        if lfile is not None:
            # f is a standin: archive the largefile contents under the
            # largefile's name instead.
            if node is not None:
                # The standin's content is the largefile's hash; locate
                # the actual file in the store or system cache.
                path = lfutil.findfile(repo, getdata().strip())

                if path is None:
                    raise error.Abort(
                        _(
                            b'largefile %s not found in repo store or system cache'
                        )
                        % lfile
                    )
            else:
                path = lfile

            f = lfile

            getdata = lambda: util.readfile(path)
        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + b'/'

            # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
            # infer and possibly set lfstatus in hgsubrepoarchive. That would
            # allow only hgsubrepos to set this, instead of the current scheme
            # where the parent sets this for the child.
            with (
                util.safehasattr(sub, '_repo')
                and lfstatus(sub._repo)
                or util.nullcontextmanager()
            ):
                sub.archive(archiver, subprefix, submatch)

    archiver.done()
1260
1260
1261
1261
@eh.wrapfunction(subrepo.hgsubrepo, b'archive')
def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
    """Archive an hg subrepo, substituting real largefile contents.

    Falls through to ``orig`` unless largefiles is enabled on the subrepo
    and its lfstatus flag is set (the parent repo sets that flag — see the
    TODO below).
    """
    lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled')
    if not lfenabled or not repo._repo.lfstatus:
        return orig(repo, archiver, prefix, match, decode)

    repo._get(repo._state + (b'hg',))
    rev = repo._state[1]
    ctx = repo._repo[rev]

    if ctx.node() is not None:
        lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())

    def write(name, mode, islink, getdata):
        # At this point, the standin has been replaced with the largefile
        # name, so the normal matcher works here without the lfutil variants.
        if match and not match(f):
            return
        data = getdata()
        if decode:
            data = repo._repo.wwritedata(name, data)

        archiver.addfile(prefix + name, mode, islink, data)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        lfile = lfutil.splitstandin(f)
        if lfile is not None:
            if ctx.node() is not None:
                path = lfutil.findfile(repo._repo, getdata().strip())

                if path is None:
                    raise error.Abort(
                        _(
                            b'largefile %s not found in repo store or system cache'
                        )
                        % lfile
                    )
            else:
                path = lfile

            f = lfile

            # NOTE(review): joining the archive prefix onto the cache path
            # looks suspicious (the non-subrepo archive path reads ``path``
            # directly) — confirm against upstream before changing.
            getdata = lambda: util.readfile(os.path.join(prefix, path))

        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)

    for subpath in sorted(ctx.substate):
        sub = ctx.workingsub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = prefix + subpath + b'/'
        # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
        # infer and possibly set lfstatus at the top of this function. That
        # would allow only hgsubrepos to set this, instead of the current scheme
        # where the parent sets this for the child.
        with (
            util.safehasattr(sub, '_repo')
            and lfstatus(sub._repo)
            or util.nullcontextmanager()
        ):
            sub.archive(archiver, subprefix, submatch, decode)
1324
1324
1325
1325
# If a largefile is modified, the change is not reflected in its
# standin until a commit. cmdutil.bailifchanged() raises an exception
# if the repo has uncommitted changes. Wrap it to also check if
# largefiles were changed. This is used by bisect, backout and fetch.
@eh.wrapfunction(cmdutil, b'bailifchanged')
def overridebailifchanged(orig, repo, *args, **kwargs):
    """Abort when either normal files or largefiles are dirty."""
    orig(repo, *args, **kwargs)
    with lfstatus(repo):
        s = repo.status()
    if s.modified or s.added or s.removed or s.deleted:
        raise error.Abort(_(b'uncommitted changes'))
1337
1337
1338
1338
@eh.wrapfunction(cmdutil, b'postcommitstatus')
def postcommitstatus(orig, repo, *args, **kwargs):
    """Run the wrapped post-commit status with largefile status enabled."""
    with lfstatus(repo):
        return orig(repo, *args, **kwargs)
1343
1343
1344
1344
@eh.wrapfunction(cmdutil, b'forget')
def cmdutilforget(
    orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    """Forget normal files via ``orig`` and largefiles via the lfdirstate.

    Returns the combined ``(bad, forgot)`` lists from both passes.
    """
    # First pass: let the wrapped implementation handle plain files only.
    normalmatcher = composenormalfilematcher(match, repo[None].manifest())
    bad, forgot = orig(
        ui,
        repo,
        normalmatcher,
        prefix,
        uipathfn,
        explicitonly,
        dryrun,
        interactive,
    )
    # Second pass: largefiles whose standins are tracked.
    m = composelargefilematcher(match, repo[None].manifest())

    with lfstatus(repo):
        s = repo.status(match=m, clean=True)
    manifest = repo[None].manifest()
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    forget = [f for f in forget if lfutil.standin(f) in manifest]

    for f in forget:
        fstandin = lfutil.standin(f)
        if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
            ui.warn(
                _(b'not removing %s: file is already untracked\n') % uipathfn(f)
            )
            bad.append(f)

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_(b'removing %s\n') % uipathfn(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in forget:
            lfdirstate.set_untracked(f)
        lfdirstate.write()
        standins = [lfutil.standin(f) for f in forget]
        for f in standins:
            repo.wvfs.unlinkpath(f, ignoremissing=True)
        rejected = repo[None].forget(standins)

    bad.extend(f for f in rejected if f in m.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
1395
1395
1396
1396
def _getoutgoings(repo, other, missing, addfunc):
    """get pairs of filename and largefile hash in outgoing revisions
    in 'missing'.

    largefiles already existing on 'other' repository are ignored.

    'addfunc' is invoked with each unique pairs of filename and
    largefile hash value.
    """
    seen = set()
    lfhashes = set()

    def dedup(fn, lfhash):
        key = (fn, lfhash)
        if key not in seen:
            seen.add(key)
            lfhashes.add(lfhash)

    lfutil.getlfilestoupload(repo, missing, dedup)
    if not lfhashes:
        return
    # One batched existence query against the remote store, then report
    # only the hashes the remote is missing.
    lfexists = storefactory.openstore(repo, other).exists(lfhashes)
    for fn, lfhash in seen:
        if not lfexists[lfhash]:  # lfhash doesn't exist on "other"
            addfunc(fn, lfhash)
1421
1421
1422
1422
def outgoinghook(ui, repo, other, opts, missing):
    """Report largefiles that would be uploaded by the outgoing revisions.

    Only active when the user passed ``--large``; in debug mode each file's
    individual hashes are listed as well.
    """
    if not opts.pop(b'large', None):
        return
    lfhashes = set()
    if ui.debugflag:
        toupload = {}

        def addfunc(fn, lfhash):
            toupload.setdefault(fn, []).append(lfhash)
            lfhashes.add(lfhash)

        def showhashes(fn):
            for lfhash in sorted(toupload[fn]):
                ui.debug(b' %s\n' % lfhash)

    else:
        toupload = set()

        def addfunc(fn, lfhash):
            toupload.add(fn)
            lfhashes.add(lfhash)

        def showhashes(fn):
            pass

    _getoutgoings(repo, other, missing, addfunc)

    if not toupload:
        ui.status(_(b'largefiles: no files to upload\n'))
    else:
        ui.status(
            _(b'largefiles to upload (%d entities):\n') % (len(lfhashes))
        )
        for file in sorted(toupload):
            ui.status(lfutil.splitstandin(file) + b'\n')
            showhashes(file)
        ui.status(b'\n')
1461
1461
1462
1462
@eh.wrapcommand(
    b'outgoing', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
)
def _outgoingcmd(orig, *args, **kwargs):
    """Pass straight through to ``orig``.

    The wrapper exists only to register the ``--large`` option; the
    ``outgoinghook`` above does the actual work.
    """
    return orig(*args, **kwargs)
1470
1470
1471
1471
def summaryremotehook(ui, repo, opts, changes):
    """'hg summary --remote' hook: report largefile upload counts.

    When ``changes`` is None, return a ``(incoming, outgoing)`` pair of
    booleans telling the caller which remote checks are needed.
    """
    largeopt = opts.get(b'large', False)
    if changes is None:
        # only outgoing check is needed when --large was given
        return (False, largeopt and True)
    elif largeopt:
        url, branch, peer, outgoing = changes[1]
        if peer is None:
            # i18n: column positioning for "hg summary"
            ui.status(_(b'largefiles: (no remote repo)\n'))
            return

        toupload = set()
        lfhashes = set()

        def addfunc(fn, lfhash):
            toupload.add(fn)
            lfhashes.add(lfhash)

        _getoutgoings(repo, peer, outgoing.missing, addfunc)

        if not toupload:
            # i18n: column positioning for "hg summary"
            ui.status(_(b'largefiles: (no files to upload)\n'))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(
                _(b'largefiles: %d entities for %d files to upload\n')
                % (len(lfhashes), len(toupload))
            )
1504
1504
1505
1505
@eh.wrapcommand(
    b'summary', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
)
def overridesummary(orig, ui, repo, *pats, **opts):
    """Run 'hg summary' with largefile status reporting enabled."""
    with lfstatus(repo):
        orig(ui, repo, *pats, **opts)
1512
1512
1513
1513
@eh.wrapfunction(scmutil, b'addremove')
def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None):
    """addremove wrapper that routes largefiles through the lf code paths.

    Removes missing largefiles, adds new ones that qualify as largefiles,
    then delegates everything else to ``orig`` with a matcher that hides
    the largefiles already handled.
    """
    if opts is None:
        opts = {}
    if not lfutil.islfilesrepo(repo):
        return orig(repo, matcher, prefix, uipathfn, opts)
    # Get the list of missing largefiles so we can remove them
    lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
    unsure, s = lfdirstate.status(
        matchmod.always(),
        subrepos=[],
        ignored=False,
        clean=False,
        unknown=False,
    )

    # Call into the normal remove code, but the removing of the standin, we want
    # to have handled by original addremove. Monkey patching here makes sure
    # we don't remove the standin in the largefiles code, preventing a very
    # confused state later.
    if s.deleted:
        m = copy.copy(matcher)

        # The m._files and m._map attributes are not changed to the deleted list
        # because that affects the m.exact() test, which in turn governs whether
        # or not the file name is printed, and how. Simply limit the original
        # matches to those in the deleted status list.
        matchfn = m.matchfn
        m.matchfn = lambda f: f in s.deleted and matchfn(f)

        removelargefiles(
            repo.ui,
            repo,
            True,
            m,
            uipathfn,
            opts.get(b'dry_run'),
            **pycompat.strkwargs(opts)
        )
    # Call into the normal add code, and any files that *should* be added as
    # largefiles will be
    added, bad = addlargefiles(
        repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)
    )
    # Now that we've handled largefiles, hand off to the original addremove
    # function to take care of the rest. Make sure it doesn't do anything with
    # largefiles by passing a matcher that will ignore them.
    matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
    return orig(repo, matcher, prefix, uipathfn, opts)
1563
1563
1564
1564
# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
@eh.wrapcommand(b'purge')
def overridepurge(orig, ui, repo, *dirs, **opts):
    """Run purge with a status method that hides tracked largefiles."""
    # XXX Monkey patching a repoview will not work. The assigned attribute will
    # be set on the unfiltered repo, but we will only lookup attributes in the
    # unfiltered repo if the lookup in the repoview object itself fails. As the
    # monkey patched method exists on the repoview class the lookup will not
    # fail. As a result, the original version will shadow the monkey patched
    # one, defeating the monkey patch.
    #
    # As a work around we use an unfiltered repo here. We should do something
    # cleaner instead.
    repo = repo.unfiltered()
    oldstatus = repo.status

    def overridestatus(
        node1=b'.',
        node2=None,
        match=None,
        ignored=False,
        clean=False,
        unknown=False,
        listsubrepos=False,
    ):
        r = oldstatus(
            node1, node2, match, ignored, clean, unknown, listsubrepos
        )
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        # Drop any file the lfdirstate knows about from the unknown/ignored
        # lists so purge --all won't delete it.
        unknown = [
            f for f in r.unknown if not lfdirstate.get_entry(f).any_tracked
        ]
        ignored = [
            f for f in r.ignored if not lfdirstate.get_entry(f).any_tracked
        ]
        return scmutil.status(
            r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
        )

    repo.status = overridestatus
    orig(ui, repo, *dirs, **opts)
    repo.status = oldstatus
1607
1607
1608
1608
@eh.wrapcommand(b'rollback')
def overriderollback(orig, ui, repo, **opts):
    """Roll back, then restore/remove standins to match the new parents."""
    with repo.wlock():
        before = repo.dirstate.parents()
        orphans = {
            f
            for f in repo.dirstate
            if lfutil.isstandin(f) and not repo.dirstate.get_entry(f).removed
        }
        result = orig(ui, repo, **opts)
        after = repo.dirstate.parents()
        if before == after:
            return result  # no need to restore standins

        pctx = repo[b'.']
        for f in repo.dirstate:
            if lfutil.isstandin(f):
                orphans.discard(f)
                if repo.dirstate.get_entry(f).removed:
                    repo.wvfs.unlinkpath(f, ignoremissing=True)
                elif f in pctx:
                    fctx = pctx[f]
                    repo.wwrite(f, fctx.data(), fctx.flags())
                else:
                    # content of standin is not so important in 'a',
                    # 'm' or 'n' (coming from the 2nd parent) cases
                    lfutil.writestandin(repo, f, b'', False)
        for standin in orphans:
            repo.wvfs.unlinkpath(standin, ignoremissing=True)

        lfdirstate = lfutil.openlfdirstate(ui, repo)
        with lfdirstate.parentchange():
            orphans = set(lfdirstate)
            lfiles = lfutil.listlfiles(repo)
            for file in lfiles:
                lfutil.synclfdirstate(repo, lfdirstate, file, True)
                orphans.discard(file)
            for lfile in orphans:
                lfdirstate.update_file(
                    lfile, p1_tracked=False, wc_tracked=False
                )
        lfdirstate.write()
    return result
1652
1652
1653
1653
@eh.wrapcommand(b'transplant', extension=b'transplant')
def overridetransplant(orig, ui, repo, *revs, **opts):
    """Run transplant with automated largefile commit hooks installed.

    The status writer is silenced for the duration; both hooks are popped
    again even if the wrapped command raises.
    """
    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    repo._lfstatuswriters.append(lambda *msg, **opts: None)
    try:
        result = orig(ui, repo, *revs, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
    return result
1665
1665
1666
1666
@eh.wrapcommand(b'cat')
def overridecat(orig, ui, repo, file1, *pats, **opts):
    """'hg cat' wrapper that resolves standins to largefile contents.

    Uses logcmdutil.revsingle (not scmutil.revsingle) so that a bad
    user-provided --rev raises InputError. Returns 0 if any file was
    written, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'))
    err = 1
    notbad = set()
    m = scmutil.match(ctx, (file1,) + pats, opts)
    origmatchfn = m.matchfn

    def lfmatchfn(f):
        if origmatchfn(f):
            return True
        lf = lfutil.splitstandin(f)
        if lf is None:
            return False
        notbad.add(lf)
        return origmatchfn(lf)

    m.matchfn = lfmatchfn
    origbadfn = m.bad

    def lfbadfn(f, msg):
        # suppress "no such file" for names we matched via their standin
        if not f in notbad:
            origbadfn(f, msg)

    m.bad = lfbadfn

    origvisitdirfn = m.visitdir

    def lfvisitdirfn(dir):
        if dir == lfutil.shortname:
            return True
        ret = origvisitdirfn(dir)
        if ret:
            return ret
        lf = lfutil.splitstandin(dir)
        if lf is None:
            return False
        return origvisitdirfn(lf)

    m.visitdir = lfvisitdirfn

    for f in ctx.walk(m):
        with cmdutil.makefileobj(ctx, opts.get(b'output'), pathname=f) as fp:
            lf = lfutil.splitstandin(f)
            if lf is None or origmatchfn(f):
                # duplicating unreachable code from commands.cat
                data = ctx[f].data()
                if opts.get(b'decode'):
                    data = repo.wwritedata(f, data)
                fp.write(data)
            else:
                hash = lfutil.readasstandin(ctx[f])
                if not lfutil.inusercache(repo.ui, hash):
                    store = storefactory.openstore(repo)
                    success, missing = store.get([(lf, hash)])
                    if len(success) != 1:
                        raise error.Abort(
                            _(
                                b'largefile %s is not in cache and could not be '
                                b'downloaded'
                            )
                            % lf
                        )
                path = lfutil.usercachepath(repo.ui, hash)
                with open(path, b"rb") as fpin:
                    for chunk in util.filechunkiter(fpin):
                        fp.write(chunk)
        err = 0
    return err
1737
1737
1738
1738
1739 @eh.wrapfunction(merge, b'_update')
1739 @eh.wrapfunction(merge, b'_update')
1740 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
1740 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
1741 matcher = kwargs.get('matcher', None)
1741 matcher = kwargs.get('matcher', None)
1742 # note if this is a partial update
1742 # note if this is a partial update
1743 partial = matcher and not matcher.always()
1743 partial = matcher and not matcher.always()
1744 with repo.wlock():
1744 with repo.wlock():
1745 # branch | | |
1745 # branch | | |
1746 # merge | force | partial | action
1746 # merge | force | partial | action
1747 # -------+-------+---------+--------------
1747 # -------+-------+---------+--------------
1748 # x | x | x | linear-merge
1748 # x | x | x | linear-merge
1749 # o | x | x | branch-merge
1749 # o | x | x | branch-merge
1750 # x | o | x | overwrite (as clean update)
1750 # x | o | x | overwrite (as clean update)
1751 # o | o | x | force-branch-merge (*1)
1751 # o | o | x | force-branch-merge (*1)
1752 # x | x | o | (*)
1752 # x | x | o | (*)
1753 # o | x | o | (*)
1753 # o | x | o | (*)
1754 # x | o | o | overwrite (as revert)
1754 # x | o | o | overwrite (as revert)
1755 # o | o | o | (*)
1755 # o | o | o | (*)
1756 #
1756 #
1757 # (*) don't care
1757 # (*) don't care
1758 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1758 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1759
1759
1760 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1760 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1761 unsure, s = lfdirstate.status(
1761 unsure, s = lfdirstate.status(
1762 matchmod.always(),
1762 matchmod.always(),
1763 subrepos=[],
1763 subrepos=[],
1764 ignored=False,
1764 ignored=False,
1765 clean=True,
1765 clean=True,
1766 unknown=False,
1766 unknown=False,
1767 )
1767 )
1768 oldclean = set(s.clean)
1768 oldclean = set(s.clean)
1769 pctx = repo[b'.']
1769 pctx = repo[b'.']
1770 dctx = repo[node]
1770 dctx = repo[node]
1771 for lfile in unsure + s.modified:
1771 for lfile in unsure + s.modified:
1772 lfileabs = repo.wvfs.join(lfile)
1772 lfileabs = repo.wvfs.join(lfile)
1773 if not repo.wvfs.exists(lfileabs):
1773 if not repo.wvfs.exists(lfileabs):
1774 continue
1774 continue
1775 lfhash = lfutil.hashfile(lfileabs)
1775 lfhash = lfutil.hashfile(lfileabs)
1776 standin = lfutil.standin(lfile)
1776 standin = lfutil.standin(lfile)
1777 lfutil.writestandin(
1777 lfutil.writestandin(
1778 repo, standin, lfhash, lfutil.getexecutable(lfileabs)
1778 repo, standin, lfhash, lfutil.getexecutable(lfileabs)
1779 )
1779 )
1780 if standin in pctx and lfhash == lfutil.readasstandin(
1780 if standin in pctx and lfhash == lfutil.readasstandin(
1781 pctx[standin]
1781 pctx[standin]
1782 ):
1782 ):
1783 oldclean.add(lfile)
1783 oldclean.add(lfile)
1784 for lfile in s.added:
1784 for lfile in s.added:
1785 fstandin = lfutil.standin(lfile)
1785 fstandin = lfutil.standin(lfile)
1786 if fstandin not in dctx:
1786 if fstandin not in dctx:
1787 # in this case, content of standin file is meaningless
1787 # in this case, content of standin file is meaningless
1788 # (in dctx, lfile is unknown, or normal file)
1788 # (in dctx, lfile is unknown, or normal file)
1789 continue
1789 continue
1790 lfutil.updatestandin(repo, lfile, fstandin)
1790 lfutil.updatestandin(repo, lfile, fstandin)
1791 # mark all clean largefiles as dirty, just in case the update gets
1791 # mark all clean largefiles as dirty, just in case the update gets
1792 # interrupted before largefiles and lfdirstate are synchronized
1792 # interrupted before largefiles and lfdirstate are synchronized
1793 for lfile in oldclean:
1793 for lfile in oldclean:
1794 lfdirstate.set_possibly_dirty(lfile)
1794 lfdirstate.set_possibly_dirty(lfile)
1795 lfdirstate.write()
1795 lfdirstate.write()
1796
1796
1797 oldstandins = lfutil.getstandinsstate(repo)
1797 oldstandins = lfutil.getstandinsstate(repo)
1798 wc = kwargs.get('wc')
1798 wc = kwargs.get('wc')
1799 if wc and wc.isinmemory():
1799 if wc and wc.isinmemory():
1800 # largefiles is not a good candidate for in-memory merge (large
1800 # largefiles is not a good candidate for in-memory merge (large
1801 # files, custom dirstate, matcher usage).
1801 # files, custom dirstate, matcher usage).
1802 raise error.ProgrammingError(
1802 raise error.ProgrammingError(
1803 b'largefiles is not compatible with in-memory merge'
1803 b'largefiles is not compatible with in-memory merge'
1804 )
1804 )
1805 with lfdirstate.parentchange():
1805 with lfdirstate.parentchange():
1806 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1806 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1807
1807
1808 newstandins = lfutil.getstandinsstate(repo)
1808 newstandins = lfutil.getstandinsstate(repo)
1809 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1809 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1810
1810
1811 # to avoid leaving all largefiles as dirty and thus rehash them, mark
1811 # to avoid leaving all largefiles as dirty and thus rehash them, mark
1812 # all the ones that didn't change as clean
1812 # all the ones that didn't change as clean
1813 for lfile in oldclean.difference(filelist):
1813 for lfile in oldclean.difference(filelist):
1814 lfdirstate.update_file(lfile, p1_tracked=True, wc_tracked=True)
1814 lfdirstate.update_file(lfile, p1_tracked=True, wc_tracked=True)
1815 lfdirstate.write()
1815 lfdirstate.write()
1816
1816
1817 if branchmerge or force or partial:
1817 if branchmerge or force or partial:
1818 filelist.extend(s.deleted + s.removed)
1818 filelist.extend(s.deleted + s.removed)
1819
1819
1820 lfcommands.updatelfiles(
1820 lfcommands.updatelfiles(
1821 repo.ui, repo, filelist=filelist, normallookup=partial
1821 repo.ui, repo, filelist=filelist, normallookup=partial
1822 )
1822 )
1823
1823
1824 return result
1824 return result
1825
1825
1826
1826
@eh.wrapfunction(scmutil, b'marktouched')
def scmutilmarktouched(orig, repo, files, *args, **kwargs):
    """After files are marked as touched, refresh the affected largefiles.

    Wraps scmutil.marktouched: runs the original, then maps any standin
    paths in ``files`` back to their largefile names and updates those
    largefiles in the working directory.
    """
    result = orig(repo, files, *args, **kwargs)

    # Keep only the entries that are standins, translated to largefile names.
    largefiles = [
        lf for lf in map(lfutil.splitstandin, files) if lf is not None
    ]
    if largefiles:
        lfcommands.updatelfiles(
            repo.ui,
            repo,
            filelist=largefiles,
            printmessage=False,
            normallookup=True,
        )

    return result
1846
1846
1847
1847
@eh.wrapfunction(upgrade_actions, b'preservedrequirements')
@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
def upgraderequirements(orig, repo):
    """Make the 'largefiles' requirement survive repository upgrades.

    Wraps the upgrade requirement computations so that a repo that already
    has the 'largefiles' requirement keeps it in the upgraded repo.
    """
    requirements = orig(repo)
    if b'largefiles' in repo.requirements:
        requirements.add(b'largefiles')
    return requirements
1855
1855
1856
1856
# URL scheme used to address a largefile by its hash id.
_lfscheme = b'largefile://'


@eh.wrapfunction(urlmod, b'open')
def openlargefile(orig, ui, url_, data=None, **kwargs):
    """Serve 'largefile://<id>' URLs from the largefile store.

    Any other URL is passed through to the wrapped url.open().  Passing
    ``data`` with a largefile URL is a caller bug.
    """
    if not url_.startswith(_lfscheme):
        return orig(ui, url_, data=data, **kwargs)
    if data:
        raise error.ProgrammingError(
            b"cannot use data on a 'largefile://' url"
        )
    lfid = url_[len(_lfscheme) :]
    return storefactory.getlfile(ui, lfid)
@@ -1,2288 +1,2288 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirrev,
26 wdirrev,
27 )
27 )
28 from mercurial.pycompat import open
28 from mercurial.pycompat import open
29 from mercurial import (
29 from mercurial import (
30 bookmarks,
30 bookmarks,
31 cmdutil,
31 cmdutil,
32 commands,
32 commands,
33 copies,
33 copies,
34 destutil,
34 destutil,
35 dirstateguard,
35 dirstateguard,
36 error,
36 error,
37 extensions,
37 extensions,
38 logcmdutil,
38 logcmdutil,
39 merge as mergemod,
39 merge as mergemod,
40 mergestate as mergestatemod,
40 mergestate as mergestatemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 obsutil,
43 obsutil,
44 patch,
44 patch,
45 phases,
45 phases,
46 pycompat,
46 pycompat,
47 registrar,
47 registrar,
48 repair,
48 repair,
49 revset,
49 revset,
50 revsetlang,
50 revsetlang,
51 rewriteutil,
51 rewriteutil,
52 scmutil,
52 scmutil,
53 smartset,
53 smartset,
54 state as statemod,
54 state as statemod,
55 util,
55 util,
56 )
56 )
57
57
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased
revtodo = -1
revtodostr = b'-1'  # on-disk (rebasestate) spelling of revtodo

# legacy revstates no longer needed in current code
# -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
legacystates = {b'-2', b'-3', b'-4', b'-5'}

# Registration tables for the commands and config items this extension adds.
cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)
configitem(
    b'devel',
    b'rebase.force-in-memory-merge',
    default=False,
)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
84
84
85
85
def _nothingtorebase():
    """Return the command exit status used when there is nothing to rebase."""
    return 1
88
88
89
89
90 def _savegraft(ctx, extra):
90 def _savegraft(ctx, extra):
91 s = ctx.extra().get(b'source', None)
91 s = ctx.extra().get(b'source', None)
92 if s is not None:
92 if s is not None:
93 extra[b'source'] = s
93 extra[b'source'] = s
94 s = ctx.extra().get(b'intermediate-source', None)
94 s = ctx.extra().get(b'intermediate-source', None)
95 if s is not None:
95 if s is not None:
96 extra[b'intermediate-source'] = s
96 extra[b'intermediate-source'] = s
97
97
98
98
99 def _savebranch(ctx, extra):
99 def _savebranch(ctx, extra):
100 extra[b'branch'] = ctx.branch()
100 extra[b'branch'] = ctx.branch()
101
101
102
102
def _destrebase(repo, sourceset, destspace=None):
    """Small wrapper around destmerge passing the rebase-specific arguments.

    Please wrap destutil.destmerge instead."""
    merge_args = {
        'action': b'rebase',
        'sourceset': sourceset,
        'onheadcheck': False,
        'destspace': destspace,
    }
    return destutil.destmerge(repo, **merge_args)
114
114
115
115
# Registrar for the revset predicates defined by this extension.
revsetpredicate = registrar.revsetpredicate()
117
117
118
118
@revsetpredicate(b'_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``

    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # i18n: "_rebasedefaultdest" is a keyword
    if x is None:
        sourceset = None
    else:
        sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
    destination = _destrebase(repo, sourceset)
    return subset & smartset.baseset([destination])
131
131
132
132
@revsetpredicate(b'_destautoorphanrebase')
def _revsetdestautoorphanrebase(repo, subset, x):
    # ``_destautoorphanrebase()``

    # automatic rebase destination for a single orphan revision.
    unfiltered = repo.unfiltered()
    obsolete_revs = unfiltered.revs(b'obsolete()')

    src = revset.getset(repo, subset, x).first()

    # No source revision, or the source is already obsolete: no destination.
    if not src or src in obsolete_revs:
        return smartset.baseset()

    candidates = destutil.orphanpossibledestination(repo, src)
    if len(candidates) > 1:
        raise error.StateError(
            _(b"ambiguous automatic rebase: %r could end up on any of %r")
            % (src, candidates)
        )
    # Zero or one candidate destination remains, so just return it.
    return smartset.baseset(candidates)
154
154
155
155
def _ctxdesc(ctx):
    """short description for a context"""
    ui = ctx.repo().ui
    return cmdutil.format_changeset_summary(ui, ctx, command=b'rebase')
161
161
162
162
163 class rebaseruntime(object):
163 class rebaseruntime(object):
164 """This class is a container for rebase runtime state"""
164 """This class is a container for rebase runtime state"""
165
165
    def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
        """Initialize rebase runtime state.

        ``opts`` is the (str-keyed) option dict of the rebase command;
        this only records configuration, it does not start any rebase.
        """
        if opts is None:
            opts = {}

        # prepared: whether we have rebasestate prepared or not. Currently it
        # decides whether "self.repo" is unfiltered or not.
        # The rebasestate has explicit hash to hash instructions not depending
        # on visibility. If rebasestate exists (in-memory or on-disk), use
        # unfiltered repo to avoid visibility issues.
        # Before knowing rebasestate (i.e. when starting a new rebase (not
        # --continue or --abort)), the original repo should be used so
        # visibility-dependent revsets are correct.
        self.prepared = False
        self.resume = False
        self._repo = repo

        self.ui = ui
        self.opts = opts
        self.originalwd = None
        self.external = nullrev
        # Mapping between the old revision id and either what is the new rebased
        # revision or what needs to be done with the old revision. The state
        # dict will be what contains most of the rebase progress state.
        self.state = {}
        self.activebookmark = None
        self.destmap = {}
        self.skipped = set()

        self.collapsef = opts.get('collapse', False)
        self.collapsemsg = cmdutil.logmessage(ui, pycompat.byteskwargs(opts))
        self.date = opts.get('date', None)

        e = opts.get('extrafn')  # internal, used by e.g. hgsubversion
        # extra-data writers applied to each rebased commit; a caller-supplied
        # extrafn replaces the default graft-provenance saver entirely.
        self.extrafns = [_savegraft]
        if e:
            self.extrafns = [e]

        self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
        self.keepf = opts.get('keep', False)
        self.keepbranchesf = opts.get('keepbranches', False)
        self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
            repo.ui, b'rebase'
        )
        # Filled in later by _handleskippingobsolete().
        self.obsolete_with_successor_in_destination = {}
        self.obsolete_with_successor_in_rebase_set = set()
        self.inmemory = inmemory
        self.dryrun = dryrun
        self.stateobj = statemod.cmdstate(repo, b'rebasestate')
214
214
215 @property
215 @property
216 def repo(self):
216 def repo(self):
217 if self.prepared:
217 if self.prepared:
218 return self._repo.unfiltered()
218 return self._repo.unfiltered()
219 else:
219 else:
220 return self._repo
220 return self._repo
221
221
222 def storestatus(self, tr=None):
222 def storestatus(self, tr=None):
223 """Store the current status to allow recovery"""
223 """Store the current status to allow recovery"""
224 if tr:
224 if tr:
225 tr.addfilegenerator(
225 tr.addfilegenerator(
226 b'rebasestate',
226 b'rebasestate',
227 (b'rebasestate',),
227 (b'rebasestate',),
228 self._writestatus,
228 self._writestatus,
229 location=b'plain',
229 location=b'plain',
230 )
230 )
231 else:
231 else:
232 with self.repo.vfs(b"rebasestate", b"w") as f:
232 with self.repo.vfs(b"rebasestate", b"w") as f:
233 self._writestatus(f)
233 self._writestatus(f)
234
234
    def _writestatus(self, f):
        """Serialize the rebase state to the file object ``f``.

        Format, one field per line: originalwd hash; empty legacy "dest"
        slot; external hash; collapse flag; keep flag; keepbranches flag;
        active bookmark (may be empty); then one oldrev:newrev:destnode
        entry per revision in self.state.
        """
        repo = self.repo
        assert repo.filtername is None
        f.write(repo[self.originalwd].hex() + b'\n')
        # was "dest". we now write dest per src root below.
        f.write(b'\n')
        f.write(repo[self.external].hex() + b'\n')
        f.write(b'%d\n' % int(self.collapsef))
        f.write(b'%d\n' % int(self.keepf))
        f.write(b'%d\n' % int(self.keepbranchesf))
        f.write(b'%s\n' % (self.activebookmark or b''))
        destmap = self.destmap
        for d, v in pycompat.iteritems(self.state):
            oldrev = repo[d].hex()
            if v >= 0:
                # already rebased: record the hash of the new revision
                newrev = repo[v].hex()
            else:
                # not rebased yet: record the state marker (e.g. revtodo)
                newrev = b"%d" % v
            destnode = repo[destmap[d]].hex()
            f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
        repo.ui.debug(b'rebase status stored\n')
256
256
257 def restorestatus(self):
257 def restorestatus(self):
258 """Restore a previously stored status"""
258 """Restore a previously stored status"""
259 if not self.stateobj.exists():
259 if not self.stateobj.exists():
260 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
260 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
261
261
262 data = self._read()
262 data = self._read()
263 self.repo.ui.debug(b'rebase status resumed\n')
263 self.repo.ui.debug(b'rebase status resumed\n')
264
264
265 self.originalwd = data[b'originalwd']
265 self.originalwd = data[b'originalwd']
266 self.destmap = data[b'destmap']
266 self.destmap = data[b'destmap']
267 self.state = data[b'state']
267 self.state = data[b'state']
268 self.skipped = data[b'skipped']
268 self.skipped = data[b'skipped']
269 self.collapsef = data[b'collapse']
269 self.collapsef = data[b'collapse']
270 self.keepf = data[b'keep']
270 self.keepf = data[b'keep']
271 self.keepbranchesf = data[b'keepbranches']
271 self.keepbranchesf = data[b'keepbranches']
272 self.external = data[b'external']
272 self.external = data[b'external']
273 self.activebookmark = data[b'activebookmark']
273 self.activebookmark = data[b'activebookmark']
274
274
275 def _read(self):
275 def _read(self):
276 self.prepared = True
276 self.prepared = True
277 repo = self.repo
277 repo = self.repo
278 assert repo.filtername is None
278 assert repo.filtername is None
279 data = {
279 data = {
280 b'keepbranches': None,
280 b'keepbranches': None,
281 b'collapse': None,
281 b'collapse': None,
282 b'activebookmark': None,
282 b'activebookmark': None,
283 b'external': nullrev,
283 b'external': nullrev,
284 b'keep': None,
284 b'keep': None,
285 b'originalwd': None,
285 b'originalwd': None,
286 }
286 }
287 legacydest = None
287 legacydest = None
288 state = {}
288 state = {}
289 destmap = {}
289 destmap = {}
290
290
291 if True:
291 if True:
292 f = repo.vfs(b"rebasestate")
292 f = repo.vfs(b"rebasestate")
293 for i, l in enumerate(f.read().splitlines()):
293 for i, l in enumerate(f.read().splitlines()):
294 if i == 0:
294 if i == 0:
295 data[b'originalwd'] = repo[l].rev()
295 data[b'originalwd'] = repo[l].rev()
296 elif i == 1:
296 elif i == 1:
297 # this line should be empty in newer version. but legacy
297 # this line should be empty in newer version. but legacy
298 # clients may still use it
298 # clients may still use it
299 if l:
299 if l:
300 legacydest = repo[l].rev()
300 legacydest = repo[l].rev()
301 elif i == 2:
301 elif i == 2:
302 data[b'external'] = repo[l].rev()
302 data[b'external'] = repo[l].rev()
303 elif i == 3:
303 elif i == 3:
304 data[b'collapse'] = bool(int(l))
304 data[b'collapse'] = bool(int(l))
305 elif i == 4:
305 elif i == 4:
306 data[b'keep'] = bool(int(l))
306 data[b'keep'] = bool(int(l))
307 elif i == 5:
307 elif i == 5:
308 data[b'keepbranches'] = bool(int(l))
308 data[b'keepbranches'] = bool(int(l))
309 elif i == 6 and not (len(l) == 81 and b':' in l):
309 elif i == 6 and not (len(l) == 81 and b':' in l):
310 # line 6 is a recent addition, so for backwards
310 # line 6 is a recent addition, so for backwards
311 # compatibility check that the line doesn't look like the
311 # compatibility check that the line doesn't look like the
312 # oldrev:newrev lines
312 # oldrev:newrev lines
313 data[b'activebookmark'] = l
313 data[b'activebookmark'] = l
314 else:
314 else:
315 args = l.split(b':')
315 args = l.split(b':')
316 oldrev = repo[args[0]].rev()
316 oldrev = repo[args[0]].rev()
317 newrev = args[1]
317 newrev = args[1]
318 if newrev in legacystates:
318 if newrev in legacystates:
319 continue
319 continue
320 if len(args) > 2:
320 if len(args) > 2:
321 destrev = repo[args[2]].rev()
321 destrev = repo[args[2]].rev()
322 else:
322 else:
323 destrev = legacydest
323 destrev = legacydest
324 destmap[oldrev] = destrev
324 destmap[oldrev] = destrev
325 if newrev == revtodostr:
325 if newrev == revtodostr:
326 state[oldrev] = revtodo
326 state[oldrev] = revtodo
327 # Legacy compat special case
327 # Legacy compat special case
328 else:
328 else:
329 state[oldrev] = repo[newrev].rev()
329 state[oldrev] = repo[newrev].rev()
330
330
331 if data[b'keepbranches'] is None:
331 if data[b'keepbranches'] is None:
332 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
332 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
333
333
334 data[b'destmap'] = destmap
334 data[b'destmap'] = destmap
335 data[b'state'] = state
335 data[b'state'] = state
336 skipped = set()
336 skipped = set()
337 # recompute the set of skipped revs
337 # recompute the set of skipped revs
338 if not data[b'collapse']:
338 if not data[b'collapse']:
339 seen = set(destmap.values())
339 seen = set(destmap.values())
340 for old, new in sorted(state.items()):
340 for old, new in sorted(state.items()):
341 if new != revtodo and new in seen:
341 if new != revtodo and new in seen:
342 skipped.add(old)
342 skipped.add(old)
343 seen.add(new)
343 seen.add(new)
344 data[b'skipped'] = skipped
344 data[b'skipped'] = skipped
345 repo.ui.debug(
345 repo.ui.debug(
346 b'computed skipped revs: %s\n'
346 b'computed skipped revs: %s\n'
347 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
347 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
348 )
348 )
349
349
350 return data
350 return data
351
351
    def _handleskippingobsolete(self):
        """Compute structures necessary for skipping obsolete revisions.

        Populates self.obsolete_with_successor_in_destination and
        self.obsolete_with_successor_in_rebase_set from the revisions in
        self.state, and may prune self.state/self.destmap accordingly.
        """
        # With --keep, obsolete revisions are rebased as-is: nothing to skip.
        if self.keepf:
            return
        if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
            return
        obsoleteset = {r for r in self.state if self.repo[r].obsolete()}
        (
            self.obsolete_with_successor_in_destination,
            self.obsolete_with_successor_in_rebase_set,
        ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap)
        skippedset = set(self.obsolete_with_successor_in_destination)
        skippedset.update(self.obsolete_with_successor_in_rebase_set)
        _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
        if obsolete.isenabled(self.repo, obsolete.allowdivergenceopt):
            # Divergence is allowed: treat these as ordinary revisions.
            self.obsolete_with_successor_in_rebase_set = set()
        else:
            # Drop the strict descendants of the to-be-skipped obsolete
            # revisions from the planned work (the revset excludes the
            # set itself).
            for rev in self.repo.revs(
                b'descendants(%ld) and not %ld',
                self.obsolete_with_successor_in_rebase_set,
                self.obsolete_with_successor_in_rebase_set,
            ):
                self.state.pop(rev, None)
                self.destmap.pop(rev, None)
376
376
    def _prepareabortorcontinue(
        self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
    ):
        """Restore saved rebase state for --abort or --continue.

        When aborting, returns an exit status (0 after clearing broken
        state, otherwise the result of self._abort()).  When continuing,
        returns None and leaves the restored state on self.  Raises
        error.Abort if the stored state cannot be restored and we are not
        aborting.
        """
        self.resume = True
        try:
            self.restorestatus()
            # Calculate self.obsolete_* sets
            self._handleskippingobsolete()
            self.collapsemsg = restorecollapsemsg(self.repo, isabort)
        except error.RepoLookupError:
            # The stored state refers to revisions we can no longer find.
            if isabort:
                # Nothing useful to undo: just clear the broken state.
                clearstatus(self.repo)
                clearcollapsemsg(self.repo)
                self.repo.ui.warn(
                    _(
                        b'rebase aborted (no revision is removed,'
                        b' only broken state is cleared)\n'
                    )
                )
                return 0
            else:
                msg = _(b'cannot continue inconsistent rebase')
                hint = _(b'use "hg rebase --abort" to clear broken state')
                raise error.Abort(msg, hint=hint)

        if isabort:
            # Respect the rewrite.backup-bundle configuration.
            backup = backup and self.backupf
            return self._abort(
                backup=backup,
                suppwarns=suppwarns,
                dryrun=dryrun,
                confirm=confirm,
            )
410
410
    def _preparenewrebase(self, destmap):
        """Set up runtime state for a brand-new rebase.

        ``destmap`` maps each source revision to its destination revision.
        Returns 1 (via _nothingtorebase) when there is nothing to do and
        None otherwise; on success sets self.prepared.
        """
        if not destmap:
            return _nothingtorebase()

        result = buildstate(self.repo, destmap, self.collapsef)

        if not result:
            # Empty state built, nothing to rebase
            self.ui.status(_(b'nothing to rebase\n'))
            return _nothingtorebase()

        (self.originalwd, self.destmap, self.state) = result
        if self.collapsef:
            # --collapse folds everything onto a single destination; the
            # external parent is computed against that destination's
            # ancestors.
            dests = set(self.destmap.values())
            if len(dests) != 1:
                raise error.InputError(
                    _(b'--collapse does not work with multiple destinations')
                )
            destrev = next(iter(dests))
            destancestors = self.repo.changelog.ancestors(
                [destrev], inclusive=True
            )
            self.external = externalparent(self.repo, self.state, destancestors)

        for destrev in sorted(set(destmap.values())):
            dest = self.repo[destrev]
            if dest.closesbranch() and not self.keepbranchesf:
                self.ui.status(_(b'reopening closed branch head %s\n') % dest)

        # Calculate self.obsolete_* sets
        self._handleskippingobsolete()

        if not self.keepf:
            rebaseset = set(destmap.keys())
            rebaseset -= set(self.obsolete_with_successor_in_destination)
            rebaseset -= self.obsolete_with_successor_in_rebase_set
            # We have our own divergence-checking in the rebase extension
            overrides = {}
            if obsolete.isenabled(self.repo, obsolete.createmarkersopt):
                overrides = {
                    (b'experimental', b'evolution.allowdivergence'): b'true'
                }
            try:
                with self.ui.configoverride(overrides):
                    rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
            except error.Abort as e:
                # Rewriting immutable/public changesets fails here; suggest
                # --keep as the way out when no more specific hint exists.
                if e.hint is None:
                    e.hint = _(b'use --keep to keep original changesets')
                raise e

        self.prepared = True
462
462
463 def _assignworkingcopy(self):
463 def _assignworkingcopy(self):
464 if self.inmemory:
464 if self.inmemory:
465 from mercurial.context import overlayworkingctx
465 from mercurial.context import overlayworkingctx
466
466
467 self.wctx = overlayworkingctx(self.repo)
467 self.wctx = overlayworkingctx(self.repo)
468 self.repo.ui.debug(b"rebasing in memory\n")
468 self.repo.ui.debug(b"rebasing in memory\n")
469 else:
469 else:
470 self.wctx = self.repo[None]
470 self.wctx = self.repo[None]
471 self.repo.ui.debug(b"rebasing on disk\n")
471 self.repo.ui.debug(b"rebasing on disk\n")
472 self.repo.ui.log(
472 self.repo.ui.log(
473 b"rebase",
473 b"rebase",
474 b"using in-memory rebase: %r\n",
474 b"using in-memory rebase: %r\n",
475 self.inmemory,
475 self.inmemory,
476 rebase_imm_used=self.inmemory,
476 rebase_imm_used=self.inmemory,
477 )
477 )
478
478
479 def _performrebase(self, tr):
479 def _performrebase(self, tr):
480 self._assignworkingcopy()
480 self._assignworkingcopy()
481 repo, ui = self.repo, self.ui
481 repo, ui = self.repo, self.ui
482 if self.keepbranchesf:
482 if self.keepbranchesf:
483 # insert _savebranch at the start of extrafns so if
483 # insert _savebranch at the start of extrafns so if
484 # there's a user-provided extrafn it can clobber branch if
484 # there's a user-provided extrafn it can clobber branch if
485 # desired
485 # desired
486 self.extrafns.insert(0, _savebranch)
486 self.extrafns.insert(0, _savebranch)
487 if self.collapsef:
487 if self.collapsef:
488 branches = set()
488 branches = set()
489 for rev in self.state:
489 for rev in self.state:
490 branches.add(repo[rev].branch())
490 branches.add(repo[rev].branch())
491 if len(branches) > 1:
491 if len(branches) > 1:
492 raise error.InputError(
492 raise error.InputError(
493 _(b'cannot collapse multiple named branches')
493 _(b'cannot collapse multiple named branches')
494 )
494 )
495
495
496 # Keep track of the active bookmarks in order to reset them later
496 # Keep track of the active bookmarks in order to reset them later
497 self.activebookmark = self.activebookmark or repo._activebookmark
497 self.activebookmark = self.activebookmark or repo._activebookmark
498 if self.activebookmark:
498 if self.activebookmark:
499 bookmarks.deactivate(repo)
499 bookmarks.deactivate(repo)
500
500
501 # Store the state before we begin so users can run 'hg rebase --abort'
501 # Store the state before we begin so users can run 'hg rebase --abort'
502 # if we fail before the transaction closes.
502 # if we fail before the transaction closes.
503 self.storestatus()
503 self.storestatus()
504 if tr:
504 if tr:
505 # When using single transaction, store state when transaction
505 # When using single transaction, store state when transaction
506 # commits.
506 # commits.
507 self.storestatus(tr)
507 self.storestatus(tr)
508
508
509 cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
509 cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
510 p = repo.ui.makeprogress(
510 p = repo.ui.makeprogress(
511 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
511 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
512 )
512 )
513
513
514 def progress(ctx):
514 def progress(ctx):
515 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
515 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
516
516
517 for subset in sortsource(self.destmap):
517 for subset in sortsource(self.destmap):
518 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
518 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
519 for rev in sortedrevs:
519 for rev in sortedrevs:
520 self._rebasenode(tr, rev, progress)
520 self._rebasenode(tr, rev, progress)
521 p.complete()
521 p.complete()
522 ui.note(_(b'rebase merging completed\n'))
522 ui.note(_(b'rebase merging completed\n'))
523
523
524 def _concludenode(self, rev, editor, commitmsg=None):
524 def _concludenode(self, rev, editor, commitmsg=None):
525 """Commit the wd changes with parents p1 and p2.
525 """Commit the wd changes with parents p1 and p2.
526
526
527 Reuse commit info from rev but also store useful information in extra.
527 Reuse commit info from rev but also store useful information in extra.
528 Return node of committed revision."""
528 Return node of committed revision."""
529 repo = self.repo
529 repo = self.repo
530 ctx = repo[rev]
530 ctx = repo[rev]
531 if commitmsg is None:
531 if commitmsg is None:
532 commitmsg = ctx.description()
532 commitmsg = ctx.description()
533
533
534 # Skip replacement if collapsing, as that degenerates to p1 for all
534 # Skip replacement if collapsing, as that degenerates to p1 for all
535 # nodes.
535 # nodes.
536 if not self.collapsef:
536 if not self.collapsef:
537 cl = repo.changelog
537 cl = repo.changelog
538 commitmsg = rewriteutil.update_hash_refs(
538 commitmsg = rewriteutil.update_hash_refs(
539 repo,
539 repo,
540 commitmsg,
540 commitmsg,
541 {
541 {
542 cl.node(oldrev): [cl.node(newrev)]
542 cl.node(oldrev): [cl.node(newrev)]
543 for oldrev, newrev in self.state.items()
543 for oldrev, newrev in self.state.items()
544 if newrev != revtodo
544 if newrev != revtodo
545 },
545 },
546 )
546 )
547
547
548 date = self.date
548 date = self.date
549 if date is None:
549 if date is None:
550 date = ctx.date()
550 date = ctx.date()
551 extra = {b'rebase_source': ctx.hex()}
551 extra = {b'rebase_source': ctx.hex()}
552 for c in self.extrafns:
552 for c in self.extrafns:
553 c(ctx, extra)
553 c(ctx, extra)
554 destphase = max(ctx.phase(), phases.draft)
554 destphase = max(ctx.phase(), phases.draft)
555 overrides = {
555 overrides = {
556 (b'phases', b'new-commit'): destphase,
556 (b'phases', b'new-commit'): destphase,
557 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
557 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
558 }
558 }
559 with repo.ui.configoverride(overrides, b'rebase'):
559 with repo.ui.configoverride(overrides, b'rebase'):
560 if self.inmemory:
560 if self.inmemory:
561 newnode = commitmemorynode(
561 newnode = commitmemorynode(
562 repo,
562 repo,
563 wctx=self.wctx,
563 wctx=self.wctx,
564 extra=extra,
564 extra=extra,
565 commitmsg=commitmsg,
565 commitmsg=commitmsg,
566 editor=editor,
566 editor=editor,
567 user=ctx.user(),
567 user=ctx.user(),
568 date=date,
568 date=date,
569 )
569 )
570 else:
570 else:
571 newnode = commitnode(
571 newnode = commitnode(
572 repo,
572 repo,
573 extra=extra,
573 extra=extra,
574 commitmsg=commitmsg,
574 commitmsg=commitmsg,
575 editor=editor,
575 editor=editor,
576 user=ctx.user(),
576 user=ctx.user(),
577 date=date,
577 date=date,
578 )
578 )
579
579
580 return newnode
580 return newnode
581
581
582 def _rebasenode(self, tr, rev, progressfn):
582 def _rebasenode(self, tr, rev, progressfn):
583 repo, ui, opts = self.repo, self.ui, self.opts
583 repo, ui, opts = self.repo, self.ui, self.opts
584 ctx = repo[rev]
584 ctx = repo[rev]
585 desc = _ctxdesc(ctx)
585 desc = _ctxdesc(ctx)
586 if self.state[rev] == rev:
586 if self.state[rev] == rev:
587 ui.status(_(b'already rebased %s\n') % desc)
587 ui.status(_(b'already rebased %s\n') % desc)
588 elif rev in self.obsolete_with_successor_in_rebase_set:
588 elif rev in self.obsolete_with_successor_in_rebase_set:
589 msg = (
589 msg = (
590 _(
590 _(
591 b'note: not rebasing %s and its descendants as '
591 b'note: not rebasing %s and its descendants as '
592 b'this would cause divergence\n'
592 b'this would cause divergence\n'
593 )
593 )
594 % desc
594 % desc
595 )
595 )
596 repo.ui.status(msg)
596 repo.ui.status(msg)
597 self.skipped.add(rev)
597 self.skipped.add(rev)
598 elif rev in self.obsolete_with_successor_in_destination:
598 elif rev in self.obsolete_with_successor_in_destination:
599 succ = self.obsolete_with_successor_in_destination[rev]
599 succ = self.obsolete_with_successor_in_destination[rev]
600 if succ is None:
600 if succ is None:
601 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
601 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
602 else:
602 else:
603 succdesc = _ctxdesc(repo[succ])
603 succdesc = _ctxdesc(repo[succ])
604 msg = _(
604 msg = _(
605 b'note: not rebasing %s, already in destination as %s\n'
605 b'note: not rebasing %s, already in destination as %s\n'
606 ) % (desc, succdesc)
606 ) % (desc, succdesc)
607 repo.ui.status(msg)
607 repo.ui.status(msg)
608 # Make clearrebased aware state[rev] is not a true successor
608 # Make clearrebased aware state[rev] is not a true successor
609 self.skipped.add(rev)
609 self.skipped.add(rev)
610 # Record rev as moved to its desired destination in self.state.
610 # Record rev as moved to its desired destination in self.state.
611 # This helps bookmark and working parent movement.
611 # This helps bookmark and working parent movement.
612 dest = max(
612 dest = max(
613 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
613 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
614 )
614 )
615 self.state[rev] = dest
615 self.state[rev] = dest
616 elif self.state[rev] == revtodo:
616 elif self.state[rev] == revtodo:
617 ui.status(_(b'rebasing %s\n') % desc)
617 ui.status(_(b'rebasing %s\n') % desc)
618 progressfn(ctx)
618 progressfn(ctx)
619 p1, p2, base = defineparents(
619 p1, p2, base = defineparents(
620 repo,
620 repo,
621 rev,
621 rev,
622 self.destmap,
622 self.destmap,
623 self.state,
623 self.state,
624 self.skipped,
624 self.skipped,
625 self.obsolete_with_successor_in_destination,
625 self.obsolete_with_successor_in_destination,
626 )
626 )
627 if self.resume and self.wctx.p1().rev() == p1:
627 if self.resume and self.wctx.p1().rev() == p1:
628 repo.ui.debug(b'resuming interrupted rebase\n')
628 repo.ui.debug(b'resuming interrupted rebase\n')
629 self.resume = False
629 self.resume = False
630 else:
630 else:
631 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
631 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
632 with ui.configoverride(overrides, b'rebase'):
632 with ui.configoverride(overrides, b'rebase'):
633 try:
633 try:
634 rebasenode(
634 rebasenode(
635 repo,
635 repo,
636 rev,
636 rev,
637 p1,
637 p1,
638 p2,
638 p2,
639 base,
639 base,
640 self.collapsef,
640 self.collapsef,
641 wctx=self.wctx,
641 wctx=self.wctx,
642 )
642 )
643 except error.InMemoryMergeConflictsError:
643 except error.InMemoryMergeConflictsError:
644 if self.dryrun:
644 if self.dryrun:
645 raise error.ConflictResolutionRequired(b'rebase')
645 raise error.ConflictResolutionRequired(b'rebase')
646 if self.collapsef:
646 if self.collapsef:
647 # TODO: Make the overlayworkingctx reflected
647 # TODO: Make the overlayworkingctx reflected
648 # in the working copy here instead of re-raising
648 # in the working copy here instead of re-raising
649 # so the entire rebase operation is retried.
649 # so the entire rebase operation is retried.
650 raise
650 raise
651 ui.status(
651 ui.status(
652 _(
652 _(
653 b"hit merge conflicts; rebasing that "
653 b"hit merge conflicts; rebasing that "
654 b"commit again in the working copy\n"
654 b"commit again in the working copy\n"
655 )
655 )
656 )
656 )
657 try:
657 try:
658 cmdutil.bailifchanged(repo)
658 cmdutil.bailifchanged(repo)
659 except error.Abort:
659 except error.Abort:
660 clearstatus(repo)
660 clearstatus(repo)
661 clearcollapsemsg(repo)
661 clearcollapsemsg(repo)
662 raise
662 raise
663 self.inmemory = False
663 self.inmemory = False
664 self._assignworkingcopy()
664 self._assignworkingcopy()
665 mergemod.update(repo[p1], wc=self.wctx)
665 mergemod.update(repo[p1], wc=self.wctx)
666 rebasenode(
666 rebasenode(
667 repo,
667 repo,
668 rev,
668 rev,
669 p1,
669 p1,
670 p2,
670 p2,
671 base,
671 base,
672 self.collapsef,
672 self.collapsef,
673 wctx=self.wctx,
673 wctx=self.wctx,
674 )
674 )
675 if not self.collapsef:
675 if not self.collapsef:
676 merging = p2 != nullrev
676 merging = p2 != nullrev
677 editform = cmdutil.mergeeditform(merging, b'rebase')
677 editform = cmdutil.mergeeditform(merging, b'rebase')
678 editor = cmdutil.getcommiteditor(editform=editform, **opts)
678 editor = cmdutil.getcommiteditor(editform=editform, **opts)
679 # We need to set parents again here just in case we're continuing
679 # We need to set parents again here just in case we're continuing
680 # a rebase started with an old hg version (before 9c9cfecd4600),
680 # a rebase started with an old hg version (before 9c9cfecd4600),
681 # because those old versions would have left us with two dirstate
681 # because those old versions would have left us with two dirstate
682 # parents, and we don't want to create a merge commit here (unless
682 # parents, and we don't want to create a merge commit here (unless
683 # we're rebasing a merge commit).
683 # we're rebasing a merge commit).
684 self.wctx.setparents(repo[p1].node(), repo[p2].node())
684 self.wctx.setparents(repo[p1].node(), repo[p2].node())
685 newnode = self._concludenode(rev, editor)
685 newnode = self._concludenode(rev, editor)
686 else:
686 else:
687 # Skip commit if we are collapsing
687 # Skip commit if we are collapsing
688 newnode = None
688 newnode = None
689 # Update the state
689 # Update the state
690 if newnode is not None:
690 if newnode is not None:
691 self.state[rev] = repo[newnode].rev()
691 self.state[rev] = repo[newnode].rev()
692 ui.debug(b'rebased as %s\n' % short(newnode))
692 ui.debug(b'rebased as %s\n' % short(newnode))
693 if repo[newnode].isempty():
693 if repo[newnode].isempty():
694 ui.warn(
694 ui.warn(
695 _(
695 _(
696 b'note: created empty successor for %s, its '
696 b'note: created empty successor for %s, its '
697 b'destination already has all its changes\n'
697 b'destination already has all its changes\n'
698 )
698 )
699 % desc
699 % desc
700 )
700 )
701 else:
701 else:
702 if not self.collapsef:
702 if not self.collapsef:
703 ui.warn(
703 ui.warn(
704 _(
704 _(
705 b'note: not rebasing %s, its destination already '
705 b'note: not rebasing %s, its destination already '
706 b'has all its changes\n'
706 b'has all its changes\n'
707 )
707 )
708 % desc
708 % desc
709 )
709 )
710 self.skipped.add(rev)
710 self.skipped.add(rev)
711 self.state[rev] = p1
711 self.state[rev] = p1
712 ui.debug(b'next revision set to %d\n' % p1)
712 ui.debug(b'next revision set to %d\n' % p1)
713 else:
713 else:
714 ui.status(
714 ui.status(
715 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
715 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
716 )
716 )
717 if not tr:
717 if not tr:
718 # When not using single transaction, store state after each
718 # When not using single transaction, store state after each
719 # commit is completely done. On InterventionRequired, we thus
719 # commit is completely done. On InterventionRequired, we thus
720 # won't store the status. Instead, we'll hit the "len(parents) == 2"
720 # won't store the status. Instead, we'll hit the "len(parents) == 2"
721 # case and realize that the commit was in progress.
721 # case and realize that the commit was in progress.
722 self.storestatus()
722 self.storestatus()
723
723
724 def _finishrebase(self):
724 def _finishrebase(self):
725 repo, ui, opts = self.repo, self.ui, self.opts
725 repo, ui, opts = self.repo, self.ui, self.opts
726 fm = ui.formatter(b'rebase', pycompat.byteskwargs(opts))
726 fm = ui.formatter(b'rebase', pycompat.byteskwargs(opts))
727 fm.startitem()
727 fm.startitem()
728 if self.collapsef:
728 if self.collapsef:
729 p1, p2, _base = defineparents(
729 p1, p2, _base = defineparents(
730 repo,
730 repo,
731 min(self.state),
731 min(self.state),
732 self.destmap,
732 self.destmap,
733 self.state,
733 self.state,
734 self.skipped,
734 self.skipped,
735 self.obsolete_with_successor_in_destination,
735 self.obsolete_with_successor_in_destination,
736 )
736 )
737 editopt = opts.get('edit')
737 editopt = opts.get('edit')
738 editform = b'rebase.collapse'
738 editform = b'rebase.collapse'
739 if self.collapsemsg:
739 if self.collapsemsg:
740 commitmsg = self.collapsemsg
740 commitmsg = self.collapsemsg
741 else:
741 else:
742 commitmsg = b'Collapsed revision'
742 commitmsg = b'Collapsed revision'
743 for rebased in sorted(self.state):
743 for rebased in sorted(self.state):
744 if rebased not in self.skipped:
744 if rebased not in self.skipped:
745 commitmsg += b'\n* %s' % repo[rebased].description()
745 commitmsg += b'\n* %s' % repo[rebased].description()
746 editopt = True
746 editopt = True
747 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
747 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
748 revtoreuse = max(self.state)
748 revtoreuse = max(self.state)
749
749
750 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
750 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
751 newnode = self._concludenode(
751 newnode = self._concludenode(
752 revtoreuse, editor, commitmsg=commitmsg
752 revtoreuse, editor, commitmsg=commitmsg
753 )
753 )
754
754
755 if newnode is not None:
755 if newnode is not None:
756 newrev = repo[newnode].rev()
756 newrev = repo[newnode].rev()
757 for oldrev in self.state:
757 for oldrev in self.state:
758 self.state[oldrev] = newrev
758 self.state[oldrev] = newrev
759
759
760 if b'qtip' in repo.tags():
760 if b'qtip' in repo.tags():
761 updatemq(repo, self.state, self.skipped, **opts)
761 updatemq(repo, self.state, self.skipped, **opts)
762
762
763 # restore original working directory
763 # restore original working directory
764 # (we do this before stripping)
764 # (we do this before stripping)
765 newwd = self.state.get(self.originalwd, self.originalwd)
765 newwd = self.state.get(self.originalwd, self.originalwd)
766 if newwd < 0:
766 if newwd < 0:
767 # original directory is a parent of rebase set root or ignored
767 # original directory is a parent of rebase set root or ignored
768 newwd = self.originalwd
768 newwd = self.originalwd
769 if newwd not in [c.rev() for c in repo[None].parents()]:
769 if newwd not in [c.rev() for c in repo[None].parents()]:
770 ui.note(_(b"update back to initial working directory parent\n"))
770 ui.note(_(b"update back to initial working directory parent\n"))
771 mergemod.update(repo[newwd])
771 mergemod.update(repo[newwd])
772
772
773 collapsedas = None
773 collapsedas = None
774 if self.collapsef and not self.keepf:
774 if self.collapsef and not self.keepf:
775 collapsedas = newnode
775 collapsedas = newnode
776 clearrebased(
776 clearrebased(
777 ui,
777 ui,
778 repo,
778 repo,
779 self.destmap,
779 self.destmap,
780 self.state,
780 self.state,
781 self.skipped,
781 self.skipped,
782 collapsedas,
782 collapsedas,
783 self.keepf,
783 self.keepf,
784 fm=fm,
784 fm=fm,
785 backup=self.backupf,
785 backup=self.backupf,
786 )
786 )
787
787
788 clearstatus(repo)
788 clearstatus(repo)
789 clearcollapsemsg(repo)
789 clearcollapsemsg(repo)
790
790
791 ui.note(_(b"rebase completed\n"))
791 ui.note(_(b"rebase completed\n"))
792 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
792 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
793 if self.skipped:
793 if self.skipped:
794 skippedlen = len(self.skipped)
794 skippedlen = len(self.skipped)
795 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
795 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
796 fm.end()
796 fm.end()
797
797
798 if (
798 if (
799 self.activebookmark
799 self.activebookmark
800 and self.activebookmark in repo._bookmarks
800 and self.activebookmark in repo._bookmarks
801 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
801 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
802 ):
802 ):
803 bookmarks.activate(repo, self.activebookmark)
803 bookmarks.activate(repo, self.activebookmark)
804
804
805 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
805 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
806 '''Restore the repository to its original state.'''
806 '''Restore the repository to its original state.'''
807
807
808 repo = self.repo
808 repo = self.repo
809 try:
809 try:
810 # If the first commits in the rebased set get skipped during the
810 # If the first commits in the rebased set get skipped during the
811 # rebase, their values within the state mapping will be the dest
811 # rebase, their values within the state mapping will be the dest
812 # rev id. The rebased list must must not contain the dest rev
812 # rev id. The rebased list must must not contain the dest rev
813 # (issue4896)
813 # (issue4896)
814 rebased = [
814 rebased = [
815 s
815 s
816 for r, s in self.state.items()
816 for r, s in self.state.items()
817 if s >= 0 and s != r and s != self.destmap[r]
817 if s >= 0 and s != r and s != self.destmap[r]
818 ]
818 ]
819 immutable = [d for d in rebased if not repo[d].mutable()]
819 immutable = [d for d in rebased if not repo[d].mutable()]
820 cleanup = True
820 cleanup = True
821 if immutable:
821 if immutable:
822 repo.ui.warn(
822 repo.ui.warn(
823 _(b"warning: can't clean up public changesets %s\n")
823 _(b"warning: can't clean up public changesets %s\n")
824 % b', '.join(bytes(repo[r]) for r in immutable),
824 % b', '.join(bytes(repo[r]) for r in immutable),
825 hint=_(b"see 'hg help phases' for details"),
825 hint=_(b"see 'hg help phases' for details"),
826 )
826 )
827 cleanup = False
827 cleanup = False
828
828
829 descendants = set()
829 descendants = set()
830 if rebased:
830 if rebased:
831 descendants = set(repo.changelog.descendants(rebased))
831 descendants = set(repo.changelog.descendants(rebased))
832 if descendants - set(rebased):
832 if descendants - set(rebased):
833 repo.ui.warn(
833 repo.ui.warn(
834 _(
834 _(
835 b"warning: new changesets detected on "
835 b"warning: new changesets detected on "
836 b"destination branch, can't strip\n"
836 b"destination branch, can't strip\n"
837 )
837 )
838 )
838 )
839 cleanup = False
839 cleanup = False
840
840
841 if cleanup:
841 if cleanup:
842 if rebased:
842 if rebased:
843 strippoints = [
843 strippoints = [
844 c.node() for c in repo.set(b'roots(%ld)', rebased)
844 c.node() for c in repo.set(b'roots(%ld)', rebased)
845 ]
845 ]
846
846
847 updateifonnodes = set(rebased)
847 updateifonnodes = set(rebased)
848 updateifonnodes.update(self.destmap.values())
848 updateifonnodes.update(self.destmap.values())
849
849
850 if not dryrun and not confirm:
850 if not dryrun and not confirm:
851 updateifonnodes.add(self.originalwd)
851 updateifonnodes.add(self.originalwd)
852
852
853 shouldupdate = repo[b'.'].rev() in updateifonnodes
853 shouldupdate = repo[b'.'].rev() in updateifonnodes
854
854
855 # Update away from the rebase if necessary
855 # Update away from the rebase if necessary
856 if shouldupdate:
856 if shouldupdate:
857 mergemod.clean_update(repo[self.originalwd])
857 mergemod.clean_update(repo[self.originalwd])
858
858
859 # Strip from the first rebased revision
859 # Strip from the first rebased revision
860 if rebased:
860 if rebased:
861 repair.strip(repo.ui, repo, strippoints, backup=backup)
861 repair.strip(repo.ui, repo, strippoints, backup=backup)
862
862
863 if self.activebookmark and self.activebookmark in repo._bookmarks:
863 if self.activebookmark and self.activebookmark in repo._bookmarks:
864 bookmarks.activate(repo, self.activebookmark)
864 bookmarks.activate(repo, self.activebookmark)
865
865
866 finally:
866 finally:
867 clearstatus(repo)
867 clearstatus(repo)
868 clearcollapsemsg(repo)
868 clearcollapsemsg(repo)
869 if not suppwarns:
869 if not suppwarns:
870 repo.ui.warn(_(b'rebase aborted\n'))
870 repo.ui.warn(_(b'rebase aborted\n'))
871 return 0
871 return 0
872
872
873
873
874 @command(
874 @command(
875 b'rebase',
875 b'rebase',
876 [
876 [
877 (
877 (
878 b's',
878 b's',
879 b'source',
879 b'source',
880 [],
880 [],
881 _(b'rebase the specified changesets and their descendants'),
881 _(b'rebase the specified changesets and their descendants'),
882 _(b'REV'),
882 _(b'REV'),
883 ),
883 ),
884 (
884 (
885 b'b',
885 b'b',
886 b'base',
886 b'base',
887 [],
887 [],
888 _(b'rebase everything from branching point of specified changeset'),
888 _(b'rebase everything from branching point of specified changeset'),
889 _(b'REV'),
889 _(b'REV'),
890 ),
890 ),
891 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
891 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
892 (
892 (
893 b'd',
893 b'd',
894 b'dest',
894 b'dest',
895 b'',
895 b'',
896 _(b'rebase onto the specified changeset'),
896 _(b'rebase onto the specified changeset'),
897 _(b'REV'),
897 _(b'REV'),
898 ),
898 ),
899 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
899 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
900 (
900 (
901 b'm',
901 b'm',
902 b'message',
902 b'message',
903 b'',
903 b'',
904 _(b'use text as collapse commit message'),
904 _(b'use text as collapse commit message'),
905 _(b'TEXT'),
905 _(b'TEXT'),
906 ),
906 ),
907 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
907 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
908 (
908 (
909 b'l',
909 b'l',
910 b'logfile',
910 b'logfile',
911 b'',
911 b'',
912 _(b'read collapse commit message from file'),
912 _(b'read collapse commit message from file'),
913 _(b'FILE'),
913 _(b'FILE'),
914 ),
914 ),
915 (b'k', b'keep', False, _(b'keep original changesets')),
915 (b'k', b'keep', False, _(b'keep original changesets')),
916 (b'', b'keepbranches', False, _(b'keep original branch names')),
916 (b'', b'keepbranches', False, _(b'keep original branch names')),
917 (b'D', b'detach', False, _(b'(DEPRECATED)')),
917 (b'D', b'detach', False, _(b'(DEPRECATED)')),
918 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
918 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
919 (b't', b'tool', b'', _(b'specify merge tool')),
919 (b't', b'tool', b'', _(b'specify merge tool')),
920 (b'', b'stop', False, _(b'stop interrupted rebase')),
920 (b'', b'stop', False, _(b'stop interrupted rebase')),
921 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
921 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
922 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
922 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
923 (
923 (
924 b'',
924 b'',
925 b'auto-orphans',
925 b'auto-orphans',
926 b'',
926 b'',
927 _(
927 _(
928 b'automatically rebase orphan revisions '
928 b'automatically rebase orphan revisions '
929 b'in the specified revset (EXPERIMENTAL)'
929 b'in the specified revset (EXPERIMENTAL)'
930 ),
930 ),
931 ),
931 ),
932 ]
932 ]
933 + cmdutil.dryrunopts
933 + cmdutil.dryrunopts
934 + cmdutil.formatteropts
934 + cmdutil.formatteropts
935 + cmdutil.confirmopts,
935 + cmdutil.confirmopts,
936 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
936 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
937 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
937 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
938 )
938 )
939 def rebase(ui, repo, **opts):
939 def rebase(ui, repo, **opts):
940 """move changeset (and descendants) to a different branch
940 """move changeset (and descendants) to a different branch
941
941
942 Rebase uses repeated merging to graft changesets from one part of
942 Rebase uses repeated merging to graft changesets from one part of
943 history (the source) onto another (the destination). This can be
943 history (the source) onto another (the destination). This can be
944 useful for linearizing *local* changes relative to a master
944 useful for linearizing *local* changes relative to a master
945 development tree.
945 development tree.
946
946
947 Published commits cannot be rebased (see :hg:`help phases`).
947 Published commits cannot be rebased (see :hg:`help phases`).
948 To copy commits, see :hg:`help graft`.
948 To copy commits, see :hg:`help graft`.
949
949
950 If you don't specify a destination changeset (``-d/--dest``), rebase
950 If you don't specify a destination changeset (``-d/--dest``), rebase
951 will use the same logic as :hg:`merge` to pick a destination. if
951 will use the same logic as :hg:`merge` to pick a destination. if
952 the current branch contains exactly one other head, the other head
952 the current branch contains exactly one other head, the other head
953 is merged with by default. Otherwise, an explicit revision with
953 is merged with by default. Otherwise, an explicit revision with
954 which to merge with must be provided. (destination changeset is not
954 which to merge with must be provided. (destination changeset is not
955 modified by rebasing, but new changesets are added as its
955 modified by rebasing, but new changesets are added as its
956 descendants.)
956 descendants.)
957
957
958 Here are the ways to select changesets:
958 Here are the ways to select changesets:
959
959
960 1. Explicitly select them using ``--rev``.
960 1. Explicitly select them using ``--rev``.
961
961
962 2. Use ``--source`` to select a root changeset and include all of its
962 2. Use ``--source`` to select a root changeset and include all of its
963 descendants.
963 descendants.
964
964
965 3. Use ``--base`` to select a changeset; rebase will find ancestors
965 3. Use ``--base`` to select a changeset; rebase will find ancestors
966 and their descendants which are not also ancestors of the destination.
966 and their descendants which are not also ancestors of the destination.
967
967
968 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
968 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
969 rebase will use ``--base .`` as above.
969 rebase will use ``--base .`` as above.
970
970
971 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
971 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
972 can be used in ``--dest``. Destination would be calculated per source
972 can be used in ``--dest``. Destination would be calculated per source
973 revision with ``SRC`` substituted by that single source revision and
973 revision with ``SRC`` substituted by that single source revision and
974 ``ALLSRC`` substituted by all source revisions.
974 ``ALLSRC`` substituted by all source revisions.
975
975
976 Rebase will destroy original changesets unless you use ``--keep``.
976 Rebase will destroy original changesets unless you use ``--keep``.
977 It will also move your bookmarks (even if you do).
977 It will also move your bookmarks (even if you do).
978
978
979 Some changesets may be dropped if they do not contribute changes
979 Some changesets may be dropped if they do not contribute changes
980 (e.g. merges from the destination branch).
980 (e.g. merges from the destination branch).
981
981
982 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
982 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
983 a named branch with two heads. You will need to explicitly specify source
983 a named branch with two heads. You will need to explicitly specify source
984 and/or destination.
984 and/or destination.
985
985
986 If you need to use a tool to automate merge/conflict decisions, you
986 If you need to use a tool to automate merge/conflict decisions, you
987 can specify one with ``--tool``, see :hg:`help merge-tools`.
987 can specify one with ``--tool``, see :hg:`help merge-tools`.
988 As a caveat: the tool will not be used to mediate when a file was
988 As a caveat: the tool will not be used to mediate when a file was
989 deleted, there is no hook presently available for this.
989 deleted, there is no hook presently available for this.
990
990
991 If a rebase is interrupted to manually resolve a conflict, it can be
991 If a rebase is interrupted to manually resolve a conflict, it can be
992 continued with --continue/-c, aborted with --abort/-a, or stopped with
992 continued with --continue/-c, aborted with --abort/-a, or stopped with
993 --stop.
993 --stop.
994
994
995 .. container:: verbose
995 .. container:: verbose
996
996
997 Examples:
997 Examples:
998
998
999 - move "local changes" (current commit back to branching point)
999 - move "local changes" (current commit back to branching point)
1000 to the current branch tip after a pull::
1000 to the current branch tip after a pull::
1001
1001
1002 hg rebase
1002 hg rebase
1003
1003
1004 - move a single changeset to the stable branch::
1004 - move a single changeset to the stable branch::
1005
1005
1006 hg rebase -r 5f493448 -d stable
1006 hg rebase -r 5f493448 -d stable
1007
1007
1008 - splice a commit and all its descendants onto another part of history::
1008 - splice a commit and all its descendants onto another part of history::
1009
1009
1010 hg rebase --source c0c3 --dest 4cf9
1010 hg rebase --source c0c3 --dest 4cf9
1011
1011
1012 - rebase everything on a branch marked by a bookmark onto the
1012 - rebase everything on a branch marked by a bookmark onto the
1013 default branch::
1013 default branch::
1014
1014
1015 hg rebase --base myfeature --dest default
1015 hg rebase --base myfeature --dest default
1016
1016
1017 - collapse a sequence of changes into a single commit::
1017 - collapse a sequence of changes into a single commit::
1018
1018
1019 hg rebase --collapse -r 1520:1525 -d .
1019 hg rebase --collapse -r 1520:1525 -d .
1020
1020
1021 - move a named branch while preserving its name::
1021 - move a named branch while preserving its name::
1022
1022
1023 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1023 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1024
1024
1025 - stabilize orphaned changesets so history looks linear::
1025 - stabilize orphaned changesets so history looks linear::
1026
1026
1027 hg rebase -r 'orphan()-obsolete()'\
1027 hg rebase -r 'orphan()-obsolete()'\
1028 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1028 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1029 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1029 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1030
1030
1031 Configuration Options:
1031 Configuration Options:
1032
1032
1033 You can make rebase require a destination if you set the following config
1033 You can make rebase require a destination if you set the following config
1034 option::
1034 option::
1035
1035
1036 [commands]
1036 [commands]
1037 rebase.requiredest = True
1037 rebase.requiredest = True
1038
1038
1039 By default, rebase will close the transaction after each commit. For
1039 By default, rebase will close the transaction after each commit. For
1040 performance purposes, you can configure rebase to use a single transaction
1040 performance purposes, you can configure rebase to use a single transaction
1041 across the entire rebase. WARNING: This setting introduces a significant
1041 across the entire rebase. WARNING: This setting introduces a significant
1042 risk of losing the work you've done in a rebase if the rebase aborts
1042 risk of losing the work you've done in a rebase if the rebase aborts
1043 unexpectedly::
1043 unexpectedly::
1044
1044
1045 [rebase]
1045 [rebase]
1046 singletransaction = True
1046 singletransaction = True
1047
1047
1048 By default, rebase writes to the working copy, but you can configure it to
1048 By default, rebase writes to the working copy, but you can configure it to
1049 run in-memory for better performance. When the rebase is not moving the
1049 run in-memory for better performance. When the rebase is not moving the
1050 parent(s) of the working copy (AKA the "currently checked out changesets"),
1050 parent(s) of the working copy (AKA the "currently checked out changesets"),
1051 this may also allow it to run even if the working copy is dirty::
1051 this may also allow it to run even if the working copy is dirty::
1052
1052
1053 [rebase]
1053 [rebase]
1054 experimental.inmemory = True
1054 experimental.inmemory = True
1055
1055
1056 Return Values:
1056 Return Values:
1057
1057
1058 Returns 0 on success, 1 if nothing to rebase or there are
1058 Returns 0 on success, 1 if nothing to rebase or there are
1059 unresolved conflicts.
1059 unresolved conflicts.
1060
1060
1061 """
1061 """
1062 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1062 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1063 action = cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')
1063 action = cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')
1064 if action:
1064 if action:
1065 cmdutil.check_incompatible_arguments(
1065 cmdutil.check_incompatible_arguments(
1066 opts, action, ['confirm', 'dry_run']
1066 opts, action, ['confirm', 'dry_run']
1067 )
1067 )
1068 cmdutil.check_incompatible_arguments(
1068 cmdutil.check_incompatible_arguments(
1069 opts, action, ['rev', 'source', 'base', 'dest']
1069 opts, action, ['rev', 'source', 'base', 'dest']
1070 )
1070 )
1071 cmdutil.check_at_most_one_arg(opts, 'confirm', 'dry_run')
1071 cmdutil.check_at_most_one_arg(opts, 'confirm', 'dry_run')
1072 cmdutil.check_at_most_one_arg(opts, 'rev', 'source', 'base')
1072 cmdutil.check_at_most_one_arg(opts, 'rev', 'source', 'base')
1073
1073
1074 if action or repo.currenttransaction() is not None:
1074 if action or repo.currenttransaction() is not None:
1075 # in-memory rebase is not compatible with resuming rebases.
1075 # in-memory rebase is not compatible with resuming rebases.
1076 # (Or if it is run within a transaction, since the restart logic can
1076 # (Or if it is run within a transaction, since the restart logic can
1077 # fail the entire transaction.)
1077 # fail the entire transaction.)
1078 inmemory = False
1078 inmemory = False
1079
1079
1080 if opts.get('auto_orphans'):
1080 if opts.get('auto_orphans'):
1081 disallowed_opts = set(opts) - {'auto_orphans'}
1081 disallowed_opts = set(opts) - {'auto_orphans'}
1082 cmdutil.check_incompatible_arguments(
1082 cmdutil.check_incompatible_arguments(
1083 opts, 'auto_orphans', disallowed_opts
1083 opts, 'auto_orphans', disallowed_opts
1084 )
1084 )
1085
1085
1086 userrevs = list(repo.revs(opts.get('auto_orphans')))
1086 userrevs = list(repo.revs(opts.get('auto_orphans')))
1087 opts['rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1087 opts['rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1088 opts['dest'] = b'_destautoorphanrebase(SRC)'
1088 opts['dest'] = b'_destautoorphanrebase(SRC)'
1089
1089
1090 if opts.get('dry_run') or opts.get('confirm'):
1090 if opts.get('dry_run') or opts.get('confirm'):
1091 return _dryrunrebase(ui, repo, action, opts)
1091 return _dryrunrebase(ui, repo, action, opts)
1092 elif action == 'stop':
1092 elif action == 'stop':
1093 rbsrt = rebaseruntime(repo, ui)
1093 rbsrt = rebaseruntime(repo, ui)
1094 with repo.wlock(), repo.lock():
1094 with repo.wlock(), repo.lock():
1095 rbsrt.restorestatus()
1095 rbsrt.restorestatus()
1096 if rbsrt.collapsef:
1096 if rbsrt.collapsef:
1097 raise error.StateError(_(b"cannot stop in --collapse session"))
1097 raise error.StateError(_(b"cannot stop in --collapse session"))
1098 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1098 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1099 if not (rbsrt.keepf or allowunstable):
1099 if not (rbsrt.keepf or allowunstable):
1100 raise error.StateError(
1100 raise error.StateError(
1101 _(
1101 _(
1102 b"cannot remove original changesets with"
1102 b"cannot remove original changesets with"
1103 b" unrebased descendants"
1103 b" unrebased descendants"
1104 ),
1104 ),
1105 hint=_(
1105 hint=_(
1106 b'either enable obsmarkers to allow unstable '
1106 b'either enable obsmarkers to allow unstable '
1107 b'revisions or use --keep to keep original '
1107 b'revisions or use --keep to keep original '
1108 b'changesets'
1108 b'changesets'
1109 ),
1109 ),
1110 )
1110 )
1111 # update to the current working revision
1111 # update to the current working revision
1112 # to clear interrupted merge
1112 # to clear interrupted merge
1113 mergemod.clean_update(repo[rbsrt.originalwd])
1113 mergemod.clean_update(repo[rbsrt.originalwd])
1114 rbsrt._finishrebase()
1114 rbsrt._finishrebase()
1115 return 0
1115 return 0
1116 elif inmemory:
1116 elif inmemory:
1117 try:
1117 try:
1118 # in-memory merge doesn't support conflicts, so if we hit any, abort
1118 # in-memory merge doesn't support conflicts, so if we hit any, abort
1119 # and re-run as an on-disk merge.
1119 # and re-run as an on-disk merge.
1120 overrides = {(b'rebase', b'singletransaction'): True}
1120 overrides = {(b'rebase', b'singletransaction'): True}
1121 with ui.configoverride(overrides, b'rebase'):
1121 with ui.configoverride(overrides, b'rebase'):
1122 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1122 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1123 except error.InMemoryMergeConflictsError:
1123 except error.InMemoryMergeConflictsError:
1124 if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
1124 if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
1125 raise
1125 raise
1126 ui.warn(
1126 ui.warn(
1127 _(
1127 _(
1128 b'hit merge conflicts; re-running rebase without in-memory'
1128 b'hit merge conflicts; re-running rebase without in-memory'
1129 b' merge\n'
1129 b' merge\n'
1130 )
1130 )
1131 )
1131 )
1132 clearstatus(repo)
1132 clearstatus(repo)
1133 clearcollapsemsg(repo)
1133 clearcollapsemsg(repo)
1134 return _dorebase(ui, repo, action, opts, inmemory=False)
1134 return _dorebase(ui, repo, action, opts, inmemory=False)
1135 else:
1135 else:
1136 return _dorebase(ui, repo, action, opts)
1136 return _dorebase(ui, repo, action, opts)
1137
1137
1138
1138
1139 def _dryrunrebase(ui, repo, action, opts):
1139 def _dryrunrebase(ui, repo, action, opts):
1140 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1140 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1141 confirm = opts.get('confirm')
1141 confirm = opts.get('confirm')
1142 if confirm:
1142 if confirm:
1143 ui.status(_(b'starting in-memory rebase\n'))
1143 ui.status(_(b'starting in-memory rebase\n'))
1144 else:
1144 else:
1145 ui.status(
1145 ui.status(
1146 _(b'starting dry-run rebase; repository will not be changed\n')
1146 _(b'starting dry-run rebase; repository will not be changed\n')
1147 )
1147 )
1148 with repo.wlock(), repo.lock():
1148 with repo.wlock(), repo.lock():
1149 needsabort = True
1149 needsabort = True
1150 try:
1150 try:
1151 overrides = {(b'rebase', b'singletransaction'): True}
1151 overrides = {(b'rebase', b'singletransaction'): True}
1152 with ui.configoverride(overrides, b'rebase'):
1152 with ui.configoverride(overrides, b'rebase'):
1153 res = _origrebase(
1153 res = _origrebase(
1154 ui,
1154 ui,
1155 repo,
1155 repo,
1156 action,
1156 action,
1157 opts,
1157 opts,
1158 rbsrt,
1158 rbsrt,
1159 )
1159 )
1160 if res == _nothingtorebase():
1160 if res == _nothingtorebase():
1161 needsabort = False
1161 needsabort = False
1162 return res
1162 return res
1163 except error.ConflictResolutionRequired:
1163 except error.ConflictResolutionRequired:
1164 ui.status(_(b'hit a merge conflict\n'))
1164 ui.status(_(b'hit a merge conflict\n'))
1165 return 1
1165 return 1
1166 except error.Abort:
1166 except error.Abort:
1167 needsabort = False
1167 needsabort = False
1168 raise
1168 raise
1169 else:
1169 else:
1170 if confirm:
1170 if confirm:
1171 ui.status(_(b'rebase completed successfully\n'))
1171 ui.status(_(b'rebase completed successfully\n'))
1172 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1172 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1173 # finish unfinished rebase
1173 # finish unfinished rebase
1174 rbsrt._finishrebase()
1174 rbsrt._finishrebase()
1175 else:
1175 else:
1176 rbsrt._prepareabortorcontinue(
1176 rbsrt._prepareabortorcontinue(
1177 isabort=True,
1177 isabort=True,
1178 backup=False,
1178 backup=False,
1179 suppwarns=True,
1179 suppwarns=True,
1180 confirm=confirm,
1180 confirm=confirm,
1181 )
1181 )
1182 needsabort = False
1182 needsabort = False
1183 else:
1183 else:
1184 ui.status(
1184 ui.status(
1185 _(
1185 _(
1186 b'dry-run rebase completed successfully; run without'
1186 b'dry-run rebase completed successfully; run without'
1187 b' -n/--dry-run to perform this rebase\n'
1187 b' -n/--dry-run to perform this rebase\n'
1188 )
1188 )
1189 )
1189 )
1190 return 0
1190 return 0
1191 finally:
1191 finally:
1192 if needsabort:
1192 if needsabort:
1193 # no need to store backup in case of dryrun
1193 # no need to store backup in case of dryrun
1194 rbsrt._prepareabortorcontinue(
1194 rbsrt._prepareabortorcontinue(
1195 isabort=True,
1195 isabort=True,
1196 backup=False,
1196 backup=False,
1197 suppwarns=True,
1197 suppwarns=True,
1198 dryrun=opts.get('dry_run'),
1198 dryrun=opts.get('dry_run'),
1199 )
1199 )
1200
1200
1201
1201
1202 def _dorebase(ui, repo, action, opts, inmemory=False):
1202 def _dorebase(ui, repo, action, opts, inmemory=False):
1203 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1203 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1204 return _origrebase(ui, repo, action, opts, rbsrt)
1204 return _origrebase(ui, repo, action, opts, rbsrt)
1205
1205
1206
1206
1207 def _origrebase(ui, repo, action, opts, rbsrt):
1207 def _origrebase(ui, repo, action, opts, rbsrt):
1208 assert action != 'stop'
1208 assert action != 'stop'
1209 with repo.wlock(), repo.lock():
1209 with repo.wlock(), repo.lock():
1210 if opts.get('interactive'):
1210 if opts.get('interactive'):
1211 try:
1211 try:
1212 if extensions.find(b'histedit'):
1212 if extensions.find(b'histedit'):
1213 enablehistedit = b''
1213 enablehistedit = b''
1214 except KeyError:
1214 except KeyError:
1215 enablehistedit = b" --config extensions.histedit="
1215 enablehistedit = b" --config extensions.histedit="
1216 help = b"hg%s help -e histedit" % enablehistedit
1216 help = b"hg%s help -e histedit" % enablehistedit
1217 msg = (
1217 msg = (
1218 _(
1218 _(
1219 b"interactive history editing is supported by the "
1219 b"interactive history editing is supported by the "
1220 b"'histedit' extension (see \"%s\")"
1220 b"'histedit' extension (see \"%s\")"
1221 )
1221 )
1222 % help
1222 % help
1223 )
1223 )
1224 raise error.InputError(msg)
1224 raise error.InputError(msg)
1225
1225
1226 if rbsrt.collapsemsg and not rbsrt.collapsef:
1226 if rbsrt.collapsemsg and not rbsrt.collapsef:
1227 raise error.InputError(
1227 raise error.InputError(
1228 _(b'message can only be specified with collapse')
1228 _(b'message can only be specified with collapse')
1229 )
1229 )
1230
1230
1231 if action:
1231 if action:
1232 if rbsrt.collapsef:
1232 if rbsrt.collapsef:
1233 raise error.InputError(
1233 raise error.InputError(
1234 _(b'cannot use collapse with continue or abort')
1234 _(b'cannot use collapse with continue or abort')
1235 )
1235 )
1236 if action == 'abort' and opts.get('tool', False):
1236 if action == 'abort' and opts.get('tool', False):
1237 ui.warn(_(b'tool option will be ignored\n'))
1237 ui.warn(_(b'tool option will be ignored\n'))
1238 if action == 'continue':
1238 if action == 'continue':
1239 ms = mergestatemod.mergestate.read(repo)
1239 ms = mergestatemod.mergestate.read(repo)
1240 mergeutil.checkunresolved(ms)
1240 mergeutil.checkunresolved(ms)
1241
1241
1242 retcode = rbsrt._prepareabortorcontinue(isabort=(action == 'abort'))
1242 retcode = rbsrt._prepareabortorcontinue(isabort=(action == 'abort'))
1243 if retcode is not None:
1243 if retcode is not None:
1244 return retcode
1244 return retcode
1245 else:
1245 else:
1246 # search default destination in this space
1246 # search default destination in this space
1247 # used in the 'hg pull --rebase' case, see issue 5214.
1247 # used in the 'hg pull --rebase' case, see issue 5214.
1248 destspace = opts.get('_destspace')
1248 destspace = opts.get('_destspace')
1249 destmap = _definedestmap(
1249 destmap = _definedestmap(
1250 ui,
1250 ui,
1251 repo,
1251 repo,
1252 rbsrt.inmemory,
1252 rbsrt.inmemory,
1253 opts.get('dest', None),
1253 opts.get('dest', None),
1254 opts.get('source', []),
1254 opts.get('source', []),
1255 opts.get('base', []),
1255 opts.get('base', []),
1256 opts.get('rev', []),
1256 opts.get('rev', []),
1257 destspace=destspace,
1257 destspace=destspace,
1258 )
1258 )
1259 retcode = rbsrt._preparenewrebase(destmap)
1259 retcode = rbsrt._preparenewrebase(destmap)
1260 if retcode is not None:
1260 if retcode is not None:
1261 return retcode
1261 return retcode
1262 storecollapsemsg(repo, rbsrt.collapsemsg)
1262 storecollapsemsg(repo, rbsrt.collapsemsg)
1263
1263
1264 tr = None
1264 tr = None
1265
1265
1266 singletr = ui.configbool(b'rebase', b'singletransaction')
1266 singletr = ui.configbool(b'rebase', b'singletransaction')
1267 if singletr:
1267 if singletr:
1268 tr = repo.transaction(b'rebase')
1268 tr = repo.transaction(b'rebase')
1269
1269
1270 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1270 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1271 # one transaction here. Otherwise, transactions are obtained when
1271 # one transaction here. Otherwise, transactions are obtained when
1272 # committing each node, which is slower but allows partial success.
1272 # committing each node, which is slower but allows partial success.
1273 with util.acceptintervention(tr):
1273 with util.acceptintervention(tr):
1274 # Same logic for the dirstate guard, except we don't create one when
1274 # Same logic for the dirstate guard, except we don't create one when
1275 # rebasing in-memory (it's not needed).
1275 # rebasing in-memory (it's not needed).
1276 dsguard = None
1276 dsguard = None
1277 if singletr and not rbsrt.inmemory:
1277 if singletr and not rbsrt.inmemory:
1278 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1278 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1279 with util.acceptintervention(dsguard):
1279 with util.acceptintervention(dsguard):
1280 rbsrt._performrebase(tr)
1280 rbsrt._performrebase(tr)
1281 if not rbsrt.dryrun:
1281 if not rbsrt.dryrun:
1282 rbsrt._finishrebase()
1282 rbsrt._finishrebase()
1283
1283
1284
1284
1285 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1285 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1286 """use revisions argument to define destmap {srcrev: destrev}"""
1286 """use revisions argument to define destmap {srcrev: destrev}"""
1287 if revf is None:
1287 if revf is None:
1288 revf = []
1288 revf = []
1289
1289
1290 # destspace is here to work around issues with `hg pull --rebase` see
1290 # destspace is here to work around issues with `hg pull --rebase` see
1291 # issue5214 for details
1291 # issue5214 for details
1292
1292
1293 cmdutil.checkunfinished(repo)
1293 cmdutil.checkunfinished(repo)
1294 if not inmemory:
1294 if not inmemory:
1295 cmdutil.bailifchanged(repo)
1295 cmdutil.bailifchanged(repo)
1296
1296
1297 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1297 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1298 raise error.InputError(
1298 raise error.InputError(
1299 _(b'you must specify a destination'),
1299 _(b'you must specify a destination'),
1300 hint=_(b'use: hg rebase -d REV'),
1300 hint=_(b'use: hg rebase -d REV'),
1301 )
1301 )
1302
1302
1303 dest = None
1303 dest = None
1304
1304
1305 if revf:
1305 if revf:
1306 rebaseset = logcmdutil.revrange(repo, revf)
1306 rebaseset = logcmdutil.revrange(repo, revf)
1307 if not rebaseset:
1307 if not rebaseset:
1308 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1308 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1309 return None
1309 return None
1310 elif srcf:
1310 elif srcf:
1311 src = logcmdutil.revrange(repo, srcf)
1311 src = logcmdutil.revrange(repo, srcf)
1312 if not src:
1312 if not src:
1313 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1313 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1314 return None
1314 return None
1315 # `+ (%ld)` to work around `wdir()::` being empty
1315 # `+ (%ld)` to work around `wdir()::` being empty
1316 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1316 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1317 else:
1317 else:
1318 base = logcmdutil.revrange(repo, basef or [b'.'])
1318 base = logcmdutil.revrange(repo, basef or [b'.'])
1319 if not base:
1319 if not base:
1320 ui.status(
1320 ui.status(
1321 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1321 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1322 )
1322 )
1323 return None
1323 return None
1324 if destf:
1324 if destf:
1325 # --base does not support multiple destinations
1325 # --base does not support multiple destinations
1326 dest = scmutil.revsingle(repo, destf)
1326 dest = logcmdutil.revsingle(repo, destf)
1327 else:
1327 else:
1328 dest = repo[_destrebase(repo, base, destspace=destspace)]
1328 dest = repo[_destrebase(repo, base, destspace=destspace)]
1329 destf = bytes(dest)
1329 destf = bytes(dest)
1330
1330
1331 roots = [] # selected children of branching points
1331 roots = [] # selected children of branching points
1332 bpbase = {} # {branchingpoint: [origbase]}
1332 bpbase = {} # {branchingpoint: [origbase]}
1333 for b in base: # group bases by branching points
1333 for b in base: # group bases by branching points
1334 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1334 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1335 bpbase[bp] = bpbase.get(bp, []) + [b]
1335 bpbase[bp] = bpbase.get(bp, []) + [b]
1336 if None in bpbase:
1336 if None in bpbase:
1337 # emulate the old behavior, showing "nothing to rebase" (a better
1337 # emulate the old behavior, showing "nothing to rebase" (a better
1338 # behavior may be abort with "cannot find branching point" error)
1338 # behavior may be abort with "cannot find branching point" error)
1339 bpbase.clear()
1339 bpbase.clear()
1340 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1340 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1341 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1341 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1342
1342
1343 rebaseset = repo.revs(b'%ld::', roots)
1343 rebaseset = repo.revs(b'%ld::', roots)
1344
1344
1345 if not rebaseset:
1345 if not rebaseset:
1346 # transform to list because smartsets are not comparable to
1346 # transform to list because smartsets are not comparable to
1347 # lists. This should be improved to honor laziness of
1347 # lists. This should be improved to honor laziness of
1348 # smartset.
1348 # smartset.
1349 if list(base) == [dest.rev()]:
1349 if list(base) == [dest.rev()]:
1350 if basef:
1350 if basef:
1351 ui.status(
1351 ui.status(
1352 _(
1352 _(
1353 b'nothing to rebase - %s is both "base"'
1353 b'nothing to rebase - %s is both "base"'
1354 b' and destination\n'
1354 b' and destination\n'
1355 )
1355 )
1356 % dest
1356 % dest
1357 )
1357 )
1358 else:
1358 else:
1359 ui.status(
1359 ui.status(
1360 _(
1360 _(
1361 b'nothing to rebase - working directory '
1361 b'nothing to rebase - working directory '
1362 b'parent is also destination\n'
1362 b'parent is also destination\n'
1363 )
1363 )
1364 )
1364 )
1365 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1365 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1366 if basef:
1366 if basef:
1367 ui.status(
1367 ui.status(
1368 _(
1368 _(
1369 b'nothing to rebase - "base" %s is '
1369 b'nothing to rebase - "base" %s is '
1370 b'already an ancestor of destination '
1370 b'already an ancestor of destination '
1371 b'%s\n'
1371 b'%s\n'
1372 )
1372 )
1373 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1373 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1374 )
1374 )
1375 else:
1375 else:
1376 ui.status(
1376 ui.status(
1377 _(
1377 _(
1378 b'nothing to rebase - working '
1378 b'nothing to rebase - working '
1379 b'directory parent is already an '
1379 b'directory parent is already an '
1380 b'ancestor of destination %s\n'
1380 b'ancestor of destination %s\n'
1381 )
1381 )
1382 % dest
1382 % dest
1383 )
1383 )
1384 else: # can it happen?
1384 else: # can it happen?
1385 ui.status(
1385 ui.status(
1386 _(b'nothing to rebase from %s to %s\n')
1386 _(b'nothing to rebase from %s to %s\n')
1387 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1387 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1388 )
1388 )
1389 return None
1389 return None
1390
1390
1391 if wdirrev in rebaseset:
1391 if wdirrev in rebaseset:
1392 raise error.InputError(_(b'cannot rebase the working copy'))
1392 raise error.InputError(_(b'cannot rebase the working copy'))
1393 rebasingwcp = repo[b'.'].rev() in rebaseset
1393 rebasingwcp = repo[b'.'].rev() in rebaseset
1394 ui.log(
1394 ui.log(
1395 b"rebase",
1395 b"rebase",
1396 b"rebasing working copy parent: %r\n",
1396 b"rebasing working copy parent: %r\n",
1397 rebasingwcp,
1397 rebasingwcp,
1398 rebase_rebasing_wcp=rebasingwcp,
1398 rebase_rebasing_wcp=rebasingwcp,
1399 )
1399 )
1400 if inmemory and rebasingwcp:
1400 if inmemory and rebasingwcp:
1401 # Check these since we did not before.
1401 # Check these since we did not before.
1402 cmdutil.checkunfinished(repo)
1402 cmdutil.checkunfinished(repo)
1403 cmdutil.bailifchanged(repo)
1403 cmdutil.bailifchanged(repo)
1404
1404
1405 if not destf:
1405 if not destf:
1406 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1406 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1407 destf = bytes(dest)
1407 destf = bytes(dest)
1408
1408
1409 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1409 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1410 alias = {b'ALLSRC': allsrc}
1410 alias = {b'ALLSRC': allsrc}
1411
1411
1412 if dest is None:
1412 if dest is None:
1413 try:
1413 try:
1414 # fast path: try to resolve dest without SRC alias
1414 # fast path: try to resolve dest without SRC alias
1415 dest = scmutil.revsingle(repo, destf, localalias=alias)
1415 dest = scmutil.revsingle(repo, destf, localalias=alias)
1416 except error.RepoLookupError:
1416 except error.RepoLookupError:
1417 # multi-dest path: resolve dest for each SRC separately
1417 # multi-dest path: resolve dest for each SRC separately
1418 destmap = {}
1418 destmap = {}
1419 for r in rebaseset:
1419 for r in rebaseset:
1420 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1420 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1421 # use repo.anyrevs instead of scmutil.revsingle because we
1421 # use repo.anyrevs instead of scmutil.revsingle because we
1422 # don't want to abort if destset is empty.
1422 # don't want to abort if destset is empty.
1423 destset = repo.anyrevs([destf], user=True, localalias=alias)
1423 destset = repo.anyrevs([destf], user=True, localalias=alias)
1424 size = len(destset)
1424 size = len(destset)
1425 if size == 1:
1425 if size == 1:
1426 destmap[r] = destset.first()
1426 destmap[r] = destset.first()
1427 elif size == 0:
1427 elif size == 0:
1428 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1428 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1429 else:
1429 else:
1430 raise error.InputError(
1430 raise error.InputError(
1431 _(b'rebase destination for %s is not unique') % repo[r]
1431 _(b'rebase destination for %s is not unique') % repo[r]
1432 )
1432 )
1433
1433
1434 if dest is not None:
1434 if dest is not None:
1435 # single-dest case: assign dest to each rev in rebaseset
1435 # single-dest case: assign dest to each rev in rebaseset
1436 destrev = dest.rev()
1436 destrev = dest.rev()
1437 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1437 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1438
1438
1439 if not destmap:
1439 if not destmap:
1440 ui.status(_(b'nothing to rebase - empty destination\n'))
1440 ui.status(_(b'nothing to rebase - empty destination\n'))
1441 return None
1441 return None
1442
1442
1443 return destmap
1443 return destmap
1444
1444
1445
1445
1446 def externalparent(repo, state, destancestors):
1446 def externalparent(repo, state, destancestors):
1447 """Return the revision that should be used as the second parent
1447 """Return the revision that should be used as the second parent
1448 when the revisions in state is collapsed on top of destancestors.
1448 when the revisions in state is collapsed on top of destancestors.
1449 Abort if there is more than one parent.
1449 Abort if there is more than one parent.
1450 """
1450 """
1451 parents = set()
1451 parents = set()
1452 source = min(state)
1452 source = min(state)
1453 for rev in state:
1453 for rev in state:
1454 if rev == source:
1454 if rev == source:
1455 continue
1455 continue
1456 for p in repo[rev].parents():
1456 for p in repo[rev].parents():
1457 if p.rev() not in state and p.rev() not in destancestors:
1457 if p.rev() not in state and p.rev() not in destancestors:
1458 parents.add(p.rev())
1458 parents.add(p.rev())
1459 if not parents:
1459 if not parents:
1460 return nullrev
1460 return nullrev
1461 if len(parents) == 1:
1461 if len(parents) == 1:
1462 return parents.pop()
1462 return parents.pop()
1463 raise error.StateError(
1463 raise error.StateError(
1464 _(
1464 _(
1465 b'unable to collapse on top of %d, there is more '
1465 b'unable to collapse on top of %d, there is more '
1466 b'than one external parent: %s'
1466 b'than one external parent: %s'
1467 )
1467 )
1468 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1468 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1469 )
1469 )
1470
1470
1471
1471
def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
    """Commit the in-memory changes held by ``wctx`` and return the new node.

    ``wctx`` is an overlay/in-memory working context (it provides
    ``tomemctx``).  Returns ``None`` without committing when the resulting
    changeset would be empty and ``ui.allowemptycommit`` is not set.
    """
    # By convention, ``extra['branch']`` (set by extrafn) clobbers
    # ``branch`` (used when passing ``--keepbranches``).
    branch = None
    if b'branch' in extra:
        branch = extra[b'branch']

    # FIXME: We call _compact() because it's required to correctly detect
    # changed files. This was added to fix a regression shortly before the 5.5
    # release. A proper fix will be done in the default branch.
    wctx._compact()
    memctx = wctx.tomemctx(
        commitmsg,
        date=date,
        extra=extra,
        user=user,
        branch=branch,
        editor=editor,
    )
    if memctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
        # Nothing changed; skip the commit entirely.
        return None
    commitres = repo.commitctx(memctx)
    wctx.clean()  # Might be reused for the next revision being rebased
    return commitres
1498
1498
1499
1499
def commitnode(repo, editor, extra, user, date, commitmsg):
    """Commit the working-directory changes and return the new node.

    On-disk counterpart of ``commitmemorynode``: commits whatever is in the
    working directory (parents were already set by the caller).
    """
    # When each rebased changeset gets its own transaction, guard the
    # dirstate so a failed commit can roll it back; with
    # rebase.singletransaction the enclosing transaction handles that.
    dsguard = util.nullcontextmanager()
    if not repo.ui.configbool(b'rebase', b'singletransaction'):
        dsguard = dirstateguard.dirstateguard(repo, b'rebase')
    with dsguard:
        # Commit might fail if unresolved files exist
        newnode = repo.commit(
            text=commitmsg, user=user, date=date, extra=extra, editor=editor
        )

    repo.dirstate.setbranch(repo[newnode].branch())
    return newnode
1514
1514
1515
1515
def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
    """Rebase a single revision rev on top of p1 using base as merge ancestor

    ``wctx`` may be an in-memory overlay context or the on-disk working
    context; in the on-disk case the working directory is updated to ``p1``
    first.  Raises ``InMemoryMergeConflictsError`` (in-memory) or
    ``ConflictResolutionRequired`` (on-disk) when the merge leaves
    unresolved files.
    """
    # Merge phase
    # Update to destination and merge it with local
    p1ctx = repo[p1]
    if wctx.isinmemory():
        wctx.setbase(p1ctx)
    else:
        if repo[b'.'].rev() != p1:
            repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
            mergemod.clean_update(p1ctx)
        else:
            repo.ui.debug(b" already in destination\n")
        # This is, alas, necessary to invalidate workingctx's manifest cache,
        # as well as other data we litter on it in other places.
        wctx = repo[None]
        repo.dirstate.write(repo.currenttransaction())
    ctx = repo[rev]
    repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
    if base is not None:
        repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))

    # See explanation in merge.graft()
    mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
    stats = mergemod._update(
        repo,
        rev,
        branchmerge=True,
        force=True,
        ancestor=base,
        mergeancestor=mergeancestor,
        labels=[b'dest', b'source'],
        wc=wctx,
    )
    wctx.setparents(p1ctx.node(), repo[p2].node())
    if collapse:
        copies.graftcopies(wctx, ctx, p1ctx)
    else:
        # If we're not using --collapse, we need to
        # duplicate copies between the revision we're
        # rebasing and its first parent.
        copies.graftcopies(wctx, ctx, ctx.p1())

    if stats.unresolvedcount > 0:
        if wctx.isinmemory():
            raise error.InMemoryMergeConflictsError()
        else:
            raise error.ConflictResolutionRequired(b'rebase')
1564
1564
1565
1565
def adjustdest(repo, rev, destmap, state, skipped):
    r"""adjust rebase destination given the current rebase state

    rev is what is being rebased. Return a list of two revs, which are the
    adjusted destinations for rev's p1 and p2, respectively. If a parent is
    nullrev, return dest without adjustment for it.

    For example, when doing rebasing B+E to F, C to G, rebase will first move B
    to B1, and E's destination will be adjusted from F to B1.

        B1 <- written during rebasing B
        |
        F <- original destination of B, E
        |
        | E <- rev, which is being rebased
        | |
        | D <- prev, one parent of rev being checked
        | |
        | x <- skipped, ex. no successor or successor in (::dest)
        | |
        | C <- rebased as C', different destination
        | |
        | B <- rebased as B1     C'
        |/                       |
        A                        G <- destination of C, different

    Another example about merge changeset, rebase -r C+G+H -d K, rebase will
    first move C to C1, G to G1, and when it's checking H, the adjusted
    destinations will be [C1, G1].

        H       C1 G1
       /|       | /
      F G       |/
    K | |  ->   K
    | C D       |
    | |/        |
    | B         | ...
    |/          |/
    A           A

    Besides, adjust dest according to existing rebase information. For example,

      B C D    B needs to be rebased on top of C, C needs to be rebased on top
       \|/     of D. We will rebase C first.

      C'       After rebasing C, when considering B's destination, use C'
      |        instead of the original C.
      B D
       \ /
        A
    """
    # pick already rebased revs with same dest from state as interesting source
    dest = destmap[rev]
    source = [
        s
        for s, d in state.items()
        if d > 0 and destmap[s] == dest and s not in skipped
    ]

    result = []
    for prev in repo.changelog.parentrevs(rev):
        adjusted = dest
        if prev != nullrev:
            # latest finished rebase whose source is an ancestor of this parent
            candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
            if candidate is not None:
                adjusted = state[candidate]
        if adjusted == dest and dest in state:
            adjusted = state[dest]
            if adjusted == revtodo:
                # sortsource should produce an order that makes this impossible
                raise error.ProgrammingError(
                    b'rev %d should be rebased already at this time' % dest
                )
        result.append(adjusted)
    return result
1642
1642
1643
1643
def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
    """
    Abort if rebase will create divergence or rebase is noop because of markers

    `rebaseobsrevs`: set of obsolete revision in source
    `rebaseobsskipped`: set of revisions from source skipped because they have
    successors in destination or no non-obsolete successor.

    Raises StateError unless divergence is explicitly allowed via
    experimental.evolution.allowdivergence.
    """
    # Obsolete node with successors not in dest leads to divergence
    divergenceok = obsolete.isenabled(repo, obsolete.allowdivergenceopt)
    divergencebasecandidates = rebaseobsrevs - rebaseobsskipped

    if divergencebasecandidates and not divergenceok:
        divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
        msg = _(b"this rebase will cause divergences from: %s")
        h = _(
            b"to force the rebase please set "
            b"experimental.evolution.allowdivergence=True"
        )
        raise error.StateError(msg % (b",".join(divhashes),), hint=h)
1664
1664
1665
1665
def successorrevs(unfi, rev):
    """yield revision numbers for successors of rev"""
    # Must run on the unfiltered repo: obsolescence markers can point at
    # nodes that are hidden under the current filter.
    assert unfi.filtername is None
    node_to_rev = unfi.changelog.index.get_rev
    startnode = unfi[rev].node()
    for succnode in obsutil.allsuccessors(unfi.obsstore, [startnode]):
        succrev = node_to_rev(succnode)
        # Successors not known locally have no revision number; skip them.
        if succrev is not None:
            yield succrev
1674
1674
1675
1675
def defineparents(repo, rev, destmap, state, skipped, obsskipped):
    """Return new parents and optionally a merge base for rev being rebased

    Returns a 3-tuple ``(p1, p2, base)`` of revision numbers; ``base`` is
    the merge ancestor to pass to ``rebasenode``.

    The destination specified by "dest" cannot always be used directly because
    previously rebase result could affect destination. For example,

          D E    rebase -r C+D+E -d B
          |/     C will be rebased to C'
       B C      D's new destination will be C' instead of B
       |/       E's new destination will be C' instead of B
       A

    The new parents of a merge is slightly more complicated. See the comment
    block below.
    """
    # use unfiltered changelog since successorrevs may return filtered nodes
    assert repo.filtername is None
    cl = repo.changelog
    isancestor = cl.isancestorrev

    dest = destmap[rev]
    oldps = repo.changelog.parentrevs(rev)  # old parents
    newps = [nullrev, nullrev]  # new parents
    dests = adjustdest(repo, rev, destmap, state, skipped)
    bases = list(oldps)  # merge base candidates, initially just old parents

    if all(r == nullrev for r in oldps[1:]):
        # For non-merge changeset, just move p to adjusted dest as requested.
        newps[0] = dests[0]
    else:
        # For merge changeset, if we move p to dests[i] unconditionally, both
        # parents may change and the end result looks like "the merge loses a
        # parent", which is a surprise. This is a limit because "--dest" only
        # accepts one dest per src.
        #
        # Therefore, only move p with reasonable conditions (in this order):
        #   1. use dest, if dest is a descendent of (p or one of p's successors)
        #   2. use p's rebased result, if p is rebased (state[p] > 0)
        #
        # Comparing with adjustdest, the logic here does some additional work:
        #   1. decide which parents will not be moved towards dest
        #   2. if the above decision is "no", should a parent still be moved
        #      because it was rebased?
        #
        # For example:
        #
        #     C    # "rebase -r C -d D" is an error since none of the parents
        #    /|    # can be moved. "rebase -r B+C -d D" will move C's parent
        #   A B D  # B (using rule "2."), since B will be rebased.
        #
        # The loop tries to be not rely on the fact that a Mercurial node has
        # at most 2 parents.
        for i, p in enumerate(oldps):
            np = p  # new parent
            if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
                np = dests[i]
            elif p in state and state[p] > 0:
                np = state[p]

            # If one parent becomes an ancestor of the other, drop the ancestor
            for j, x in enumerate(newps[:i]):
                if x == nullrev:
                    continue
                if isancestor(np, x):  # CASE-1
                    np = nullrev
                elif isancestor(x, np):  # CASE-2
                    newps[j] = np
                    np = nullrev
                    # New parents forming an ancestor relationship does not
                    # mean the old parents have a similar relationship. Do not
                    # set bases[x] to nullrev.
                    bases[j], bases[i] = bases[i], bases[j]

            newps[i] = np

        # "rebasenode" updates to new p1, and the old p1 will be used as merge
        # base. If only p2 changes, merging using unchanged p1 as merge base is
        # suboptimal. Therefore swap parents to make the merge sane.
        if newps[1] != nullrev and oldps[0] == newps[0]:
            assert len(newps) == 2 and len(oldps) == 2
            newps.reverse()
            bases.reverse()

        # No parent change might be an error because we fail to make rev a
        # descendent of requested dest. This can happen, for example:
        #
        #     C    # rebase -r C -d D
        #    /|    # None of A and B will be changed to D and rebase fails.
        #   A B D
        if set(newps) == set(oldps) and dest not in newps:
            raise error.InputError(
                _(
                    b'cannot rebase %d:%s without '
                    b'moving at least one of its parents'
                )
                % (rev, repo[rev])
            )

    # Source should not be ancestor of dest. The check here guarantees it's
    # impossible. With multi-dest, the initial check does not cover complex
    # cases since we don't have abstractions to dry-run rebase cheaply.
    if any(p != nullrev and isancestor(rev, p) for p in newps):
        raise error.InputError(_(b'source is ancestor of destination'))

    # Check if the merge will contain unwanted changes. That may happen if
    # there are multiple special (non-changelog ancestor) merge bases, which
    # cannot be handled well by the 3-way merge algorithm. For example:
    #
    #     F
    #    /|
    #   D E  # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
    #   | |  # as merge base, the difference between D and F will include
    #   B C  # C, so the rebased F will contain C surprisingly. If "E" was
    #   |/   #  chosen, the rebased F will contain B.
    #   A Z
    #
    # But our merge base candidates (D and E in above case) could still be
    # better than the default (ancestor(F, Z) == null). Therefore still
    # pick one (so choose p1 above).
    if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
        unwanted = [None, None]  # unwanted[i]: unwanted revs if choose bases[i]
        for i, base in enumerate(bases):
            if base == nullrev or base in newps:
                continue
            # Revisions in the side (not chosen as merge base) branch that
            # might contain "surprising" contents
            other_bases = set(bases) - {base}
            siderevs = list(
                repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
            )

            # If those revisions are covered by rebaseset, the result is good.
            # A merge in rebaseset would be considered to cover its ancestors.
            if siderevs:
                rebaseset = [
                    r for r, d in state.items() if d > 0 and r not in obsskipped
                ]
                merges = [
                    r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
                ]
                unwanted[i] = list(
                    repo.revs(
                        b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
                    )
                )

        if any(revs is not None for revs in unwanted):
            # Choose a merge base that has a minimal number of unwanted revs.
            l, i = min(
                (len(revs), i)
                for i, revs in enumerate(unwanted)
                if revs is not None
            )

            # The merge will include unwanted revisions. Abort now. Revisit this if
            # we have a more advanced merge algorithm that handles multiple bases.
            if l > 0:
                unwanteddesc = _(b' or ').join(
                    (
                        b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
                        for revs in unwanted
                        if revs is not None
                    )
                )
                raise error.InputError(
                    _(b'rebasing %d:%s will include unwanted changes from %s')
                    % (rev, repo[rev], unwanteddesc)
                )

            # newps[0] should match merge base if possible. Currently, if newps[i]
            # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
            # the other's ancestor. In that case, it's fine to not swap newps here.
            # (see CASE-1 and CASE-2 above)
            if i != 0:
                if newps[i] != nullrev:
                    newps[0], newps[i] = newps[i], newps[0]
                bases[0], bases[i] = bases[i], bases[0]

    # "rebasenode" updates to new p1, use the corresponding merge base.
    base = bases[0]

    repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))

    return newps[0], newps[1], base
1860
1860
1861
1861
def isagitpatch(repo, patchname):
    """Return true if the given patch is in git format

    Scans the mq patch file line by line and stops at the first
    'diff --git' header.
    """
    mqpatch = os.path.join(repo.mq.path, patchname)
    # Use a context manager so the patch file is always closed; the previous
    # code passed a bare open() to linereader and leaked the handle (notably
    # on the early `return True`).
    with open(mqpatch, b'rb') as fp:
        for line in patch.linereader(fp):
            if line.startswith(b'diff --git'):
                return True
    return False
1869
1869
1870
1870
def updatemq(repo, state, skipped, **opts):
    """Update rebased mq patches - finalize and then import them

    ``state`` maps old revs to their rebased revs; ``skipped`` is the set of
    source revs that were dropped.  Applied patches whose revision was
    rebased are finalized and re-imported at their new location; skipped or
    removed patches are pruned from the series file.
    """
    mqrebase = {}
    mq = repo.mq
    original_series = mq.fullseries[:]
    skippedpatches = set()

    for p in mq.applied:
        rev = repo[p.node].rev()
        if rev in state:
            repo.ui.debug(
                b'revision %d is an mq patch (%s), finalize it.\n'
                % (rev, p.name)
            )
            mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(p.name)

    if mqrebase:
        mq.finish(repo, mqrebase.keys())

        # We must start import from the newest revision
        for rev in sorted(mqrebase, reverse=True):
            if rev not in skipped:
                name, isgit = mqrebase[rev]
                repo.ui.note(
                    _(b'updating mq patch %s to %d:%s\n')
                    % (name, state[rev], repo[state[rev]])
                )
                mq.qimport(
                    repo,
                    (),
                    patchname=name,
                    git=isgit,
                    rev=[b"%d" % state[rev]],
                )
            else:
                # Rebased and skipped
                skippedpatches.add(mqrebase[rev][0])

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        newseries = [
            s
            for s in original_series
            if mq.guard_re.split(s, 1)[0] not in skippedpatches
        ]
        mq.fullseries[:] = newseries
        mq.seriesdirty = True
        mq.savedirty()
1923
1923
1924
1924
def storecollapsemsg(repo, collapsemsg):
    """Store the collapse message to allow recovery

    Persists the message to .hg/last-message.txt so an interrupted rebase
    can restore it.  ``collapsemsg`` may be None/empty, in which case an
    empty message is stored.
    """
    collapsemsg = collapsemsg or b''
    # Context manager guarantees the file is closed even if write() fails
    # (the previous open/write/close sequence could leak the handle).
    with repo.vfs(b"last-message.txt", b"w") as f:
        f.write(b"%s\n" % collapsemsg)
1931
1931
1932
1932
def clearcollapsemsg(repo):
    """Remove collapse message file"""
    # ignoremissing: nothing to clean up if no message was ever stored
    repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1936
1936
1937
1937
def restorecollapsemsg(repo, isabort):
    """Restore previously stored collapse message

    Returns the stripped first line of .hg/last-message.txt.  If the file
    is missing: returns b'' when aborting, otherwise raises error.Abort
    (a missing file mid-rebase indicates corrupted state).
    """
    try:
        # Context manager ensures the file is closed even if readline()
        # raises (the previous open/readline/close could leak the handle).
        with repo.vfs(b"last-message.txt") as f:
            collapsemsg = f.readline().strip()
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        if isabort:
            # Oh well, just abort like normal
            collapsemsg = b''
        else:
            raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
    return collapsemsg
1953
1953
1954
1954
def clearstatus(repo):
    """Remove the rebase status file.

    If a transaction is active, first drop its 'rebasestate' file
    generator so the transaction does not rewrite the file after we
    delete it.
    """
    # Make sure the active transaction won't write the state file
    trans = repo.currenttransaction()
    if trans:
        trans.removefilegenerator(b'rebasestate')
    repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1962
1962
1963
1963
def sortsource(destmap):
    """Yield batches of source revisions in a rebase-safe order.

    Each yielded batch is a sorted list of revisions whose destinations
    are not themselves still waiting to be rebased, so every revision is
    rebased exactly once.  For example, when rebasing A onto B and B onto
    C, this yields [B] first, then [A], indicating B must go first.

    Raises InputError when the source/destination mapping forms a cycle,
    making the rebase impossible.
    """
    remaining = set(destmap)
    while remaining:
        # revisions whose destination is already settled can go now
        batch = [r for r in sorted(remaining) if destmap[r] not in remaining]
        if not batch:
            raise error.InputError(_(b'source and destination form a cycle'))
        remaining.difference_update(batch)
        yield batch
1988
1988
1989
1989
def buildstate(repo, destmap, collapse):
    """Define which revisions are going to be rebased and where

    repo: repo
    destmap: {srcrev: destrev}
    collapse: whether the rebased revisions will be folded into one commit

    Returns ``(originalwd, destmap, state)`` where ``state`` maps each
    source revision to ``revtodo`` (or to itself when it is already in
    place), or ``None`` when every root is already a child of its
    destination and there is nothing to do.

    Raises StateError when blocked by applied mq patches, and InputError
    for an empty rebase set, a source/destination cycle, or a source that
    is an ancestor of its destination.
    """
    rebaseset = destmap.keys()
    # remember the working directory parent so it can be restored later
    originalwd = repo[b'.'].rev()

    # This check isn't strictly necessary, since mq detects commits over an
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if b'qtip' in repo.tags():
        mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
        if set(destmap.values()) & mqapplied:
            raise error.StateError(_(b'cannot rebase onto an applied mq patch'))

    # Get "cycle" error early by exhausting the generator.
    sortedsrc = list(sortsource(destmap))  # a list of sorted revs
    if not sortedsrc:
        raise error.InputError(_(b'no matching revisions'))

    # Only check the first batch of revisions to rebase not depending on other
    # rebaseset. This means "source is ancestor of destination" for the second
    # (and following) batches of revisions are not checked here. We rely on
    # "defineparents" to do that check.
    roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
    if not roots:
        raise error.InputError(_(b'no matching revisions'))

    def revof(r):
        return r.rev()

    roots = sorted(roots, key=revof)
    state = dict.fromkeys(rebaseset, revtodo)
    emptyrebase = len(sortedsrc) == 1
    for root in roots:
        dest = repo[destmap[root.rev()]]
        commonbase = root.ancestor(dest)
        if commonbase == root:
            raise error.InputError(_(b'source is ancestor of destination'))
        if commonbase == dest:
            wctx = repo[None]
            if dest == wctx.p1():
                # when rebasing to '.', it will use the current wd branch name
                samebranch = root.branch() == wctx.branch()
            else:
                samebranch = root.branch() == dest.branch()
            if not collapse and samebranch and dest in root.parents():
                # mark the revision as done by setting its new revision
                # equal to its old (current) revisions
                state[root.rev()] = root.rev()
                repo.ui.debug(b'source is a child of destination\n')
                continue

        emptyrebase = False
        repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
    if emptyrebase:
        return None
    for rev in sorted(state):
        parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
        # if all parents of this revision are done, then so is this revision
        if parents and all((state.get(p) == p for p in parents)):
            state[rev] = rev
    return originalwd, destmap, state
2055
2055
2056
2056
def clearrebased(
    ui,
    repo,
    destmap,
    state,
    skipped,
    collapsedas=None,
    keepf=False,
    fm=None,
    backup=True,
):
    """dispose of rebased revision at the end of the rebase

    If `collapsedas` is not None, the rebase was a collapse whose result is
    the `collapsedas` node.

    If `keepf` is True, the rebase has --keep set and no nodes should be
    removed (but bookmarks still need to be moved).

    If `backup` is False, no backup will be stored when stripping rebased
    revisions.

    If `fm` (a formatter) is provided, the old-node -> new-nodes mapping is
    also emitted through it.
    """
    tonode = repo.changelog.node
    replacements = {}
    moves = {}
    # without obsmarkers, cleanup means stripping the rebased revisions
    stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)

    collapsednodes = []
    for rev, newrev in sorted(state.items()):
        if newrev >= 0 and newrev != rev:
            oldnode = tonode(rev)
            newnode = collapsedas or tonode(newrev)
            moves[oldnode] = newnode
            succs = None
            if rev in skipped:
                if stripcleanup or not repo[rev].obsolete():
                    # skipped revision ends up with no successor
                    succs = ()
            elif collapsedas:
                collapsednodes.append(oldnode)
            else:
                succs = (newnode,)
            if succs is not None:
                replacements[(oldnode,)] = succs
    if collapsednodes:
        # all collapsed nodes share the single collapsed successor
        replacements[tuple(collapsednodes)] = (collapsedas,)
    if fm:
        hf = fm.hexfunc
        fl = fm.formatlist
        fd = fm.formatdict
        changes = {}
        for oldns, newn in pycompat.iteritems(replacements):
            for oldn in oldns:
                changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
        nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
        fm.data(nodechanges=nodechanges)
    if keepf:
        # --keep: move bookmarks only, leave the old nodes in place
        replacements = {}
    scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2115
2115
2116
2116
def pullrebase(orig, ui, repo, *args, **opts):
    """Call rebase after pull if the latter has been invoked with --rebase

    Wrapper installed around the ``pull`` command by uisetup().  Without
    --rebase it only validates that --tool was not given, then delegates
    to the original pull.  With --rebase it pulls (suppressing the normal
    post-pull update) and then rebases the newly pulled revisions.
    """
    if opts.get('rebase'):
        if ui.configbool(b'commands', b'rebase.requiredest'):
            msg = _(b'rebase destination required by configuration')
            hint = _(b'use hg pull followed by hg rebase -d DEST')
            raise error.InputError(msg, hint=hint)

        with repo.wlock(), repo.lock():
            if opts.get('update'):
                del opts['update']
                ui.debug(
                    b'--update and --rebase are not compatible, ignoring '
                    b'the update flag\n'
                )

            cmdutil.checkunfinished(repo, skipmerge=True)
            cmdutil.bailifchanged(
                repo,
                hint=_(
                    b'cannot pull with rebase: '
                    b'please commit or shelve your changes first'
                ),
            )

            revsprepull = len(repo)
            origpostincoming = commands.postincoming

            def _dummy(*args, **kwargs):
                pass

            # suppress the post-pull working-directory update; the rebase
            # below (or the bare-update fallback) takes care of it
            commands.postincoming = _dummy
            try:
                ret = orig(ui, repo, *args, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                # --rev option from pull conflict with rebase own --rev
                # dropping it
                if 'rev' in opts:
                    del opts['rev']
                # positional argument from pull conflicts with rebase's own
                # --source.
                if 'source' in opts:
                    del opts['source']
                # revsprepull is the len of the repo, not revnum of tip.
                destspace = list(repo.changelog.revs(start=revsprepull))
                opts['_destspace'] = destspace
                try:
                    rebase(ui, repo, **opts)
                except error.NoMergeDestAbort:
                    # we can maybe update instead
                    rev, _a, _b = destutil.destupdate(repo)
                    if rev == repo[b'.'].rev():
                        ui.status(_(b'nothing to rebase\n'))
                    else:
                        ui.status(_(b'nothing to rebase - updating instead\n'))
                        # not passing argument to get the bare update behavior
                        # with warning and trumpets
                        commands.update(ui, repo)
    else:
        if opts.get('tool'):
            raise error.InputError(_(b'--tool can only be used with --rebase'))
        ret = orig(ui, repo, *args, **opts)

    return ret
2184
2184
2185
2185
def _compute_obsolete_sets(repo, rebaseobsrevs, destmap):
    """Figure out what to do about obsolete revisions

    `obsolete_with_successor_in_destination` is a mapping obsolete => successor
    for all obsolete nodes to be rebased given in `rebaseobsrevs` (the
    successor is None when the node has no live successor at all).

    `obsolete_with_successor_in_rebase_set` is a set with obsolete revisions,
    without a successor in destination, that would cause divergence.
    """
    obsolete_with_successor_in_destination = {}
    obsolete_with_successor_in_rebase_set = set()

    cl = repo.changelog
    get_rev = cl.index.get_rev
    extinctrevs = set(repo.revs(b'extinct()'))
    for srcrev in rebaseobsrevs:
        srcnode = cl.node(srcrev)
        # XXX: more advanced APIs are required to handle split correctly
        successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
        # obsutil.allsuccessors includes node itself
        successors.remove(srcnode)
        succrevs = {get_rev(s) for s in successors}
        # get_rev returns None for successors unknown locally; drop them
        succrevs.discard(None)
        if not successors or succrevs.issubset(extinctrevs):
            # no successor, or all successors are extinct
            obsolete_with_successor_in_destination[srcrev] = None
        else:
            dstrev = destmap[srcrev]
            for succrev in succrevs:
                if cl.isancestorrev(succrev, dstrev):
                    obsolete_with_successor_in_destination[srcrev] = succrev
                    break
            else:
                # If 'srcrev' has a successor in rebase set but none in
                # destination (which would be caught above), we shall skip it
                # and its descendants to avoid divergence.
                if srcrev in extinctrevs or any(s in destmap for s in succrevs):
                    obsolete_with_successor_in_rebase_set.add(srcrev)

    return (
        obsolete_with_successor_in_destination,
        obsolete_with_successor_in_rebase_set,
    )
2229
2229
2230
2230
def abortrebase(ui, repo):
    """Abort an in-progress rebase, undoing what has been done so far."""
    with repo.wlock(), repo.lock():
        runtime = rebaseruntime(repo, ui)
        runtime._prepareabortorcontinue(isabort=True)
2235
2235
2236
2236
def continuerebase(ui, repo):
    """Resume an interrupted rebase once conflicts have been resolved.

    Refuses to continue while unresolved merge conflicts remain; returns
    the preparation return code if preparation fails, otherwise performs
    and finishes the remaining rebase.
    """
    with repo.wlock(), repo.lock():
        runtime = rebaseruntime(repo, ui)
        mergestate = mergestatemod.mergestate.read(repo)
        mergeutil.checkunresolved(mergestate)
        ret = runtime._prepareabortorcontinue(isabort=False)
        if ret is not None:
            return ret
        runtime._performrebase(None)
        runtime._finishrebase()
2247
2247
2248
2248
def summaryhook(ui, repo):
    """Contribute a 'rebase: ...' line to ``hg summary`` output.

    Silently does nothing when no rebase is in progress (no rebasestate
    file).  If the saved state references revisions that no longer exist,
    print a hint to run ``hg rebase --abort`` instead.
    """
    if not repo.vfs.exists(b'rebasestate'):
        return
    try:
        rbsrt = rebaseruntime(repo, ui, {})
        rbsrt.restorestatus()
        state = rbsrt.state
    except error.RepoLookupError:
        # i18n: column positioning for "hg summary"
        msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
        ui.write(msg)
        return
    # already-rebased revisions map to a non-negative new revision number
    numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
    # i18n: column positioning for "hg summary"
    ui.write(
        _(b'rebase: %s, %s (rebase --continue)\n')
        % (
            ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
            ui.label(_(b'%d remaining'), b'rebase.remaining')
            % (len(state) - numrebased),
        )
    )
2271
2271
2272
2272
def uisetup(ui):
    """Install the rebase extension's hooks into the command tables.

    Wraps ``pull`` to grow --rebase/--tool options, registers the
    ``hg summary`` hook, and declares rebase as an 'unfinished' state so
    --abort/--continue/--stop are handled generically.
    """
    # Replace pull with a decorator to provide --rebase option
    wrapped = extensions.wrapcommand(commands.table, b'pull', pullrebase)
    pullopts = wrapped[1]
    pullopts.append(
        (b'', b'rebase', None, _(b"rebase working directory to branch head"))
    )
    pullopts.append(
        (b't', b'tool', b'', _(b"specify merge tool for rebase"))
    )
    cmdutil.summaryhooks.add(b'rebase', summaryhook)
    statemod.addunfinished(
        b'rebase',
        fname=b'rebasestate',
        stopflag=True,
        continueflag=True,
        abortfunc=abortrebase,
        continuefunc=continuerebase,
    )
@@ -1,3946 +1,3946 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullrev,
18 nullrev,
19 short,
19 short,
20 )
20 )
21 from .pycompat import (
21 from .pycompat import (
22 getattr,
22 getattr,
23 open,
23 open,
24 setattr,
24 setattr,
25 )
25 )
26 from .thirdparty import attr
26 from .thirdparty import attr
27
27
28 from . import (
28 from . import (
29 bookmarks,
29 bookmarks,
30 changelog,
30 changelog,
31 copies,
31 copies,
32 crecord as crecordmod,
32 crecord as crecordmod,
33 dirstateguard,
33 dirstateguard,
34 encoding,
34 encoding,
35 error,
35 error,
36 formatter,
36 formatter,
37 logcmdutil,
37 logcmdutil,
38 match as matchmod,
38 match as matchmod,
39 merge as mergemod,
39 merge as mergemod,
40 mergestate as mergestatemod,
40 mergestate as mergestatemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 pathutil,
44 pathutil,
45 phases,
45 phases,
46 pycompat,
46 pycompat,
47 repair,
47 repair,
48 revlog,
48 revlog,
49 rewriteutil,
49 rewriteutil,
50 scmutil,
50 scmutil,
51 state as statemod,
51 state as statemod,
52 subrepoutil,
52 subrepoutil,
53 templatekw,
53 templatekw,
54 templater,
54 templater,
55 util,
55 util,
56 vfs as vfsmod,
56 vfs as vfsmod,
57 )
57 )
58
58
59 from .utils import (
59 from .utils import (
60 dateutil,
60 dateutil,
61 stringutil,
61 stringutil,
62 )
62 )
63
63
64 from .revlogutils import (
64 from .revlogutils import (
65 constants as revlog_constants,
65 constants as revlog_constants,
66 )
66 )
67
67
68 if pycompat.TYPE_CHECKING:
68 if pycompat.TYPE_CHECKING:
69 from typing import (
69 from typing import (
70 Any,
70 Any,
71 Dict,
71 Dict,
72 )
72 )
73
73
74 for t in (Any, Dict):
74 for t in (Any, Dict):
75 assert t
75 assert t
76
76
77 stringio = util.stringio
77 stringio = util.stringio
78
78
79 # templates of common command options
79 # templates of common command options
80
80
81 dryrunopts = [
81 dryrunopts = [
82 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
82 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
83 ]
83 ]
84
84
85 confirmopts = [
85 confirmopts = [
86 (b'', b'confirm', None, _(b'ask before applying actions')),
86 (b'', b'confirm', None, _(b'ask before applying actions')),
87 ]
87 ]
88
88
89 remoteopts = [
89 remoteopts = [
90 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
90 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
91 (
91 (
92 b'',
92 b'',
93 b'remotecmd',
93 b'remotecmd',
94 b'',
94 b'',
95 _(b'specify hg command to run on the remote side'),
95 _(b'specify hg command to run on the remote side'),
96 _(b'CMD'),
96 _(b'CMD'),
97 ),
97 ),
98 (
98 (
99 b'',
99 b'',
100 b'insecure',
100 b'insecure',
101 None,
101 None,
102 _(b'do not verify server certificate (ignoring web.cacerts config)'),
102 _(b'do not verify server certificate (ignoring web.cacerts config)'),
103 ),
103 ),
104 ]
104 ]
105
105
106 walkopts = [
106 walkopts = [
107 (
107 (
108 b'I',
108 b'I',
109 b'include',
109 b'include',
110 [],
110 [],
111 _(b'include names matching the given patterns'),
111 _(b'include names matching the given patterns'),
112 _(b'PATTERN'),
112 _(b'PATTERN'),
113 ),
113 ),
114 (
114 (
115 b'X',
115 b'X',
116 b'exclude',
116 b'exclude',
117 [],
117 [],
118 _(b'exclude names matching the given patterns'),
118 _(b'exclude names matching the given patterns'),
119 _(b'PATTERN'),
119 _(b'PATTERN'),
120 ),
120 ),
121 ]
121 ]
122
122
123 commitopts = [
123 commitopts = [
124 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
124 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
125 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
125 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
126 ]
126 ]
127
127
128 commitopts2 = [
128 commitopts2 = [
129 (
129 (
130 b'd',
130 b'd',
131 b'date',
131 b'date',
132 b'',
132 b'',
133 _(b'record the specified date as commit date'),
133 _(b'record the specified date as commit date'),
134 _(b'DATE'),
134 _(b'DATE'),
135 ),
135 ),
136 (
136 (
137 b'u',
137 b'u',
138 b'user',
138 b'user',
139 b'',
139 b'',
140 _(b'record the specified user as committer'),
140 _(b'record the specified user as committer'),
141 _(b'USER'),
141 _(b'USER'),
142 ),
142 ),
143 ]
143 ]
144
144
145 commitopts3 = [
145 commitopts3 = [
146 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
146 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
147 (b'U', b'currentuser', None, _(b'record the current user as committer')),
147 (b'U', b'currentuser', None, _(b'record the current user as committer')),
148 ]
148 ]
149
149
150 formatteropts = [
150 formatteropts = [
151 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
151 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
152 ]
152 ]
153
153
154 templateopts = [
154 templateopts = [
155 (
155 (
156 b'',
156 b'',
157 b'style',
157 b'style',
158 b'',
158 b'',
159 _(b'display using template map file (DEPRECATED)'),
159 _(b'display using template map file (DEPRECATED)'),
160 _(b'STYLE'),
160 _(b'STYLE'),
161 ),
161 ),
162 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
162 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
163 ]
163 ]
164
164
165 logopts = [
165 logopts = [
166 (b'p', b'patch', None, _(b'show patch')),
166 (b'p', b'patch', None, _(b'show patch')),
167 (b'g', b'git', None, _(b'use git extended diff format')),
167 (b'g', b'git', None, _(b'use git extended diff format')),
168 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
168 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
169 (b'M', b'no-merges', None, _(b'do not show merges')),
169 (b'M', b'no-merges', None, _(b'do not show merges')),
170 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
170 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
171 (b'G', b'graph', None, _(b"show the revision DAG")),
171 (b'G', b'graph', None, _(b"show the revision DAG")),
172 ] + templateopts
172 ] + templateopts
173
173
# Shared command-line option tables.  Each entry is a flag tuple:
# (short name, long name, default value, help text[, metavar]).

# basic diff options shared by all diff-like commands
diffopts = [
    (b'a', b'text', None, _(b'treat all files as text')),
    (
        b'g',
        b'git',
        None,
        _(b'use git extended diff format (DEFAULT: diff.git)'),
    ),
    (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
    (b'', b'nodates', None, _(b'omit dates from diff headers')),
]

# whitespace-handling options, reused standalone and inside diffopts2
diffwsopts = [
    (
        b'w',
        b'ignore-all-space',
        None,
        _(b'ignore white space when comparing lines'),
    ),
    (
        b'b',
        b'ignore-space-change',
        None,
        _(b'ignore changes in the amount of white space'),
    ),
    (
        b'B',
        b'ignore-blank-lines',
        None,
        _(b'ignore changes whose lines are all blank'),
    ),
    (
        b'Z',
        b'ignore-space-at-eol',
        None,
        _(b'ignore changes in whitespace at EOL'),
    ),
]

# extended diff options: presentation tweaks plus the whitespace options
diffopts2 = (
    [
        (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
        (
            b'p',
            b'show-function',
            None,
            _(
                b'show which function each change is in (DEFAULT: diff.showfunc)'
            ),
        ),
        (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
    ]
    + diffwsopts
    + [
        (
            b'U',
            b'unified',
            b'',
            _(b'number of lines of context to show'),
            _(b'NUM'),
        ),
        (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
        (
            b'',
            b'root',
            b'',
            _(b'produce diffs relative to subdirectory'),
            _(b'DIR'),
        ),
    ]
)

mergetoolopts = [
    (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
]

similarityopts = [
    (
        b's',
        b'similarity',
        b'',
        _(b'guess renamed files by similarity (0<=s<=100)'),
        _(b'SIMILARITY'),
    )
]

subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]

# options for selecting which revlog a debug command operates on
debugrevlogopts = [
    (b'c', b'changelog', False, _(b'open changelog')),
    (b'm', b'manifest', False, _(b'open manifest')),
    (b'', b'dir', b'', _(b'open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = b"^HG: ------------------------ >8 ------------------------$"
271
271
272
272
def check_at_most_one_arg(opts, *args):
    """abort if more than one of the arguments are in opts

    Returns the unique argument or None if none of them were specified.
    """

    def to_display(name):
        # option names use '_' internally but '-' on the command line
        return pycompat.sysbytes(name).replace(b'_', b'-')

    found = None
    for candidate in args:
        if not opts.get(candidate):
            continue
        if found:
            raise error.InputError(
                _(b'cannot specify both --%s and --%s')
                % (to_display(found), to_display(candidate))
            )
        found = candidate
    return found
292
292
293
293
def check_incompatible_arguments(opts, first, others):
    """abort if the first argument is given along with any of the others

    Unlike check_at_most_one_arg(), `others` are not mutually exclusive
    among themselves, and they're passed as a single collection.
    """
    for incompatible in others:
        # delegate the actual pairwise check (and error message) to
        # check_at_most_one_arg()
        check_at_most_one_arg(opts, first, incompatible)
302
302
303
303
def resolve_commit_options(ui, opts):
    """modify commit options dict to handle related options

    The return value indicates that ``rewrite.update-timestamp`` is the reason
    the ``date`` option is set.
    """
    # --date/--currentdate and --user/--currentuser are mutually exclusive
    check_at_most_one_arg(opts, 'date', 'currentdate')
    check_at_most_one_arg(opts, 'user', 'currentuser')

    datemaydiffer = False  # date-only change should be ignored?

    if opts.get('currentdate'):
        opts['date'] = b'%d %d' % dateutil.makedate()
    elif (
        not opts.get('date')
        and ui.configbool(b'rewrite', b'update-timestamp')
        and opts.get('currentdate') is None
    ):
        # rewrite.update-timestamp implies --currentdate, but only when
        # --currentdate is entirely absent; the `is None` test presumably
        # distinguishes "flag not given" from an explicit false value --
        # TODO confirm against the option parser
        opts['date'] = b'%d %d' % dateutil.makedate()
        datemaydiffer = True

    if opts.get('currentuser'):
        opts['user'] = ui.username()

    return datemaydiffer
329
329
330
330
def check_note_size(opts):
    """make sure note is of valid format

    Raises InputError when the note is longer than 255 bytes or spans
    multiple lines; an absent or empty note is accepted silently.
    """

    note = opts.get('note')
    if note:
        if len(note) > 255:
            raise error.InputError(
                _(b"cannot store a note of more than 255 bytes")
            )
        if b'\n' in note:
            raise error.InputError(_(b"note cannot contain a newline"))
342
342
343
343
def ishunk(x):
    """Return True if ``x`` is a (possibly curses-wrapped) patch hunk."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
347
347
348
348
def isheader(x):
    """Return True if ``x`` is a (possibly curses-wrapped) patch header."""
    return isinstance(x, (crecordmod.uiheader, patch.header))
352
352
353
353
def newandmodified(chunks):
    """Scan ``chunks`` for headers introducing new files.

    Returns a pair of sets: the names of newly-added files, and the other
    files mentioned by those headers (e.g. rename sources) that must also
    be restored.
    """
    newfiles = set()
    alsorestore = set()
    for c in chunks:
        if not (isheader(c) and c.isnewfile()):
            continue
        target = c.filename()
        newfiles.add(target)
        alsorestore.update(set(c.files()) - {target})
    return newfiles, alsorestore
362
362
363
363
def parsealiases(cmd):
    """Expand a b'|'-separated command spec into its list of aliases.

    For every alias containing b'-', a dash-less variant is appended at the
    end (unless it already appears in the spec), so users may type either
    form.
    """
    aliases = cmd.split(b"|")
    seen = set(aliases)
    dashless = []
    for alias in aliases:
        if b'-' not in alias:
            continue
        folded = alias.replace(b'-', b'')
        if folded not in seen:
            seen.add(folded)
            dashless.append(folded)
    # folded variants go after all explicitly-listed aliases
    aliases.extend(dashless)
    return aliases
376
376
377
377
def setupwrapcolorwrite(ui):
    """Replace ui.write with a wrapper that labels/colorizes diff output.

    Returns the original write method so the caller can restore it once
    the diff has been emitted.
    """
    originalwrite = ui.write

    def labeledwrite(*args, **kw):
        baselabel = kw.pop('label', b'')
        # difflabel splits the output into (chunk, label) pairs so color
        # handling can style each piece
        for chunk, sublabel in patch.difflabel(lambda: args):
            originalwrite(chunk, label=baselabel + sublabel)

    setattr(ui, 'write', labeledwrite)
    return originalwrite
392
392
393
393
def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
    """Let the user filter ``originalhunks``, via curses when enabled.

    Falls back to the plain-text chunk selector when the curses interface
    reports that it cannot run.
    """
    try:
        if usecurses:
            if testfile:
                # test mode: drive the curses selector from a script file
                chunkselectfn = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector
                )
            else:
                chunkselectfn = crecordmod.chunkselector

            return crecordmod.filterpatch(
                ui, originalhunks, chunkselectfn, operation
            )
    except crecordmod.fallbackerror as e:
        ui.warn(b'%s\n' % e)
        ui.warn(_(b'falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)
412
412
413
413
def recordfilter(ui, originalhunks, match, operation=None):
    """Prompt the user to filter ``originalhunks``.

    Returns (selected hunks, extra options).  *operation* is used to build
    ui messages to indicate the user what kind of filtering they are doing:
    reverting, committing, shelving, etc. (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testpath = ui.config(b'experimental', b'crecordtest')
    # temporarily wrap ui.write so the displayed diff is colorized
    restorewrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(
            ui, originalhunks, usecurses, testpath, match, operation
        )
    finally:
        ui.write = restorewrite
431
431
432
432
def dorecord(
    ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
):
    """Interactively select changes and commit them via ``commitfunc``.

    ``filterfn`` picks the hunks to record; ``cmdsuggest`` names the
    non-interactive command to suggest when the ui is not interactive;
    ``backupall`` forces backing up every changed file instead of only the
    ones being partially committed.  Raises InputError when run
    non-interactively or on a merge working directory.
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _(b'running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _(b'running non-interactively')
        raise error.InputError(msg)

    # make sure username is set before going interactive
    if not opts.get(b'user'):
        ui.username()  # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """
        if not opts.get(b'interactive-unshelve'):
            checkunfinished(repo, commit=True)
        wctx = repo[None]
        # a working directory with two parents is a merge in progress
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.InputError(
                _(
                    b'cannot partially commit a merge '
                    b'(use "hg commit" instead)'
                )
            )

        def fail(f, msg):
            raise error.InputError(b'%s: %s' % (f, msg))

        force = opts.get(b'force')
        if not force:
            # without --force, a bad pattern aborts via fail()
            match = matchmod.badmatch(match, fail)

        status = repo.status(match=match)

        overrides = {(b'ui', b'commitsubrepos'): True}

        with repo.ui.configoverride(overrides, b'record'):
            # subrepoutil.precommit() modifies the status
            tmpstatus = scmutil.status(
                copymod.copy(status.modified),
                copymod.copy(status.added),
                copymod.copy(status.removed),
                copymod.copy(status.deleted),
                copymod.copy(status.unknown),
                copymod.copy(status.ignored),
                copymod.copy(status.clean),  # pytype: disable=wrong-arg-count
            )

            # Force allows -X subrepo to skip the subrepo.
            subs, commitsubs, newstate = subrepoutil.precommit(
                repo.ui, wctx, tmpstatus, match, force=True
            )
            for s in subs:
                if s in commitsubs:
                    # dirty subrepos cannot be partially committed
                    dirtyreason = wctx.sub(s).dirtyreason(True)
                    raise error.Abort(dirtyreason)

        if not force:
            repo.checkcommitpatterns(wctx, match, status, fail)
        diffopts = patch.difffeatureopts(
            ui,
            opts=opts,
            whitespace=True,
            section=b'commands',
            configprefix=b'commit.interactive.',
        )
        # the interactive diff must be stable and machine-applicable
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        original_headers = patch.parsepatch(originaldiff)
        match = scmutil.match(repo[None], pats)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, original_headers, match)
        except error.PatchError as err:
            raise error.InputError(_(b'error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir. We also will need to restore
        # files that were the sources of renames so that the patch application
        # works.
        newlyaddedandmodifiedfiles, alsorestore = newandmodified(chunks)
        contenders = set()
        for h in chunks:
            if isheader(h):
                contenders.update(set(h.files()))

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_(b'no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [
                f
                for f in newfiles
                if f in modified or f in newlyaddedandmodifiedfiles
            ]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join(b'record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup directory from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(
                    prefix=os.path.basename(f) + b'.', dir=backupdir
                )
                os.close(fd)
                ui.debug(b'backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # serialize only the chunks for files that were backed up
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get(b'review', False):
                patchtext = (
                    crecordmod.diffhelptext
                    + crecordmod.patchhelptext
                    + fp.read()
                )
                reviewedpatch = ui.edit(
                    patchtext, b"", action=b"diff", repopath=repo.path
                )
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
                mergemod.revert_to(repo[b'.'], matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(b'applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.InputError(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in pycompat.iteritems(backups):
                    ui.debug(b'restoring %r to %r\n' % (tmpname, realname))

                    if dirstate.get_entry(realname).maybe_clean:
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified

                        # XXX-PENDINGCHANGE: We should clarify the context in
                        # which this function is called to make sure it
                        # already called within a `pendingchange`, However we
                        # are taking a shortcut here in order to be able to
                        # quickly deprecated the older API.
                        with dirstate.parentchange():
                            dirstate.update_file(
                                realname,
                                p1_tracked=True,
                                wc_tracked=True,
                                possibly_dirty=True,
                            )

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup: a failed restore must not mask the
                # original outcome of the commit attempt
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the working-copy lock around the whole record operation
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
664
664
665
665
class dirnode(object):
    """
    Represent a directory in user working copy with information required for
    the purpose of tersing its status.

    path is the path to the directory, without a trailing '/'

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct child of this directory

    subdirs is a dictionary of sub-directory name as the key and it's own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not direct child of this directory, we traverse to the
        directory of which this file is a direct child of and add the file
        there.
        """
        if b'/' in filename:
            # not a direct child: descend into (or create) the subdirectory
            # named by the first path component
            subdir, remainder = filename.split(b'/', 1)
            child = self.subdirs.get(subdir)
            if child is None:
                child = dirnode(pathutil.join(self.path, subdir))
                self.subdirs[subdir] = child
            child.addfile(remainder, status)
        else:
            self._addfileindir(filename, status)

        # every ancestor directory records the statuses seen below it
        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        If every file below this directory shares one status and the user
        asked to terse that status, a single (status, dirpath + '/') pair is
        produced.  Otherwise the direct files are yielded individually and
        each subdirectory is walked recursively.
        """
        if len(self.statuses) == 1:
            # note: pop() empties the (single-element) status set
            onlyst = self.statuses.pop()

            # terse only when the status abbreviation was requested
            if onlyst in terseargs:
                yield onlyst, self.path + b'/'
                return

        # direct children first...
        for entry in self.iterfilepaths():
            yield entry

        # ...then everything beneath the subdirectories
        for child in self.subdirs.values():
            for entry in child.tersewalk(terseargs):
                yield entry
765
765
766
766
def tersedir(statuslist, terseargs):
    """Terse the status if all files in a directory share the same status.

    statuslist is a scmutil.status() object holding one list of files per
    status; terseargs is the raw string the user passed to the `--terse`
    flag.

    Builds a tree of dirnode objects recording, at every node, enough
    information to decide whether that directory can be tersed, then walks
    the tree to produce the condensed scmutil.status().
    """
    # the ordering of this tuple determines the order of the final lists
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # reject any status abbreviation we do not know about
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.InputError(_(b"'%s' not recognized") % ch)

    # dirnode object representing the repository root
    rootobj = dirnode(b'')
    pstatus = (
        b'modified',
        b'added',
        b'deleted',
        b'clean',
        b'unknown',
        b'ignored',
        b'removed',
    )

    buckets = {}
    for attrname in pstatus:
        abbrev = attrname[0:1]
        for fname in getattr(statuslist, attrname):
            rootobj.addfile(fname, abbrev)
        buckets[abbrev] = []

    # the root directory itself is never tersed, so list its files directly
    for status, fpath in rootobj.iterfilepaths():
        buckets[status].append(fpath)

    # terse every subdirectory tree and collect the results
    for subdir in rootobj.subdirs.values():
        for status, fpath in subdir.tersewalk(terseargs):
            buckets[status].append(fpath)

    return scmutil.status(*[sorted(buckets[status]) for status in allst])
822
822
823
823
824 def _commentlines(raw):
824 def _commentlines(raw):
825 '''Surround lineswith a comment char and a new line'''
825 '''Surround lineswith a comment char and a new line'''
826 lines = raw.splitlines()
826 lines = raw.splitlines()
827 commentedlines = [b'# %s' % line for line in lines]
827 commentedlines = [b'# %s' % line for line in lines]
828 return b'\n'.join(commentedlines) + b'\n'
828 return b'\n'.join(commentedlines) + b'\n'
829
829
830
830
@attr.s(frozen=True)
class morestatus(object):
    """Extra detail rendered by `hg status` about unfinished operations.

    Holds the unfinished-operation name/message and, when a merge is in
    progress, the sorted list of unresolved paths, and renders them
    through a formatter.
    """

    reporoot = attr.ib()  # repository root (used to relativize paths)
    unfinishedop = attr.ib()  # name of the unfinished operation, or None
    unfinishedmsg = attr.ib()  # hint message for that operation, or None
    activemerge = attr.ib()  # True when a merge is in progress
    unresolvedpaths = attr.ib()  # sorted unresolved paths, or None
    # Paths already emitted through formatfile(). Use attr.Factory so each
    # instance gets its own set: a plain `default=set()` is evaluated once
    # at class-definition time and shared by every instance.
    _formattedpaths = attr.ib(init=False, default=attr.Factory(set))
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        """Record *path* as formatted; flag it if merge-unresolved."""
        self._formattedpaths.add(path)
        if self.activemerge and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        """Emit the trailing 'morestatus' item describing unfinished state."""
        if self.unfinishedop or self.unfinishedmsg:
            fm.startitem()
            fm.data(itemtype=b'morestatus')

            if self.unfinishedop:
                fm.data(unfinished=self.unfinishedop)
                statemsg = (
                    _(b'The repository is in an unfinished *%s* state.')
                    % self.unfinishedop
                )
                fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
            if self.unfinishedmsg:
                fm.data(unfinishedmsg=self.unfinishedmsg)

            # May also start new data items.
            self._formatconflicts(fm)

            if self.unfinishedmsg:
                fm.plain(
                    b'%s\n' % _commentlines(self.unfinishedmsg),
                    label=self._label,
                )

    def _formatconflicts(self, fm):
        """List unresolved merge conflicts (or note their absence)."""
        if not self.activemerge:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b'    %s'
                    % util.pathto(self.reporoot, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            msg = (
                _(
                    b'''Unresolved merge conflicts:

%s

To mark files as resolved:  hg resolve --mark FILE'''
                )
                % mergeliststr
            )

            # If any paths with unresolved conflicts were not previously
            # formatted, output them now.
            for f in self.unresolvedpaths:
                if f in self._formattedpaths:
                    # Already output.
                    continue
                fm.startitem()
                # We can't claim to know the status of the file - it may just
                # have been in one of the states that were not requested for
                # display, so it could be anything.
                fm.data(itemtype=b'file', path=f, unresolved=True)

        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
908
908
909
909
def readmorestatus(repo):
    """Return a morestatus object, or None if repo has no unfinished state."""
    statetuple = statemod.getrepostate(repo)
    mergestate = mergestatemod.mergestate.read(repo)
    activemerge = mergestate.active()
    if not (statetuple or activemerge):
        return None

    op = msg = unresolved = None
    if statetuple:
        op, msg = statetuple
    if activemerge:
        unresolved = sorted(mergestate.unresolved())
    return morestatus(repo.root, op, msg, activemerge, unresolved)
926
926
927
927
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # exact match short-circuits: "log" alias beats "log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # prefix match: the first alias starting with cmd wins
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue

        if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # debug commands are only offered when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
965
965
966
966
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact (possibly abbreviated-to-exact) match
    if cmd in choice:
        return choice[cmd]

    nmatches = len(choice)
    if nmatches > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))
    if nmatches == 1:
        return next(iter(choice.values()))

    raise error.UnknownCommand(cmd, allcmds)
982
982
983
983
def changebranch(ui, repo, revs, label, opts):
    """Change the branch name of given revs to label

    Rewrites every changeset in ``revs`` (which must be a linear,
    rewritable stack whose heads are topological heads) as a new changeset
    on branch ``label``, then records the replacements via
    scmutil.cleanupnodes and moves the working copy onto the rewritten
    parent when applicable.

    Raises error.InputError for an empty/non-linear revset, a clashing
    branch name (without --force), or a mid-stack selection.
    """

    with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = logcmdutil.revrange(repo, revs)
        if not revs:
            raise error.InputError(b"empty revision set")
        roots = repo.revs(b'roots(%ld)', revs)
        if len(roots) > 1:
            raise error.InputError(
                _(b"cannot change branch of non-linear revisions")
            )
        rewriteutil.precheck(repo, revs, b'change branch of')

        root = repo[roots.first()]
        # branches of the stack root's parents: moving back onto one of
        # those is allowed even if the branch name already exists
        rpb = {parent.branch() for parent in root.parents()}
        if (
            not opts.get(b'force')
            and label not in rpb
            and label in repo.branchmap()
        ):
            raise error.InputError(
                _(b"a branch of the same name already exists")
            )

        # make sure only topological heads
        if repo.revs(b'heads(%ld) - head()', revs):
            raise error.InputError(
                _(b"cannot change branch in middle of a stack")
            )

        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context

        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            # closure over `ctx`; memctx consumes it before the next loop
            # iteration rebinds `ctx`, so the late binding is harmless here
            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug(
                b"changing branch of '%s' from '%s' to '%s'\n"
                % (hex(ctx.node()), oldbranch, label)
            )
            extra = ctx.extra()
            # record the predecessor node in extras for traceability
            extra[b'branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(
                repo,
                (p1, p2),
                ctx.description(),
                ctx.files(),
                filectxfn,
                user=ctx.user(),
                date=ctx.date(),
                extra=extra,
                branch=label,
            )

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug(b'new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(
            repo, replacements, b'branch-change', fixphase=True
        )

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg

                hg.update(repo, newid[0], quietempty=True)

        ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1085
1085
1086
1086
def findrepo(p):
    """Walk up from ``p`` looking for a directory containing '.hg'.

    Returns the repository root (as a bytes path), or None when the
    filesystem root is reached without finding one.
    """
    current = p
    while True:
        if os.path.isdir(os.path.join(current, b".hg")):
            return current
        parent = os.path.dirname(current)
        if parent == current:
            # reached the filesystem root without finding a repo
            return None
        current = parent
1094
1094
1095
1095
def bailifchanged(repo, merge=True, hint=None):
    """Enforce the precondition that the working directory is clean.

    'merge' can be set to False if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != repo.nullid:
        raise error.StateError(_(b'outstanding uncommitted merge'), hint=hint)

    status = repo.status()
    dirty = (
        status.modified or status.added or status.removed or status.deleted
    )
    if dirty:
        raise error.StateError(_(b'uncommitted changes'), hint=hint)

    # apply the same check to every subrepo, in deterministic order
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
1113
1113
1114
1114
def logmessage(ui, opts):
    """Return the commit message according to the -m and -l options."""

    check_at_most_one_arg(opts, b'message', b'logfile')

    message = opts.get(b'message')
    logfile = opts.get(b'logfile')

    # -m wins; without either option we return whatever -m gave (None/empty)
    if message or not logfile:
        return message

    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while reading the message from a file
        return b'\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(
            _(b"can't read commit message '%s': %s")
            % (logfile, encoding.strtolocal(inst.strerror))
        )
1135
1135
1136
1136
def mergeeditform(ctxorbool, baseformname):
    """Return the appropriate editform name (referencing a committemplate).

    'ctxorbool' is either a ctx to be committed, or a bool indicating
    whether a merge is being committed.

    Returns baseformname with '.merge' appended when it is a merge, and
    '.normal' appended otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (b".merge" if ismerge else b".normal")
1153
1153
1154
1154
def getcommiteditor(
    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
):
    """Return the commit message editor matching the '--edit' option.

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing but before
    the empty-ness check; it returns the actual text to be stored into
    history, which allows changing the description before storing.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line ('HG: ' prefix and EOL are
    added automatically).

    'editform' is a dot-separated list of names distinguishing the purpose
    of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is specified, because those are specific
    for usage in MQ.
    """
    if not (edit or finishdesc or extramsg):
        if editform:
            return lambda r, c, s: commiteditor(r, c, s, editform=editform)
        return commiteditor

    def forced(r, c, s):
        return commitforceeditor(
            r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
        )

    return forced
1185
1185
1186
1186
def _escapecommandtemplate(tmpl):
    """Escape literal string segments of ``tmpl`` for command-line use."""
    pieces = [
        stringutil.escapestr(tmpl[start:end])
        if typ == b'string'
        else tmpl[start:end]
        for typ, start, end in templater.scantemplate(tmpl, raw=True)
    ]
    return b''.join(pieces)
1195
1195
1196
1196
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    if not tmpl:
        return tmpl
    templ = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return templ.renderdefault(props)
1214
1214
1215
1215
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against one changeset

    Each props item must be a stringify-able value or a callable returning
    such value, i.e. no bare list nor dict should be passed.
    """
    repo = ctx.repo()
    resources = formatter.templateresources(repo.ui, repo)
    templ = formatter.maketemplater(
        repo.ui, tmpl, defaults=templatekw.keywords, resources=resources
    )
    # props may deliberately override the default 'ctx' binding
    mapping = {b'ctx': ctx}
    if props:
        mapping.update(props)
    return templ.renderdefault(mapping)
1231
1231
1232
1232
def format_changeset_summary(ui, ctx, command=None, default_spec=None):
    """Format a changeset summary (one line).

    The template is resolved in order: a per-command
    'command-templates.oneline-summary.<command>' setting, the generic
    'command-templates.oneline-summary', the caller-supplied
    *default_spec*, and finally a built-in template.
    """
    spec = None
    if command:
        spec = ui.config(
            b'command-templates', b'oneline-summary.%s' % command, None
        )
    spec = (
        spec
        or ui.config(b'command-templates', b'oneline-summary')
        or default_spec
        or (
            b'{separate(" ", '
            b'label("oneline-summary.changeset", "{rev}:{node|short}")'
            b', '
            b'join(filter(namespaces % "{ifeq(namespace, "branches", "", join(names % "{label("oneline-summary.{namespace}", name)}", " "))}"), " ")'
            b')} '
            b'"{label("oneline-summary.desc", desc|firstline)}"'
        )
    )
    rendered = rendertemplate(ctx, spec)
    return rendered.split(b'\n')[0]
1255
1255
1256
1256
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Translate an old-style %-format filename pattern into a template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # mapping of each supported %-code to its template-language equivalent
    subst = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # %N/%n are only meaningful when the caller supplies total/seqno
    if total is not None:
        subst[b'N'] = b'{total}'
    if seqno is not None:
        subst[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        # zero-pad the sequence number to the width of the total count
        subst[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        subst[b's'] = b'{pathname|basename}'
        subst[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        subst[b'p'] = b'{pathname}'

    parts = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # keep real template constructs (e.g. {tags % "{tag}"}) as-is
            parts.append(pat[start:end])
            continue
        # within outermost string segments, expand %-codes and escape the
        # rest so literal text survives template evaluation unchanged
        pos = start
        while pos < end:
            pct = pat.find(b'%', pos, end)
            if pct < 0:
                parts.append(stringutil.escapestr(pat[pos:end]))
                break
            parts.append(stringutil.escapestr(pat[pos:pct]))
            if pct + 2 > end:
                # trailing lone '%' with no format character following it
                raise error.Abort(
                    _(b"incomplete format spec in output filename")
                )
            code = pat[pct + 1 : pct + 2]
            pos = pct + 2
            try:
                parts.append(subst[code])
            except KeyError:
                raise error.Abort(
                    _(b"invalid format spec '%%%s' in output filename") % code
                )
    return b''.join(parts)
1326
1326
1327
1327
def makefilename(ctx, pat, **props):
    """Expand an old-style %-format filename pattern against changectx *ctx*.

    An empty pattern is returned unchanged; otherwise the pattern is
    converted to a template and rendered with *props* as extra mappings.
    """
    if not pat:
        return pat
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(
        ctx, _buildfntemplate(pat, **props), pycompat.byteskwargs(props)
    )
1336
1336
1337
1337
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # an absent/empty pattern and the conventional '-' both mean std streams
    if not pat:
        return True
    return pat == b'-'
1341
1341
1342
1342
1343 class _unclosablefile(object):
1343 class _unclosablefile(object):
1344 def __init__(self, fp):
1344 def __init__(self, fp):
1345 self._fp = fp
1345 self._fp = fp
1346
1346
1347 def close(self):
1347 def close(self):
1348 pass
1348 pass
1349
1349
1350 def __iter__(self):
1350 def __iter__(self):
1351 return iter(self._fp)
1351 return iter(self._fp)
1352
1352
1353 def __getattr__(self, attr):
1353 def __getattr__(self, attr):
1354 return getattr(self._fp, attr)
1354 return getattr(self._fp, attr)
1355
1355
1356 def __enter__(self):
1356 def __enter__(self):
1357 return self
1357 return self
1358
1358
1359 def __exit__(self, exc_type, exc_value, exc_tb):
1359 def __exit__(self, exc_type, exc_value, exc_tb):
1360 pass
1360 pass
1361
1361
1362
1362
def makefileobj(ctx, pat, mode=b'wb', **props):
    """Open the file named by expanding *pat*, or wrap a standard stream.

    A stdio-looking pattern (empty or b'-') yields the ui's output stream
    for writable modes, or its input stream otherwise, wrapped so that
    close() is a no-op.
    """
    writable = mode not in (b'r', b'rb')

    if isstdiofilename(pat):
        ui = ctx.repo().ui
        return _unclosablefile(ui.fout if writable else ui.fin)
    return open(makefilename(ctx, pat, **props), mode)
1375
1375
1376
1376
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts[b'changelog']
    mf = opts[b'manifest']
    dir = opts[b'dir']

    # reject invalid flag/argument combinations up front
    problem = None
    if cl and mf:
        problem = _(b'cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        problem = _(b'cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            problem = _(b'cannot specify filename with --changelog or --manifest')
        elif not repo:
            problem = _(
                b'cannot specify --changelog or --manifest or --dir '
                b'without a repository'
            )
    if problem:
        raise error.InputError(problem)

    # locate the requested storage object inside the repository, if any
    store = None
    if repo:
        if cl:
            store = repo.unfiltered().changelog
        elif dir:
            if not scmutil.istreemanifest(repo):
                raise error.InputError(
                    _(
                        b"--dir can only be used on repos with "
                        b"treemanifest enabled"
                    )
                )
            if not dir.endswith(b'/'):
                dir = dir + b'/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                store = dirlog
        elif mf:
            store = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                store = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(store, revlog.revlog):
            pass
        elif util.safehasattr(store, b'_revlog'):
            store = store._revlog  # pytype: disable=attribute-error
        elif store is not None:
            raise error.InputError(
                _(b'%r does not appear to be a revlog') % store
            )

    if not store:
        if not returnrevlog:
            raise error.InputError(_(b'cannot give path to non-revlog'))

        if not file_:
            raise error.CommandError(cmd, _(b'invalid arguments'))
        if not os.path.isfile(file_):
            raise error.InputError(_(b"revlog '%s' not found") % file_)

        # fall back to opening the given path directly as a free-form revlog
        target = (revlog_constants.KIND_OTHER, b'free-form:%s' % file_)
        store = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False),
            target=target,
            radix=file_[:-2],
        )
    return store
1450
1450
1451
1451
def openrevlog(repo, cmd, file_, opts):
    """Obtain a revlog backing storage of an item.

    This is similar to ``openstorage()`` except it always returns a revlog.

    In most cases, a caller cares about the main storage object - not the
    revlog backing it. Therefore, this function should only be used by code
    that needs to examine low-level revlog implementation details. e.g. debug
    commands.
    """
    # delegate to openstorage() with returnrevlog forced on, so non-revlog
    # storage is either unwrapped to its backing revlog or rejected there
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1463
1463
1464
1464
1465 def copy(ui, repo, pats, opts, rename=False):
1465 def copy(ui, repo, pats, opts, rename=False):
1466 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1466 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1467
1467
1468 # called with the repo lock held
1468 # called with the repo lock held
1469 #
1469 #
1470 # hgsep => pathname that uses "/" to separate directories
1470 # hgsep => pathname that uses "/" to separate directories
1471 # ossep => pathname that uses os.sep to separate directories
1471 # ossep => pathname that uses os.sep to separate directories
1472 cwd = repo.getcwd()
1472 cwd = repo.getcwd()
1473 targets = {}
1473 targets = {}
1474 forget = opts.get(b"forget")
1474 forget = opts.get(b"forget")
1475 after = opts.get(b"after")
1475 after = opts.get(b"after")
1476 dryrun = opts.get(b"dry_run")
1476 dryrun = opts.get(b"dry_run")
1477 rev = opts.get(b'at_rev')
1477 rev = opts.get(b'at_rev')
1478 if rev:
1478 if rev:
1479 if not forget and not after:
1479 if not forget and not after:
1480 # TODO: Remove this restriction and make it also create the copy
1480 # TODO: Remove this restriction and make it also create the copy
1481 # targets (and remove the rename source if rename==True).
1481 # targets (and remove the rename source if rename==True).
1482 raise error.InputError(_(b'--at-rev requires --after'))
1482 raise error.InputError(_(b'--at-rev requires --after'))
1483 ctx = scmutil.revsingle(repo, rev)
1483 ctx = logcmdutil.revsingle(repo, rev)
1484 if len(ctx.parents()) > 1:
1484 if len(ctx.parents()) > 1:
1485 raise error.InputError(
1485 raise error.InputError(
1486 _(b'cannot mark/unmark copy in merge commit')
1486 _(b'cannot mark/unmark copy in merge commit')
1487 )
1487 )
1488 else:
1488 else:
1489 ctx = repo[None]
1489 ctx = repo[None]
1490
1490
1491 pctx = ctx.p1()
1491 pctx = ctx.p1()
1492
1492
1493 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1493 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1494
1494
1495 if forget:
1495 if forget:
1496 if ctx.rev() is None:
1496 if ctx.rev() is None:
1497 new_ctx = ctx
1497 new_ctx = ctx
1498 else:
1498 else:
1499 if len(ctx.parents()) > 1:
1499 if len(ctx.parents()) > 1:
1500 raise error.InputError(_(b'cannot unmark copy in merge commit'))
1500 raise error.InputError(_(b'cannot unmark copy in merge commit'))
1501 # avoid cycle context -> subrepo -> cmdutil
1501 # avoid cycle context -> subrepo -> cmdutil
1502 from . import context
1502 from . import context
1503
1503
1504 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1504 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1505 new_ctx = context.overlayworkingctx(repo)
1505 new_ctx = context.overlayworkingctx(repo)
1506 new_ctx.setbase(ctx.p1())
1506 new_ctx.setbase(ctx.p1())
1507 mergemod.graft(repo, ctx, wctx=new_ctx)
1507 mergemod.graft(repo, ctx, wctx=new_ctx)
1508
1508
1509 match = scmutil.match(ctx, pats, opts)
1509 match = scmutil.match(ctx, pats, opts)
1510
1510
1511 current_copies = ctx.p1copies()
1511 current_copies = ctx.p1copies()
1512 current_copies.update(ctx.p2copies())
1512 current_copies.update(ctx.p2copies())
1513
1513
1514 uipathfn = scmutil.getuipathfn(repo)
1514 uipathfn = scmutil.getuipathfn(repo)
1515 for f in ctx.walk(match):
1515 for f in ctx.walk(match):
1516 if f in current_copies:
1516 if f in current_copies:
1517 new_ctx[f].markcopied(None)
1517 new_ctx[f].markcopied(None)
1518 elif match.exact(f):
1518 elif match.exact(f):
1519 ui.warn(
1519 ui.warn(
1520 _(
1520 _(
1521 b'%s: not unmarking as copy - file is not marked as copied\n'
1521 b'%s: not unmarking as copy - file is not marked as copied\n'
1522 )
1522 )
1523 % uipathfn(f)
1523 % uipathfn(f)
1524 )
1524 )
1525
1525
1526 if ctx.rev() is not None:
1526 if ctx.rev() is not None:
1527 with repo.lock():
1527 with repo.lock():
1528 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1528 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1529 new_node = mem_ctx.commit()
1529 new_node = mem_ctx.commit()
1530
1530
1531 if repo.dirstate.p1() == ctx.node():
1531 if repo.dirstate.p1() == ctx.node():
1532 with repo.dirstate.parentchange():
1532 with repo.dirstate.parentchange():
1533 scmutil.movedirstate(repo, repo[new_node])
1533 scmutil.movedirstate(repo, repo[new_node])
1534 replacements = {ctx.node(): [new_node]}
1534 replacements = {ctx.node(): [new_node]}
1535 scmutil.cleanupnodes(
1535 scmutil.cleanupnodes(
1536 repo, replacements, b'uncopy', fixphase=True
1536 repo, replacements, b'uncopy', fixphase=True
1537 )
1537 )
1538
1538
1539 return
1539 return
1540
1540
1541 pats = scmutil.expandpats(pats)
1541 pats = scmutil.expandpats(pats)
1542 if not pats:
1542 if not pats:
1543 raise error.InputError(_(b'no source or destination specified'))
1543 raise error.InputError(_(b'no source or destination specified'))
1544 if len(pats) == 1:
1544 if len(pats) == 1:
1545 raise error.InputError(_(b'no destination specified'))
1545 raise error.InputError(_(b'no destination specified'))
1546 dest = pats.pop()
1546 dest = pats.pop()
1547
1547
1548 def walkpat(pat):
1548 def walkpat(pat):
1549 srcs = []
1549 srcs = []
1550 # TODO: Inline and simplify the non-working-copy version of this code
1550 # TODO: Inline and simplify the non-working-copy version of this code
1551 # since it shares very little with the working-copy version of it.
1551 # since it shares very little with the working-copy version of it.
1552 ctx_to_walk = ctx if ctx.rev() is None else pctx
1552 ctx_to_walk = ctx if ctx.rev() is None else pctx
1553 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1553 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1554 for abs in ctx_to_walk.walk(m):
1554 for abs in ctx_to_walk.walk(m):
1555 rel = uipathfn(abs)
1555 rel = uipathfn(abs)
1556 exact = m.exact(abs)
1556 exact = m.exact(abs)
1557 if abs not in ctx:
1557 if abs not in ctx:
1558 if abs in pctx:
1558 if abs in pctx:
1559 if not after:
1559 if not after:
1560 if exact:
1560 if exact:
1561 ui.warn(
1561 ui.warn(
1562 _(
1562 _(
1563 b'%s: not copying - file has been marked '
1563 b'%s: not copying - file has been marked '
1564 b'for remove\n'
1564 b'for remove\n'
1565 )
1565 )
1566 % rel
1566 % rel
1567 )
1567 )
1568 continue
1568 continue
1569 else:
1569 else:
1570 if exact:
1570 if exact:
1571 ui.warn(
1571 ui.warn(
1572 _(b'%s: not copying - file is not managed\n') % rel
1572 _(b'%s: not copying - file is not managed\n') % rel
1573 )
1573 )
1574 continue
1574 continue
1575
1575
1576 # abs: hgsep
1576 # abs: hgsep
1577 # rel: ossep
1577 # rel: ossep
1578 srcs.append((abs, rel, exact))
1578 srcs.append((abs, rel, exact))
1579 return srcs
1579 return srcs
1580
1580
1581 if ctx.rev() is not None:
1581 if ctx.rev() is not None:
1582 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1582 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1583 absdest = pathutil.canonpath(repo.root, cwd, dest)
1583 absdest = pathutil.canonpath(repo.root, cwd, dest)
1584 if ctx.hasdir(absdest):
1584 if ctx.hasdir(absdest):
1585 raise error.InputError(
1585 raise error.InputError(
1586 _(b'%s: --at-rev does not support a directory as destination')
1586 _(b'%s: --at-rev does not support a directory as destination')
1587 % uipathfn(absdest)
1587 % uipathfn(absdest)
1588 )
1588 )
1589 if absdest not in ctx:
1589 if absdest not in ctx:
1590 raise error.InputError(
1590 raise error.InputError(
1591 _(b'%s: copy destination does not exist in %s')
1591 _(b'%s: copy destination does not exist in %s')
1592 % (uipathfn(absdest), ctx)
1592 % (uipathfn(absdest), ctx)
1593 )
1593 )
1594
1594
1595 # avoid cycle context -> subrepo -> cmdutil
1595 # avoid cycle context -> subrepo -> cmdutil
1596 from . import context
1596 from . import context
1597
1597
1598 copylist = []
1598 copylist = []
1599 for pat in pats:
1599 for pat in pats:
1600 srcs = walkpat(pat)
1600 srcs = walkpat(pat)
1601 if not srcs:
1601 if not srcs:
1602 continue
1602 continue
1603 for abs, rel, exact in srcs:
1603 for abs, rel, exact in srcs:
1604 copylist.append(abs)
1604 copylist.append(abs)
1605
1605
1606 if not copylist:
1606 if not copylist:
1607 raise error.InputError(_(b'no files to copy'))
1607 raise error.InputError(_(b'no files to copy'))
1608 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1608 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1609 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1609 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1610 # existing functions below.
1610 # existing functions below.
1611 if len(copylist) != 1:
1611 if len(copylist) != 1:
1612 raise error.InputError(_(b'--at-rev requires a single source'))
1612 raise error.InputError(_(b'--at-rev requires a single source'))
1613
1613
1614 new_ctx = context.overlayworkingctx(repo)
1614 new_ctx = context.overlayworkingctx(repo)
1615 new_ctx.setbase(ctx.p1())
1615 new_ctx.setbase(ctx.p1())
1616 mergemod.graft(repo, ctx, wctx=new_ctx)
1616 mergemod.graft(repo, ctx, wctx=new_ctx)
1617
1617
1618 new_ctx.markcopied(absdest, copylist[0])
1618 new_ctx.markcopied(absdest, copylist[0])
1619
1619
1620 with repo.lock():
1620 with repo.lock():
1621 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1621 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1622 new_node = mem_ctx.commit()
1622 new_node = mem_ctx.commit()
1623
1623
1624 if repo.dirstate.p1() == ctx.node():
1624 if repo.dirstate.p1() == ctx.node():
1625 with repo.dirstate.parentchange():
1625 with repo.dirstate.parentchange():
1626 scmutil.movedirstate(repo, repo[new_node])
1626 scmutil.movedirstate(repo, repo[new_node])
1627 replacements = {ctx.node(): [new_node]}
1627 replacements = {ctx.node(): [new_node]}
1628 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1628 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1629
1629
1630 return
1630 return
1631
1631
1632 # abssrc: hgsep
1632 # abssrc: hgsep
1633 # relsrc: ossep
1633 # relsrc: ossep
1634 # otarget: ossep
1634 # otarget: ossep
1635 def copyfile(abssrc, relsrc, otarget, exact):
1635 def copyfile(abssrc, relsrc, otarget, exact):
1636 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1636 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1637 if b'/' in abstarget:
1637 if b'/' in abstarget:
1638 # We cannot normalize abstarget itself, this would prevent
1638 # We cannot normalize abstarget itself, this would prevent
1639 # case only renames, like a => A.
1639 # case only renames, like a => A.
1640 abspath, absname = abstarget.rsplit(b'/', 1)
1640 abspath, absname = abstarget.rsplit(b'/', 1)
1641 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1641 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1642 reltarget = repo.pathto(abstarget, cwd)
1642 reltarget = repo.pathto(abstarget, cwd)
1643 target = repo.wjoin(abstarget)
1643 target = repo.wjoin(abstarget)
1644 src = repo.wjoin(abssrc)
1644 src = repo.wjoin(abssrc)
1645 entry = repo.dirstate.get_entry(abstarget)
1645 entry = repo.dirstate.get_entry(abstarget)
1646
1646
1647 already_commited = entry.tracked and not entry.added
1647 already_commited = entry.tracked and not entry.added
1648
1648
1649 scmutil.checkportable(ui, abstarget)
1649 scmutil.checkportable(ui, abstarget)
1650
1650
1651 # check for collisions
1651 # check for collisions
1652 prevsrc = targets.get(abstarget)
1652 prevsrc = targets.get(abstarget)
1653 if prevsrc is not None:
1653 if prevsrc is not None:
1654 ui.warn(
1654 ui.warn(
1655 _(b'%s: not overwriting - %s collides with %s\n')
1655 _(b'%s: not overwriting - %s collides with %s\n')
1656 % (
1656 % (
1657 reltarget,
1657 reltarget,
1658 repo.pathto(abssrc, cwd),
1658 repo.pathto(abssrc, cwd),
1659 repo.pathto(prevsrc, cwd),
1659 repo.pathto(prevsrc, cwd),
1660 )
1660 )
1661 )
1661 )
1662 return True # report a failure
1662 return True # report a failure
1663
1663
1664 # check for overwrites
1664 # check for overwrites
1665 exists = os.path.lexists(target)
1665 exists = os.path.lexists(target)
1666 samefile = False
1666 samefile = False
1667 if exists and abssrc != abstarget:
1667 if exists and abssrc != abstarget:
1668 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1668 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1669 abstarget
1669 abstarget
1670 ):
1670 ):
1671 if not rename:
1671 if not rename:
1672 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1672 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1673 return True # report a failure
1673 return True # report a failure
1674 exists = False
1674 exists = False
1675 samefile = True
1675 samefile = True
1676
1676
1677 if not after and exists or after and already_commited:
1677 if not after and exists or after and already_commited:
1678 if not opts[b'force']:
1678 if not opts[b'force']:
1679 if already_commited:
1679 if already_commited:
1680 msg = _(b'%s: not overwriting - file already committed\n')
1680 msg = _(b'%s: not overwriting - file already committed\n')
1681 if after:
1681 if after:
1682 flags = b'--after --force'
1682 flags = b'--after --force'
1683 else:
1683 else:
1684 flags = b'--force'
1684 flags = b'--force'
1685 if rename:
1685 if rename:
1686 hint = (
1686 hint = (
1687 _(
1687 _(
1688 b"('hg rename %s' to replace the file by "
1688 b"('hg rename %s' to replace the file by "
1689 b'recording a rename)\n'
1689 b'recording a rename)\n'
1690 )
1690 )
1691 % flags
1691 % flags
1692 )
1692 )
1693 else:
1693 else:
1694 hint = (
1694 hint = (
1695 _(
1695 _(
1696 b"('hg copy %s' to replace the file by "
1696 b"('hg copy %s' to replace the file by "
1697 b'recording a copy)\n'
1697 b'recording a copy)\n'
1698 )
1698 )
1699 % flags
1699 % flags
1700 )
1700 )
1701 else:
1701 else:
1702 msg = _(b'%s: not overwriting - file exists\n')
1702 msg = _(b'%s: not overwriting - file exists\n')
1703 if rename:
1703 if rename:
1704 hint = _(
1704 hint = _(
1705 b"('hg rename --after' to record the rename)\n"
1705 b"('hg rename --after' to record the rename)\n"
1706 )
1706 )
1707 else:
1707 else:
1708 hint = _(b"('hg copy --after' to record the copy)\n")
1708 hint = _(b"('hg copy --after' to record the copy)\n")
1709 ui.warn(msg % reltarget)
1709 ui.warn(msg % reltarget)
1710 ui.warn(hint)
1710 ui.warn(hint)
1711 return True # report a failure
1711 return True # report a failure
1712
1712
1713 if after:
1713 if after:
1714 if not exists:
1714 if not exists:
1715 if rename:
1715 if rename:
1716 ui.warn(
1716 ui.warn(
1717 _(b'%s: not recording move - %s does not exist\n')
1717 _(b'%s: not recording move - %s does not exist\n')
1718 % (relsrc, reltarget)
1718 % (relsrc, reltarget)
1719 )
1719 )
1720 else:
1720 else:
1721 ui.warn(
1721 ui.warn(
1722 _(b'%s: not recording copy - %s does not exist\n')
1722 _(b'%s: not recording copy - %s does not exist\n')
1723 % (relsrc, reltarget)
1723 % (relsrc, reltarget)
1724 )
1724 )
1725 return True # report a failure
1725 return True # report a failure
1726 elif not dryrun:
1726 elif not dryrun:
1727 try:
1727 try:
1728 if exists:
1728 if exists:
1729 os.unlink(target)
1729 os.unlink(target)
1730 targetdir = os.path.dirname(target) or b'.'
1730 targetdir = os.path.dirname(target) or b'.'
1731 if not os.path.isdir(targetdir):
1731 if not os.path.isdir(targetdir):
1732 os.makedirs(targetdir)
1732 os.makedirs(targetdir)
1733 if samefile:
1733 if samefile:
1734 tmp = target + b"~hgrename"
1734 tmp = target + b"~hgrename"
1735 os.rename(src, tmp)
1735 os.rename(src, tmp)
1736 os.rename(tmp, target)
1736 os.rename(tmp, target)
1737 else:
1737 else:
1738 # Preserve stat info on renames, not on copies; this matches
1738 # Preserve stat info on renames, not on copies; this matches
1739 # Linux CLI behavior.
1739 # Linux CLI behavior.
1740 util.copyfile(src, target, copystat=rename)
1740 util.copyfile(src, target, copystat=rename)
1741 srcexists = True
1741 srcexists = True
1742 except IOError as inst:
1742 except IOError as inst:
1743 if inst.errno == errno.ENOENT:
1743 if inst.errno == errno.ENOENT:
1744 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1744 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1745 srcexists = False
1745 srcexists = False
1746 else:
1746 else:
1747 ui.warn(
1747 ui.warn(
1748 _(b'%s: cannot copy - %s\n')
1748 _(b'%s: cannot copy - %s\n')
1749 % (relsrc, encoding.strtolocal(inst.strerror))
1749 % (relsrc, encoding.strtolocal(inst.strerror))
1750 )
1750 )
1751 return True # report a failure
1751 return True # report a failure
1752
1752
1753 if ui.verbose or not exact:
1753 if ui.verbose or not exact:
1754 if rename:
1754 if rename:
1755 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1755 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1756 else:
1756 else:
1757 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1757 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1758
1758
1759 targets[abstarget] = abssrc
1759 targets[abstarget] = abssrc
1760
1760
1761 # fix up dirstate
1761 # fix up dirstate
1762 scmutil.dirstatecopy(
1762 scmutil.dirstatecopy(
1763 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1763 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1764 )
1764 )
1765 if rename and not dryrun:
1765 if rename and not dryrun:
1766 if not after and srcexists and not samefile:
1766 if not after and srcexists and not samefile:
1767 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1767 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1768 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1768 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1769 ctx.forget([abssrc])
1769 ctx.forget([abssrc])
1770
1770
1771 # pat: ossep
1771 # pat: ossep
1772 # dest ossep
1772 # dest ossep
1773 # srcs: list of (hgsep, hgsep, ossep, bool)
1773 # srcs: list of (hgsep, hgsep, ossep, bool)
1774 # return: function that takes hgsep and returns ossep
1774 # return: function that takes hgsep and returns ossep
1775 def targetpathfn(pat, dest, srcs):
1775 def targetpathfn(pat, dest, srcs):
1776 if os.path.isdir(pat):
1776 if os.path.isdir(pat):
1777 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1777 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1778 abspfx = util.localpath(abspfx)
1778 abspfx = util.localpath(abspfx)
1779 if destdirexists:
1779 if destdirexists:
1780 striplen = len(os.path.split(abspfx)[0])
1780 striplen = len(os.path.split(abspfx)[0])
1781 else:
1781 else:
1782 striplen = len(abspfx)
1782 striplen = len(abspfx)
1783 if striplen:
1783 if striplen:
1784 striplen += len(pycompat.ossep)
1784 striplen += len(pycompat.ossep)
1785 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1785 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1786 elif destdirexists:
1786 elif destdirexists:
1787 res = lambda p: os.path.join(
1787 res = lambda p: os.path.join(
1788 dest, os.path.basename(util.localpath(p))
1788 dest, os.path.basename(util.localpath(p))
1789 )
1789 )
1790 else:
1790 else:
1791 res = lambda p: dest
1791 res = lambda p: dest
1792 return res
1792 return res
1793
1793
1794 # pat: ossep
1794 # pat: ossep
1795 # dest ossep
1795 # dest ossep
1796 # srcs: list of (hgsep, hgsep, ossep, bool)
1796 # srcs: list of (hgsep, hgsep, ossep, bool)
1797 # return: function that takes hgsep and returns ossep
1797 # return: function that takes hgsep and returns ossep
# return: function that takes an hg-separated path and returns an os-separated
# target path.
# NOTE(review): this is a closure — it reads ``repo``, ``cwd`` and
# ``destdirexists`` from the enclosing copy/rename helper (not visible in
# this chunk); confirm against the enclosing function before moving it.
def targetpathafterfn(pat, dest, srcs):
    if matchmod.patkind(pat):
        # a mercurial pattern: flatten every source into dest by basename
        res = lambda p: os.path.join(
            dest, os.path.basename(util.localpath(p))
        )
    else:
        abspfx = pathutil.canonpath(repo.root, cwd, pat)
        if len(abspfx) < len(srcs[0][0]):
            # A directory. Either the target path contains the last
            # component of the source path or it does not.
            def evalpath(striplen):
                # Count how many sources already exist under ``dest`` when
                # their first ``striplen`` characters are stripped; used to
                # pick the stripping depth that best matches an earlier copy.
                score = 0
                for s in srcs:
                    t = os.path.join(dest, util.localpath(s[0])[striplen:])
                    if os.path.lexists(t):
                        score += 1
                return score

            abspfx = util.localpath(abspfx)
            striplen = len(abspfx)
            if striplen:
                # also strip the path separator following the prefix
                striplen += len(pycompat.ossep)
            if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                # dest already contains the last component of the source
                # directory; compare both candidate strip depths and keep
                # whichever matches more existing files.
                score = evalpath(striplen)
                striplen1 = len(os.path.split(abspfx)[0])
                if striplen1:
                    striplen1 += len(pycompat.ossep)
                if evalpath(striplen1) > score:
                    striplen = striplen1
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        else:
            # a file
            if destdirexists:
                res = lambda p: os.path.join(
                    dest, os.path.basename(util.localpath(p))
                )
            else:
                # single file to a non-directory target: dest is the full name
                res = lambda p: dest
    return res
1838
1838
1839 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1839 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1840 if not destdirexists:
1840 if not destdirexists:
1841 if len(pats) > 1 or matchmod.patkind(pats[0]):
1841 if len(pats) > 1 or matchmod.patkind(pats[0]):
1842 raise error.InputError(
1842 raise error.InputError(
1843 _(
1843 _(
1844 b'with multiple sources, destination must be an '
1844 b'with multiple sources, destination must be an '
1845 b'existing directory'
1845 b'existing directory'
1846 )
1846 )
1847 )
1847 )
1848 if util.endswithsep(dest):
1848 if util.endswithsep(dest):
1849 raise error.InputError(
1849 raise error.InputError(
1850 _(b'destination %s is not a directory') % dest
1850 _(b'destination %s is not a directory') % dest
1851 )
1851 )
1852
1852
1853 tfn = targetpathfn
1853 tfn = targetpathfn
1854 if after:
1854 if after:
1855 tfn = targetpathafterfn
1855 tfn = targetpathafterfn
1856 copylist = []
1856 copylist = []
1857 for pat in pats:
1857 for pat in pats:
1858 srcs = walkpat(pat)
1858 srcs = walkpat(pat)
1859 if not srcs:
1859 if not srcs:
1860 continue
1860 continue
1861 copylist.append((tfn(pat, dest, srcs), srcs))
1861 copylist.append((tfn(pat, dest, srcs), srcs))
1862 if not copylist:
1862 if not copylist:
1863 hint = None
1863 hint = None
1864 if rename:
1864 if rename:
1865 hint = _(b'maybe you meant to use --after --at-rev=.')
1865 hint = _(b'maybe you meant to use --after --at-rev=.')
1866 raise error.InputError(_(b'no files to copy'), hint=hint)
1866 raise error.InputError(_(b'no files to copy'), hint=hint)
1867
1867
1868 errors = 0
1868 errors = 0
1869 for targetpath, srcs in copylist:
1869 for targetpath, srcs in copylist:
1870 for abssrc, relsrc, exact in srcs:
1870 for abssrc, relsrc, exact in srcs:
1871 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1871 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1872 errors += 1
1872 errors += 1
1873
1873
1874 return errors != 0
1874 return errors != 0
1875
1875
1876
1876
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = []  # run before commit
extrapostimport = []  # run after commit
# mapping from identifier to actual import function
#
# 'preimport' hooks are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1897
1897
1898
1898
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` tuple: a status message, the new
    changeset node (or None), and whether hunks were rejected in --partial
    mode.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    tmpname = patchdata.get(b'filename')
    message = patchdata.get(b'message')
    # command-line options win over values parsed from the patch header
    user = opts.get(b'user') or patchdata.get(b'user')
    date = opts.get(b'date') or patchdata.get(b'date')
    branch = patchdata.get(b'branch')
    nodeid = patchdata.get(b'nodeid')
    p1 = patchdata.get(b'p1')
    p2 = patchdata.get(b'p2')

    nocommit = opts.get(b'no_commit')
    importbranch = opts.get(b'import_branch')
    update = not opts.get(b'bypass')
    strip = opts[b"strip"]
    prefix = opts[b"prefix"]
    sim = float(opts.get(b'similarity') or 0)

    if not tmpname:
        # nothing extracted from the patch: signal "skipped" to the caller
        return None, None, False

    rejects = False

    # message priority: explicit -m/-l, then the patch header, then an editor
    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug(b'message:\n%s\n' % (message or b''))

    if len(parents) == 1:
        parents.append(repo[nullrev])
    if opts.get(b'exact'):
        if not nodeid or not p1:
            raise error.InputError(_(b'not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullrev]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullrev]
        except error.RepoError:
            p1, p2 = parents
        if p2.rev() == nullrev:
            ui.warn(
                _(
                    b"warning: import the patch as a normal revision\n"
                    b"(use --exact to import the patch as a merge)\n"
                )
            )
    else:
        p1, p2 = parents

    n = None
    if update:
        # working-directory path: apply the patch to the checkout, then commit
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get(b'exact') or importbranch:
            repo.dirstate.setbranch(branch or b'default')

        partial = opts.get(b'partial', False)
        files = set()
        try:
            patch.patch(
                ui,
                repo,
                tmpname,
                strip=strip,
                prefix=prefix,
                files=files,
                eolmode=None,
                similarity=sim / 100.0,
            )
        except error.PatchError as e:
            if not partial:
                raise error.Abort(pycompat.bytestr(e))
            if partial:
                # with --partial, failed hunks are recorded but not fatal
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get(b'exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], b'import.normal')
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
            extra = {}
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                overrides[(b'ui', b'allowemptycommit')] = True
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = repo.commit(
                    message, user, date, match=m, editor=editor, extra=extra
                )
            for idfunc in extrapostimport:
                extrapostimportmap[idfunc](repo[n])
    else:
        # --bypass path: build the commit in memory without touching the
        # working directory
        if opts.get(b'exact') or importbranch:
            branch = branch or b'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(
                    ui,
                    repo,
                    p1,
                    store,
                    tmpname,
                    strip,
                    prefix,
                    files,
                    eolmode=None,
                )
            except error.PatchError as e:
                raise error.Abort(stringutil.forcebytestr(e))
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=b'import.bypass')
            memctx = context.memctx(
                repo,
                (p1.node(), p2.node()),
                message,
                files=files,
                filectxfn=store,
                user=user,
                date=date,
                branch=branch,
                editor=editor,
            )

            overrides = {}
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = memctx.commit()
        finally:
            store.close()
    if opts.get(b'exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
    elif opts.get(b'exact') and (not n or hex(n) != nodeid):
        raise error.Abort(_(b'patch is damaged or loses information'))
    msg = _(b'applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _(b'created %s') % short(n)
    return msg, n, rejects
2100
2100
2101
2101
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header, or None;
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
2109
2109
2110
2110
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Emit one changeset as an "HG changeset patch" through formatter ``fm``.

    ``seqno`` is the 1-based position of this patch in the export batch and
    is only forwarded to the ``extraexport`` header hooks.  With
    ``switch_parent`` the diff base is the second parent instead of the
    first.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent by putting it first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = repo.nullid

    # header section: field-by-field so templated output gets structured data
    fm.context(ctx=ctx)
    fm.plain(b'# HG changeset patch\n')
    fm.write(b'user', b'# User %s\n', ctx.user())
    fm.plain(b'# Date %d %d\n' % ctx.date())
    fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
    fm.condwrite(
        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
    )
    fm.write(b'node', b'# Node ID %s\n', hex(node))
    fm.plain(b'# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain(b'# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain(b'# %s\n' % header)

    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
    fm.plain(b'\n')

    if fm.isplain():
        # plain output: stream labelled chunks straight through
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        # structured output: ship the whole diff as one data field
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
2154
2154
2155
2155
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Export changesets to stdout or a single file"""
    # Destinations like b'<unnamed>' are placeholders, not file names, so
    # they are not echoed with --verbose; the test is invariant per call.
    announce = not dest.startswith(b'<')
    seqno = 0
    for rev in revs:
        seqno += 1
        if announce:
            repo.ui.note(b"%s\n" % dest)
        fm.startitem()
        _exportsingle(repo, repo[rev], fm, match, switch_parent, seqno, diffopts)
2164
2164
2165
2165
def _exportfntemplate(
    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
):
    """Export changesets to possibly multiple files"""
    # Pass 1: expand the filename template for every revision and group
    # revisions by resulting file name, preserving first-seen order.
    nrevs = len(revs)
    width = max(len(str(rev)) for rev in revs)
    grouped = util.sortdict()  # filename: [(seqno, rev), ...]
    seqno = 0
    for rev in revs:
        seqno += 1
        name = makefilename(
            repo[rev], fntemplate, total=nrevs, seqno=seqno, revwidth=width
        )
        grouped.setdefault(name, []).append((seqno, rev))

    # Pass 2: write each group of patches to its destination file.
    for name, entries in grouped.items():
        with formatter.maybereopen(basefm, name) as fm:
            repo.ui.note(b"%s\n" % name)
            for seqno, rev in entries:
                fm.startitem()
                _exportsingle(
                    repo, repo[rev], fm, match, switch_parent, seqno, diffopts
                )
2190
2190
2191
2191
def _prefetchchangedfiles(repo, revs, match):
    """Batch-prefetch every file touched by ``revs``.

    When ``match`` is given, only files it accepts are considered.
    """
    changed = {
        f
        for rev in revs
        for f in repo[rev].files()
        if not match or match(f)
    }
    # Prefetch with one narrowed matcher shared by all revisions.
    filematcher = scmutil.matchfiles(repo, changed)
    scmutil.prefetchfiles(repo, [(rev, filematcher) for rev in revs])
2201
2201
2202
2202
def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    """export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      basefm: A formatter to which patches should be written.
      fntemplate: An optional string to use for generating patch file names.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Otherwise: All revs will be written to basefm.
    """
    _prefetchchangedfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(
            repo, revs, basefm, fntemplate, switch_parent, opts, match
        )
    else:
        _exportfile(
            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
        )
2244
2244
2245
2245
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Export changesets to the given file stream"""
    _prefetchchangedfiles(repo, revs, match)

    # Report the stream's file name with --verbose when it has one.
    destname = getattr(fp, 'name', b'<unnamed>')
    fm = formatter.formatter(repo.ui, fp, b'export', {})
    with fm:
        _exportfile(repo, revs, fm, destname, switch_parent, opts, match)
2252 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2252 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2253
2253
2254
2254
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    ``index`` (optional) is printed as a leading ordinal.  Fields are emitted
    one by one so templated/structured formatters get named data.
    """
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # successors are omitted entirely when the marker has none (prune marker)
    fm.condwrite(
        succs,
        b'succnodes',
        b'%s ',
        fm.formatlist(map(hex, succs), name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    # copy before mutating: don't drop 'date' from the marker's own metadata
    meta = marker.metadata().copy()
    meta.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')
2285
2285
2286
2286
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    matching = repo.revs(b'date(%s)', date)
    try:
        tiprev = matching.max()
    except ValueError:
        # max() raises ValueError when the revset matched nothing
        raise error.InputError(_(b"revision matching date not found"))

    when = dateutil.datestr(repo[tiprev].date())
    ui.status(_(b"found revision %d from %s\n") % (tiprev, when))
    return b'%d' % tiprev
2300
2300
2301
2301
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    """Schedule files matched by ``match`` for addition, recursing into
    subrepositories.

    With ``explicitonly``, only exactly-named files (no patterns) are added.
    Returns the list of files that could not be added.
    """
    # files rejected by the matcher or by wctx.add()
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    # case-collision auditor, only active when portability alerts are on
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        exact = match.exact(f)
        # exact names always qualify; pattern matches only when the file is
        # untracked and still exists on disk (and explicitonly is off)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    # recurse into subrepositories; with --subrepos pattern matches are
    # honored there too, otherwise only exact names are
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            if opts.get('subrepos'):
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # only report rejections the user named explicitly
        bad.extend(f for f in rejected if f in match.files())
    return bad
2360
2360
2361
2361
def addwebdirpath(repo, serverpath, webconf):
    """Register ``repo.root`` under ``serverpath`` in ``webconf``, then do
    the same for every subrepository recorded by revisions touching
    ``.hgsub``."""
    root = repo.root
    webconf[serverpath] = root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, root))

    # any revision that modified .hgsub may have introduced subrepos
    for rev in repo.revs(b'filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            sub = ctx.sub(subpath)
            sub.addwebdirpath(serverpath, webconf)
2370
2370
2371
2371
def forget(
    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    """Stop tracking files matched by ``match`` (files stay on disk).

    Returns a ``(bad, forgot)`` pair: ``bad`` lists files that could not
    be forgotten, ``forgot`` lists files scheduled to be forgotten.
    ``explicitonly`` restricts the operation to exact (non-pattern)
    matches, ``dryrun`` reports without touching the dirstate, and
    ``interactive`` prompts per file.  Raises ``error.InputError`` when
    ``dryrun`` and ``interactive`` are both set.
    """
    if dryrun and interactive:
        raise error.InputError(
            _(b"cannot specify both --dry-run and --interactive")
        )
    bad = []
    # record paths the matcher flags as bad while still delegating to the
    # matcher's own bad-file reporting
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # candidates are all tracked matches, whatever their state
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    # recurse into subrepositories, re-prefixing their results
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        try:
            subbad, subforgot = sub.forget(
                submatch,
                subprefix,
                subuipathfn,
                dryrun=dryrun,
                interactive=interactive,
            )
            bad.extend([subpath + b'/' + f for f in subbad])
            forgot.extend([subpath + b'/' + f for f in subforgot])
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(
                            _(
                                b'not removing %s: '
                                b'file is already untracked\n'
                            )
                            % uipathfn(f)
                        )
                    bad.append(f)

    if interactive:
        responses = _(
            b'[Ynsa?]'
            b'$$ &Yes, forget this file'
            b'$$ &No, skip this file'
            b'$$ &Skip remaining files'
            b'$$ Include &all remaining files'
            b'$$ &? (display help)'
        )
        # iterate over a copy: ``forget`` is mutated while prompting
        for filename in forget[:]:
            r = ui.promptchoice(
                _(b'forget %s %s') % (uipathfn(filename), responses)
            )
            if r == 4:  # ? — show help, then re-prompt until a real answer
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(
                        _(b'forget %s %s') % (uipathfn(filename), responses)
                    )
            if r == 0:  # yes
                continue
            elif r == 1:  # no
                forget.remove(filename)
            elif r == 2:  # Skip — drop this file and everything after it
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3:  # All — keep the remaining files as-is
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2471
2471
2472
2472
def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
    """List paths in ``ctx`` matched by ``m`` through formatter ``fm``.

    ``fmt`` is the per-path output template; ``subrepos`` forces recursion
    into subrepositories.  Returns 0 if at least one file was listed,
    1 otherwise.
    """
    ret = 1

    # size/flags require a filectx lookup, which is comparatively expensive
    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
    if fm.isplain() and not needsfctx:
        # Fast path. The speed-up comes from skipping the formatter, and batching
        # calls to ui.write.
        buf = []
        for f in ctx.matches(m):
            buf.append(fmt % uipathfn(f))
            if len(buf) > 100:
                # flush in batches to keep memory bounded
                ui.write(b''.join(buf))
                del buf[:]
            ret = 0
        if buf:
            ui.write(b''.join(buf))
    else:
        for f in ctx.matches(m):
            fm.startitem()
            fm.context(ctx=ctx)
            if needsfctx:
                fc = ctx[f]
                fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
            fm.data(path=f)
            fm.plain(fmt % uipathfn(f))
            ret = 0

    # recurse into subrepos matched explicitly or covered by the matcher
    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if (
                    sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
                    == 0
                ):
                    ret = 0
            except error.LookupError:
                ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    return ret
2519
2519
2520
2520
def remove(
    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
):
    """Stop tracking files matched by ``m`` and (usually) delete them.

    ``after`` only records files already deleted from disk; ``force``
    removes even modified/added files; ``subrepos`` recurses into
    subrepositories; ``dryrun`` reports without changing anything.
    ``warnings`` is an optional accumulator list: when provided (subrepo
    recursion), warnings are appended to it and the caller prints them;
    when ``None``, this call prints its own warnings at the end.
    Returns 0 on success, 1 if any file was skipped with a warning.
    """
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True  # top-level call: we own warning output
    else:
        warn = False  # nested call: caller emits the warnings

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(
        _(b'searching'), total=len(subs), unit=_(b'subrepos')
    )
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(
                    submatch,
                    subprefix,
                    subuipathfn,
                    after,
                    force,
                    subrepos,
                    dryrun,
                    warnings,
                ):
                    ret = 1
            except error.LookupError:
                warnings.append(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = pathutil.dirs(deleted)
    files = m.files()
    progress = ui.makeprogress(
        _(b'deleting'), total=len(files), unit=_(b'files')
    )
    for f in files:

        def insubrepo():
            # True when f lives under one of the working copy's subrepos
            for subpath in wctx.substate:
                if f.startswith(subpath + b'/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(
                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                )
            else:
                warnings.append(
                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                )
        # missing files will generate a warning elsewhere
        ret = 1
    progress.complete()

    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only record files already gone from disk
        list = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=len(remaining), unit=_(b'files')
        )
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in files):
                warnings.append(
                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                )
                ret = 1
        progress.complete()
    else:
        list = deleted + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
        )
        for f in modified:
            progress.increment()
            warnings.append(
                _(
                    b'not removing %s: file is modified (use -f'
                    b' to force removal)\n'
                )
                % uipathfn(f)
            )
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(
                _(
                    b"not removing %s: file has been marked for add"
                    b" (use 'hg forget' to undo add)\n"
                )
                % uipathfn(f)
            )
            ret = 1
        progress.complete()

    list = sorted(list)
    progress = ui.makeprogress(
        _(b'deleting'), total=len(list), unit=_(b'files')
    )
    for f in list:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in list:
                    if f in added:
                        continue  # we never unlink added files on remove
                    rmdir = repo.ui.configbool(
                        b'experimental', b'removeemptydirs'
                    )
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2670
2670
2671
2671
2672 def _catfmtneedsdata(fm):
2672 def _catfmtneedsdata(fm):
2673 return not fm.datahint() or b'data' in fm.datahint()
2673 return not fm.datahint() or b'data' in fm.datahint()
2674
2674
2675
2675
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""

    # Fetching file data can be expensive (e.g. lfs), so skip it entirely
    # unless the formatter actually asked for the b'data' field.
    if _catfmtneedsdata(fm):
        data = ctx[path].data()
        if decode:
            data = ctx.repo().wwritedata(path, data)
    else:
        data = b''
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write(b'data', b'%s', data)
    fm.data(path=path)
2693
2693
2694
2694
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of files in ``ctx`` matched by ``matcher``.

    ``basefm`` is the formatter to emit through; ``fntemplate``, when
    non-empty, is a per-file output filename template instead of stdout.
    ``prefix`` is joined onto paths (used for subrepo recursion).
    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        # emit a single file, optionally into a templated output filename
        filename = None
        if fntemplate:
            filename = makefilename(
                ctx, fntemplate, pathname=os.path.join(prefix, path)
            )
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                if _catfmtneedsdata(basefm):
                    scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
                write(file)
                return 0
        except KeyError:
            # file not in this manifest; fall through to the full walk
            pass

    if _catfmtneedsdata(basefm):
        scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, subpath)
            if not sub.cat(
                submatch,
                basefm,
                fntemplate,
                subprefix,
                **pycompat.strkwargs(opts)
            ):
                err = 0
        except error.RepoLookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    return err
2755
2755
2756
2756
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get(b'date')
    if rawdate:
        opts[b'date'] = dateutil.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get(b'addremove'):
        dsguard = dirstateguard.dirstateguard(repo, b'commit')
    else:
        dsguard = None
    with dsguard or util.nullcontextmanager():
        if dsguard:
            relative = scmutil.anypats(pats, opts)
            uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
            addremove_result = scmutil.addremove(
                repo, matcher, b"", uipathfn, opts
            )
            if addremove_result != 0:
                raise error.Abort(
                    _(b"failed to mark all new/missing files as added/removed")
                )

        return commitfunc(ui, repo, message, matcher, opts)
2780
2780
2781
2781
def samefile(f, ctx1, ctx2):
    """Report whether file ``f`` is identical in ``ctx1`` and ``ctx2``.

    Identical means same content and same flags; a file absent from both
    contexts also counts as identical.
    """
    if f not in ctx1.manifest():
        # absent from ctx1: identical only if also absent from ctx2
        return f not in ctx2.manifest()
    if f not in ctx2.manifest():
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2792
2792
2793
2793
2794 def amend(ui, repo, old, extra, pats, opts):
2794 def amend(ui, repo, old, extra, pats, opts):
2795 # avoid cycle context -> subrepo -> cmdutil
2795 # avoid cycle context -> subrepo -> cmdutil
2796 from . import context
2796 from . import context
2797
2797
2798 # amend will reuse the existing user if not specified, but the obsolete
2798 # amend will reuse the existing user if not specified, but the obsolete
2799 # marker creation requires that the current user's name is specified.
2799 # marker creation requires that the current user's name is specified.
2800 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2800 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2801 ui.username() # raise exception if username not set
2801 ui.username() # raise exception if username not set
2802
2802
2803 ui.note(_(b'amending changeset %s\n') % old)
2803 ui.note(_(b'amending changeset %s\n') % old)
2804 base = old.p1()
2804 base = old.p1()
2805
2805
2806 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2806 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2807 # Participating changesets:
2807 # Participating changesets:
2808 #
2808 #
2809 # wctx o - workingctx that contains changes from working copy
2809 # wctx o - workingctx that contains changes from working copy
2810 # | to go into amending commit
2810 # | to go into amending commit
2811 # |
2811 # |
2812 # old o - changeset to amend
2812 # old o - changeset to amend
2813 # |
2813 # |
2814 # base o - first parent of the changeset to amend
2814 # base o - first parent of the changeset to amend
2815 wctx = repo[None]
2815 wctx = repo[None]
2816
2816
2817 # Copy to avoid mutating input
2817 # Copy to avoid mutating input
2818 extra = extra.copy()
2818 extra = extra.copy()
2819 # Update extra dict from amended commit (e.g. to preserve graft
2819 # Update extra dict from amended commit (e.g. to preserve graft
2820 # source)
2820 # source)
2821 extra.update(old.extra())
2821 extra.update(old.extra())
2822
2822
2823 # Also update it from the from the wctx
2823 # Also update it from the from the wctx
2824 extra.update(wctx.extra())
2824 extra.update(wctx.extra())
2825
2825
2826 # date-only change should be ignored?
2826 # date-only change should be ignored?
2827 datemaydiffer = resolve_commit_options(ui, opts)
2827 datemaydiffer = resolve_commit_options(ui, opts)
2828 opts = pycompat.byteskwargs(opts)
2828 opts = pycompat.byteskwargs(opts)
2829
2829
2830 date = old.date()
2830 date = old.date()
2831 if opts.get(b'date'):
2831 if opts.get(b'date'):
2832 date = dateutil.parsedate(opts.get(b'date'))
2832 date = dateutil.parsedate(opts.get(b'date'))
2833 user = opts.get(b'user') or old.user()
2833 user = opts.get(b'user') or old.user()
2834
2834
2835 if len(old.parents()) > 1:
2835 if len(old.parents()) > 1:
2836 # ctx.files() isn't reliable for merges, so fall back to the
2836 # ctx.files() isn't reliable for merges, so fall back to the
2837 # slower repo.status() method
2837 # slower repo.status() method
2838 st = base.status(old)
2838 st = base.status(old)
2839 files = set(st.modified) | set(st.added) | set(st.removed)
2839 files = set(st.modified) | set(st.added) | set(st.removed)
2840 else:
2840 else:
2841 files = set(old.files())
2841 files = set(old.files())
2842
2842
2843 # add/remove the files to the working copy if the "addremove" option
2843 # add/remove the files to the working copy if the "addremove" option
2844 # was specified.
2844 # was specified.
2845 matcher = scmutil.match(wctx, pats, opts)
2845 matcher = scmutil.match(wctx, pats, opts)
2846 relative = scmutil.anypats(pats, opts)
2846 relative = scmutil.anypats(pats, opts)
2847 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2847 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2848 if opts.get(b'addremove') and scmutil.addremove(
2848 if opts.get(b'addremove') and scmutil.addremove(
2849 repo, matcher, b"", uipathfn, opts
2849 repo, matcher, b"", uipathfn, opts
2850 ):
2850 ):
2851 raise error.Abort(
2851 raise error.Abort(
2852 _(b"failed to mark all new/missing files as added/removed")
2852 _(b"failed to mark all new/missing files as added/removed")
2853 )
2853 )
2854
2854
2855 # Check subrepos. This depends on in-place wctx._status update in
2855 # Check subrepos. This depends on in-place wctx._status update in
2856 # subrepo.precommit(). To minimize the risk of this hack, we do
2856 # subrepo.precommit(). To minimize the risk of this hack, we do
2857 # nothing if .hgsub does not exist.
2857 # nothing if .hgsub does not exist.
2858 if b'.hgsub' in wctx or b'.hgsub' in old:
2858 if b'.hgsub' in wctx or b'.hgsub' in old:
2859 subs, commitsubs, newsubstate = subrepoutil.precommit(
2859 subs, commitsubs, newsubstate = subrepoutil.precommit(
2860 ui, wctx, wctx._status, matcher
2860 ui, wctx, wctx._status, matcher
2861 )
2861 )
2862 # amend should abort if commitsubrepos is enabled
2862 # amend should abort if commitsubrepos is enabled
2863 assert not commitsubs
2863 assert not commitsubs
2864 if subs:
2864 if subs:
2865 subrepoutil.writestate(repo, newsubstate)
2865 subrepoutil.writestate(repo, newsubstate)
2866
2866
2867 ms = mergestatemod.mergestate.read(repo)
2867 ms = mergestatemod.mergestate.read(repo)
2868 mergeutil.checkunresolved(ms)
2868 mergeutil.checkunresolved(ms)
2869
2869
2870 filestoamend = {f for f in wctx.files() if matcher(f)}
2870 filestoamend = {f for f in wctx.files() if matcher(f)}
2871
2871
2872 changes = len(filestoamend) > 0
2872 changes = len(filestoamend) > 0
2873 if changes:
2873 if changes:
2874 # Recompute copies (avoid recording a -> b -> a)
2874 # Recompute copies (avoid recording a -> b -> a)
2875 copied = copies.pathcopies(base, wctx, matcher)
2875 copied = copies.pathcopies(base, wctx, matcher)
2876 if old.p2:
2876 if old.p2:
2877 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2877 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2878
2878
2879 # Prune files which were reverted by the updates: if old
2879 # Prune files which were reverted by the updates: if old
2880 # introduced file X and the file was renamed in the working
2880 # introduced file X and the file was renamed in the working
2881 # copy, then those two files are the same and
2881 # copy, then those two files are the same and
2882 # we can discard X from our list of files. Likewise if X
2882 # we can discard X from our list of files. Likewise if X
2883 # was removed, it's no longer relevant. If X is missing (aka
2883 # was removed, it's no longer relevant. If X is missing (aka
2884 # deleted), old X must be preserved.
2884 # deleted), old X must be preserved.
2885 files.update(filestoamend)
2885 files.update(filestoamend)
2886 files = [
2886 files = [
2887 f
2887 f
2888 for f in files
2888 for f in files
2889 if (f not in filestoamend or not samefile(f, wctx, base))
2889 if (f not in filestoamend or not samefile(f, wctx, base))
2890 ]
2890 ]
2891
2891
2892 def filectxfn(repo, ctx_, path):
2892 def filectxfn(repo, ctx_, path):
2893 try:
2893 try:
2894 # If the file being considered is not amongst the files
2894 # If the file being considered is not amongst the files
2895 # to be amended, we should return the file context from the
2895 # to be amended, we should return the file context from the
2896 # old changeset. This avoids issues when only some files in
2896 # old changeset. This avoids issues when only some files in
2897 # the working copy are being amended but there are also
2897 # the working copy are being amended but there are also
2898 # changes to other files from the old changeset.
2898 # changes to other files from the old changeset.
2899 if path not in filestoamend:
2899 if path not in filestoamend:
2900 return old.filectx(path)
2900 return old.filectx(path)
2901
2901
2902 # Return None for removed files.
2902 # Return None for removed files.
2903 if path in wctx.removed():
2903 if path in wctx.removed():
2904 return None
2904 return None
2905
2905
2906 fctx = wctx[path]
2906 fctx = wctx[path]
2907 flags = fctx.flags()
2907 flags = fctx.flags()
2908 mctx = context.memfilectx(
2908 mctx = context.memfilectx(
2909 repo,
2909 repo,
2910 ctx_,
2910 ctx_,
2911 fctx.path(),
2911 fctx.path(),
2912 fctx.data(),
2912 fctx.data(),
2913 islink=b'l' in flags,
2913 islink=b'l' in flags,
2914 isexec=b'x' in flags,
2914 isexec=b'x' in flags,
2915 copysource=copied.get(path),
2915 copysource=copied.get(path),
2916 )
2916 )
2917 return mctx
2917 return mctx
2918 except KeyError:
2918 except KeyError:
2919 return None
2919 return None
2920
2920
2921 else:
2921 else:
2922 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
2922 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
2923
2923
2924 # Use version of files as in the old cset
2924 # Use version of files as in the old cset
2925 def filectxfn(repo, ctx_, path):
2925 def filectxfn(repo, ctx_, path):
2926 try:
2926 try:
2927 return old.filectx(path)
2927 return old.filectx(path)
2928 except KeyError:
2928 except KeyError:
2929 return None
2929 return None
2930
2930
2931 # See if we got a message from -m or -l, if not, open the editor with
2931 # See if we got a message from -m or -l, if not, open the editor with
2932 # the message of the changeset to amend.
2932 # the message of the changeset to amend.
2933 message = logmessage(ui, opts)
2933 message = logmessage(ui, opts)
2934
2934
2935 editform = mergeeditform(old, b'commit.amend')
2935 editform = mergeeditform(old, b'commit.amend')
2936
2936
2937 if not message:
2937 if not message:
2938 message = old.description()
2938 message = old.description()
2939 # Default if message isn't provided and --edit is not passed is to
2939 # Default if message isn't provided and --edit is not passed is to
2940 # invoke editor, but allow --no-edit. If somehow we don't have any
2940 # invoke editor, but allow --no-edit. If somehow we don't have any
2941 # description, let's always start the editor.
2941 # description, let's always start the editor.
2942 doedit = not message or opts.get(b'edit') in [True, None]
2942 doedit = not message or opts.get(b'edit') in [True, None]
2943 else:
2943 else:
2944 # Default if message is provided is to not invoke editor, but allow
2944 # Default if message is provided is to not invoke editor, but allow
2945 # --edit.
2945 # --edit.
2946 doedit = opts.get(b'edit') is True
2946 doedit = opts.get(b'edit') is True
2947 editor = getcommiteditor(edit=doedit, editform=editform)
2947 editor = getcommiteditor(edit=doedit, editform=editform)
2948
2948
2949 pureextra = extra.copy()
2949 pureextra = extra.copy()
2950 extra[b'amend_source'] = old.hex()
2950 extra[b'amend_source'] = old.hex()
2951
2951
2952 new = context.memctx(
2952 new = context.memctx(
2953 repo,
2953 repo,
2954 parents=[base.node(), old.p2().node()],
2954 parents=[base.node(), old.p2().node()],
2955 text=message,
2955 text=message,
2956 files=files,
2956 files=files,
2957 filectxfn=filectxfn,
2957 filectxfn=filectxfn,
2958 user=user,
2958 user=user,
2959 date=date,
2959 date=date,
2960 extra=extra,
2960 extra=extra,
2961 editor=editor,
2961 editor=editor,
2962 )
2962 )
2963
2963
2964 newdesc = changelog.stripdesc(new.description())
2964 newdesc = changelog.stripdesc(new.description())
2965 if (
2965 if (
2966 (not changes)
2966 (not changes)
2967 and newdesc == old.description()
2967 and newdesc == old.description()
2968 and user == old.user()
2968 and user == old.user()
2969 and (date == old.date() or datemaydiffer)
2969 and (date == old.date() or datemaydiffer)
2970 and pureextra == old.extra()
2970 and pureextra == old.extra()
2971 ):
2971 ):
2972 # nothing changed. continuing here would create a new node
2972 # nothing changed. continuing here would create a new node
2973 # anyway because of the amend_source noise.
2973 # anyway because of the amend_source noise.
2974 #
2974 #
2975 # This not what we expect from amend.
2975 # This not what we expect from amend.
2976 return old.node()
2976 return old.node()
2977
2977
2978 commitphase = None
2978 commitphase = None
2979 if opts.get(b'secret'):
2979 if opts.get(b'secret'):
2980 commitphase = phases.secret
2980 commitphase = phases.secret
2981 newid = repo.commitctx(new)
2981 newid = repo.commitctx(new)
2982 ms.reset()
2982 ms.reset()
2983
2983
2984 with repo.dirstate.parentchange():
2984 with repo.dirstate.parentchange():
2985 # Reroute the working copy parent to the new changeset
2985 # Reroute the working copy parent to the new changeset
2986 repo.setparents(newid, repo.nullid)
2986 repo.setparents(newid, repo.nullid)
2987
2987
2988 # Fixing the dirstate because localrepo.commitctx does not update
2988 # Fixing the dirstate because localrepo.commitctx does not update
2989 # it. This is rather convenient because we did not need to update
2989 # it. This is rather convenient because we did not need to update
2990 # the dirstate for all the files in the new commit which commitctx
2990 # the dirstate for all the files in the new commit which commitctx
2991 # could have done if it updated the dirstate. Now, we can
2991 # could have done if it updated the dirstate. Now, we can
2992 # selectively update the dirstate only for the amended files.
2992 # selectively update the dirstate only for the amended files.
2993 dirstate = repo.dirstate
2993 dirstate = repo.dirstate
2994
2994
2995 # Update the state of the files which were added and modified in the
2995 # Update the state of the files which were added and modified in the
2996 # amend to "normal" in the dirstate. We need to use "normallookup" since
2996 # amend to "normal" in the dirstate. We need to use "normallookup" since
2997 # the files may have changed since the command started; using "normal"
2997 # the files may have changed since the command started; using "normal"
2998 # would mark them as clean but with uncommitted contents.
2998 # would mark them as clean but with uncommitted contents.
2999 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2999 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3000 for f in normalfiles:
3000 for f in normalfiles:
3001 dirstate.update_file(
3001 dirstate.update_file(
3002 f, p1_tracked=True, wc_tracked=True, possibly_dirty=True
3002 f, p1_tracked=True, wc_tracked=True, possibly_dirty=True
3003 )
3003 )
3004
3004
3005 # Update the state of files which were removed in the amend
3005 # Update the state of files which were removed in the amend
3006 # to "removed" in the dirstate.
3006 # to "removed" in the dirstate.
3007 removedfiles = set(wctx.removed()) & filestoamend
3007 removedfiles = set(wctx.removed()) & filestoamend
3008 for f in removedfiles:
3008 for f in removedfiles:
3009 dirstate.update_file(f, p1_tracked=False, wc_tracked=False)
3009 dirstate.update_file(f, p1_tracked=False, wc_tracked=False)
3010
3010
3011 mapping = {old.node(): (newid,)}
3011 mapping = {old.node(): (newid,)}
3012 obsmetadata = None
3012 obsmetadata = None
3013 if opts.get(b'note'):
3013 if opts.get(b'note'):
3014 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3014 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3015 backup = ui.configbool(b'rewrite', b'backup-bundle')
3015 backup = ui.configbool(b'rewrite', b'backup-bundle')
3016 scmutil.cleanupnodes(
3016 scmutil.cleanupnodes(
3017 repo,
3017 repo,
3018 mapping,
3018 mapping,
3019 b'amend',
3019 b'amend',
3020 metadata=obsmetadata,
3020 metadata=obsmetadata,
3021 fixphase=True,
3021 fixphase=True,
3022 targetphase=commitphase,
3022 targetphase=commitphase,
3023 backup=backup,
3023 backup=backup,
3024 )
3024 )
3025
3025
3026 return newid
3026 return newid
3027
3027
3028
3028
3029 def commiteditor(repo, ctx, subs, editform=b''):
3029 def commiteditor(repo, ctx, subs, editform=b''):
3030 if ctx.description():
3030 if ctx.description():
3031 return ctx.description()
3031 return ctx.description()
3032 return commitforceeditor(
3032 return commitforceeditor(
3033 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3033 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3034 )
3034 )
3035
3035
3036
3036
3037 def commitforceeditor(
3037 def commitforceeditor(
3038 repo,
3038 repo,
3039 ctx,
3039 ctx,
3040 subs,
3040 subs,
3041 finishdesc=None,
3041 finishdesc=None,
3042 extramsg=None,
3042 extramsg=None,
3043 editform=b'',
3043 editform=b'',
3044 unchangedmessagedetection=False,
3044 unchangedmessagedetection=False,
3045 ):
3045 ):
3046 if not extramsg:
3046 if not extramsg:
3047 extramsg = _(b"Leave message empty to abort commit.")
3047 extramsg = _(b"Leave message empty to abort commit.")
3048
3048
3049 forms = [e for e in editform.split(b'.') if e]
3049 forms = [e for e in editform.split(b'.') if e]
3050 forms.insert(0, b'changeset')
3050 forms.insert(0, b'changeset')
3051 templatetext = None
3051 templatetext = None
3052 while forms:
3052 while forms:
3053 ref = b'.'.join(forms)
3053 ref = b'.'.join(forms)
3054 if repo.ui.config(b'committemplate', ref):
3054 if repo.ui.config(b'committemplate', ref):
3055 templatetext = committext = buildcommittemplate(
3055 templatetext = committext = buildcommittemplate(
3056 repo, ctx, subs, extramsg, ref
3056 repo, ctx, subs, extramsg, ref
3057 )
3057 )
3058 break
3058 break
3059 forms.pop()
3059 forms.pop()
3060 else:
3060 else:
3061 committext = buildcommittext(repo, ctx, subs, extramsg)
3061 committext = buildcommittext(repo, ctx, subs, extramsg)
3062
3062
3063 # run editor in the repository root
3063 # run editor in the repository root
3064 olddir = encoding.getcwd()
3064 olddir = encoding.getcwd()
3065 os.chdir(repo.root)
3065 os.chdir(repo.root)
3066
3066
3067 # make in-memory changes visible to external process
3067 # make in-memory changes visible to external process
3068 tr = repo.currenttransaction()
3068 tr = repo.currenttransaction()
3069 repo.dirstate.write(tr)
3069 repo.dirstate.write(tr)
3070 pending = tr and tr.writepending() and repo.root
3070 pending = tr and tr.writepending() and repo.root
3071
3071
3072 editortext = repo.ui.edit(
3072 editortext = repo.ui.edit(
3073 committext,
3073 committext,
3074 ctx.user(),
3074 ctx.user(),
3075 ctx.extra(),
3075 ctx.extra(),
3076 editform=editform,
3076 editform=editform,
3077 pending=pending,
3077 pending=pending,
3078 repopath=repo.path,
3078 repopath=repo.path,
3079 action=b'commit',
3079 action=b'commit',
3080 )
3080 )
3081 text = editortext
3081 text = editortext
3082
3082
3083 # strip away anything below this special string (used for editors that want
3083 # strip away anything below this special string (used for editors that want
3084 # to display the diff)
3084 # to display the diff)
3085 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3085 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3086 if stripbelow:
3086 if stripbelow:
3087 text = text[: stripbelow.start()]
3087 text = text[: stripbelow.start()]
3088
3088
3089 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3089 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3090 os.chdir(olddir)
3090 os.chdir(olddir)
3091
3091
3092 if finishdesc:
3092 if finishdesc:
3093 text = finishdesc(text)
3093 text = finishdesc(text)
3094 if not text.strip():
3094 if not text.strip():
3095 raise error.InputError(_(b"empty commit message"))
3095 raise error.InputError(_(b"empty commit message"))
3096 if unchangedmessagedetection and editortext == templatetext:
3096 if unchangedmessagedetection and editortext == templatetext:
3097 raise error.InputError(_(b"commit message unchanged"))
3097 raise error.InputError(_(b"commit message unchanged"))
3098
3098
3099 return text
3099 return text
3100
3100
3101
3101
3102 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3102 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3103 ui = repo.ui
3103 ui = repo.ui
3104 spec = formatter.reference_templatespec(ref)
3104 spec = formatter.reference_templatespec(ref)
3105 t = logcmdutil.changesettemplater(ui, repo, spec)
3105 t = logcmdutil.changesettemplater(ui, repo, spec)
3106 t.t.cache.update(
3106 t.t.cache.update(
3107 (k, templater.unquotestring(v))
3107 (k, templater.unquotestring(v))
3108 for k, v in repo.ui.configitems(b'committemplate')
3108 for k, v in repo.ui.configitems(b'committemplate')
3109 )
3109 )
3110
3110
3111 if not extramsg:
3111 if not extramsg:
3112 extramsg = b'' # ensure that extramsg is string
3112 extramsg = b'' # ensure that extramsg is string
3113
3113
3114 ui.pushbuffer()
3114 ui.pushbuffer()
3115 t.show(ctx, extramsg=extramsg)
3115 t.show(ctx, extramsg=extramsg)
3116 return ui.popbuffer()
3116 return ui.popbuffer()
3117
3117
3118
3118
3119 def hgprefix(msg):
3119 def hgprefix(msg):
3120 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3120 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3121
3121
3122
3122
3123 def buildcommittext(repo, ctx, subs, extramsg):
3123 def buildcommittext(repo, ctx, subs, extramsg):
3124 edittext = []
3124 edittext = []
3125 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3125 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3126 if ctx.description():
3126 if ctx.description():
3127 edittext.append(ctx.description())
3127 edittext.append(ctx.description())
3128 edittext.append(b"")
3128 edittext.append(b"")
3129 edittext.append(b"") # Empty line between message and comments.
3129 edittext.append(b"") # Empty line between message and comments.
3130 edittext.append(
3130 edittext.append(
3131 hgprefix(
3131 hgprefix(
3132 _(
3132 _(
3133 b"Enter commit message."
3133 b"Enter commit message."
3134 b" Lines beginning with 'HG:' are removed."
3134 b" Lines beginning with 'HG:' are removed."
3135 )
3135 )
3136 )
3136 )
3137 )
3137 )
3138 edittext.append(hgprefix(extramsg))
3138 edittext.append(hgprefix(extramsg))
3139 edittext.append(b"HG: --")
3139 edittext.append(b"HG: --")
3140 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3140 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3141 if ctx.p2():
3141 if ctx.p2():
3142 edittext.append(hgprefix(_(b"branch merge")))
3142 edittext.append(hgprefix(_(b"branch merge")))
3143 if ctx.branch():
3143 if ctx.branch():
3144 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3144 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3145 if bookmarks.isactivewdirparent(repo):
3145 if bookmarks.isactivewdirparent(repo):
3146 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3146 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3147 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3147 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3148 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3148 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3149 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3149 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3150 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3150 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3151 if not added and not modified and not removed:
3151 if not added and not modified and not removed:
3152 edittext.append(hgprefix(_(b"no files changed")))
3152 edittext.append(hgprefix(_(b"no files changed")))
3153 edittext.append(b"")
3153 edittext.append(b"")
3154
3154
3155 return b"\n".join(edittext)
3155 return b"\n".join(edittext)
3156
3156
3157
3157
3158 def commitstatus(repo, node, branch, bheads=None, tip=None, opts=None):
3158 def commitstatus(repo, node, branch, bheads=None, tip=None, opts=None):
3159 if opts is None:
3159 if opts is None:
3160 opts = {}
3160 opts = {}
3161 ctx = repo[node]
3161 ctx = repo[node]
3162 parents = ctx.parents()
3162 parents = ctx.parents()
3163
3163
3164 if tip is not None and repo.changelog.tip() == tip:
3164 if tip is not None and repo.changelog.tip() == tip:
3165 # avoid reporting something like "committed new head" when
3165 # avoid reporting something like "committed new head" when
3166 # recommitting old changesets, and issue a helpful warning
3166 # recommitting old changesets, and issue a helpful warning
3167 # for most instances
3167 # for most instances
3168 repo.ui.warn(_(b"warning: commit already existed in the repository!\n"))
3168 repo.ui.warn(_(b"warning: commit already existed in the repository!\n"))
3169 elif (
3169 elif (
3170 not opts.get(b'amend')
3170 not opts.get(b'amend')
3171 and bheads
3171 and bheads
3172 and node not in bheads
3172 and node not in bheads
3173 and not any(
3173 and not any(
3174 p.node() in bheads and p.branch() == branch for p in parents
3174 p.node() in bheads and p.branch() == branch for p in parents
3175 )
3175 )
3176 ):
3176 ):
3177 repo.ui.status(_(b'created new head\n'))
3177 repo.ui.status(_(b'created new head\n'))
3178 # The message is not printed for initial roots. For the other
3178 # The message is not printed for initial roots. For the other
3179 # changesets, it is printed in the following situations:
3179 # changesets, it is printed in the following situations:
3180 #
3180 #
3181 # Par column: for the 2 parents with ...
3181 # Par column: for the 2 parents with ...
3182 # N: null or no parent
3182 # N: null or no parent
3183 # B: parent is on another named branch
3183 # B: parent is on another named branch
3184 # C: parent is a regular non head changeset
3184 # C: parent is a regular non head changeset
3185 # H: parent was a branch head of the current branch
3185 # H: parent was a branch head of the current branch
3186 # Msg column: whether we print "created new head" message
3186 # Msg column: whether we print "created new head" message
3187 # In the following, it is assumed that there already exists some
3187 # In the following, it is assumed that there already exists some
3188 # initial branch heads of the current branch, otherwise nothing is
3188 # initial branch heads of the current branch, otherwise nothing is
3189 # printed anyway.
3189 # printed anyway.
3190 #
3190 #
3191 # Par Msg Comment
3191 # Par Msg Comment
3192 # N N y additional topo root
3192 # N N y additional topo root
3193 #
3193 #
3194 # B N y additional branch root
3194 # B N y additional branch root
3195 # C N y additional topo head
3195 # C N y additional topo head
3196 # H N n usual case
3196 # H N n usual case
3197 #
3197 #
3198 # B B y weird additional branch root
3198 # B B y weird additional branch root
3199 # C B y branch merge
3199 # C B y branch merge
3200 # H B n merge with named branch
3200 # H B n merge with named branch
3201 #
3201 #
3202 # C C y additional head from merge
3202 # C C y additional head from merge
3203 # C H n merge with a head
3203 # C H n merge with a head
3204 #
3204 #
3205 # H H n head merge: head count decreases
3205 # H H n head merge: head count decreases
3206
3206
3207 if not opts.get(b'close_branch'):
3207 if not opts.get(b'close_branch'):
3208 for r in parents:
3208 for r in parents:
3209 if r.closesbranch() and r.branch() == branch:
3209 if r.closesbranch() and r.branch() == branch:
3210 repo.ui.status(
3210 repo.ui.status(
3211 _(b'reopening closed branch head %d\n') % r.rev()
3211 _(b'reopening closed branch head %d\n') % r.rev()
3212 )
3212 )
3213
3213
3214 if repo.ui.debugflag:
3214 if repo.ui.debugflag:
3215 repo.ui.write(
3215 repo.ui.write(
3216 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3216 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3217 )
3217 )
3218 elif repo.ui.verbose:
3218 elif repo.ui.verbose:
3219 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3219 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3220
3220
3221
3221
3222 def postcommitstatus(repo, pats, opts):
3222 def postcommitstatus(repo, pats, opts):
3223 return repo.status(match=scmutil.match(repo[None], pats, opts))
3223 return repo.status(match=scmutil.match(repo[None], pats, opts))
3224
3224
3225
3225
3226 def revert(ui, repo, ctx, *pats, **opts):
3226 def revert(ui, repo, ctx, *pats, **opts):
3227 opts = pycompat.byteskwargs(opts)
3227 opts = pycompat.byteskwargs(opts)
3228 parent, p2 = repo.dirstate.parents()
3228 parent, p2 = repo.dirstate.parents()
3229 node = ctx.node()
3229 node = ctx.node()
3230
3230
3231 mf = ctx.manifest()
3231 mf = ctx.manifest()
3232 if node == p2:
3232 if node == p2:
3233 parent = p2
3233 parent = p2
3234
3234
3235 # need all matching names in dirstate and manifest of target rev,
3235 # need all matching names in dirstate and manifest of target rev,
3236 # so have to walk both. do not print errors if files exist in one
3236 # so have to walk both. do not print errors if files exist in one
3237 # but not other. in both cases, filesets should be evaluated against
3237 # but not other. in both cases, filesets should be evaluated against
3238 # workingctx to get consistent result (issue4497). this means 'set:**'
3238 # workingctx to get consistent result (issue4497). this means 'set:**'
3239 # cannot be used to select missing files from target rev.
3239 # cannot be used to select missing files from target rev.
3240
3240
3241 # `names` is a mapping for all elements in working copy and target revision
3241 # `names` is a mapping for all elements in working copy and target revision
3242 # The mapping is in the form:
3242 # The mapping is in the form:
3243 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3243 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3244 names = {}
3244 names = {}
3245 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3245 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3246
3246
3247 with repo.wlock():
3247 with repo.wlock():
3248 ## filling of the `names` mapping
3248 ## filling of the `names` mapping
3249 # walk dirstate to fill `names`
3249 # walk dirstate to fill `names`
3250
3250
3251 interactive = opts.get(b'interactive', False)
3251 interactive = opts.get(b'interactive', False)
3252 wctx = repo[None]
3252 wctx = repo[None]
3253 m = scmutil.match(wctx, pats, opts)
3253 m = scmutil.match(wctx, pats, opts)
3254
3254
3255 # we'll need this later
3255 # we'll need this later
3256 targetsubs = sorted(s for s in wctx.substate if m(s))
3256 targetsubs = sorted(s for s in wctx.substate if m(s))
3257
3257
3258 if not m.always():
3258 if not m.always():
3259 matcher = matchmod.badmatch(m, lambda x, y: False)
3259 matcher = matchmod.badmatch(m, lambda x, y: False)
3260 for abs in wctx.walk(matcher):
3260 for abs in wctx.walk(matcher):
3261 names[abs] = m.exact(abs)
3261 names[abs] = m.exact(abs)
3262
3262
3263 # walk target manifest to fill `names`
3263 # walk target manifest to fill `names`
3264
3264
3265 def badfn(path, msg):
3265 def badfn(path, msg):
3266 if path in names:
3266 if path in names:
3267 return
3267 return
3268 if path in ctx.substate:
3268 if path in ctx.substate:
3269 return
3269 return
3270 path_ = path + b'/'
3270 path_ = path + b'/'
3271 for f in names:
3271 for f in names:
3272 if f.startswith(path_):
3272 if f.startswith(path_):
3273 return
3273 return
3274 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3274 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3275
3275
3276 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3276 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3277 if abs not in names:
3277 if abs not in names:
3278 names[abs] = m.exact(abs)
3278 names[abs] = m.exact(abs)
3279
3279
3280 # Find status of all file in `names`.
3280 # Find status of all file in `names`.
3281 m = scmutil.matchfiles(repo, names)
3281 m = scmutil.matchfiles(repo, names)
3282
3282
3283 changes = repo.status(
3283 changes = repo.status(
3284 node1=node, match=m, unknown=True, ignored=True, clean=True
3284 node1=node, match=m, unknown=True, ignored=True, clean=True
3285 )
3285 )
3286 else:
3286 else:
3287 changes = repo.status(node1=node, match=m)
3287 changes = repo.status(node1=node, match=m)
3288 for kind in changes:
3288 for kind in changes:
3289 for abs in kind:
3289 for abs in kind:
3290 names[abs] = m.exact(abs)
3290 names[abs] = m.exact(abs)
3291
3291
3292 m = scmutil.matchfiles(repo, names)
3292 m = scmutil.matchfiles(repo, names)
3293
3293
3294 modified = set(changes.modified)
3294 modified = set(changes.modified)
3295 added = set(changes.added)
3295 added = set(changes.added)
3296 removed = set(changes.removed)
3296 removed = set(changes.removed)
3297 _deleted = set(changes.deleted)
3297 _deleted = set(changes.deleted)
3298 unknown = set(changes.unknown)
3298 unknown = set(changes.unknown)
3299 unknown.update(changes.ignored)
3299 unknown.update(changes.ignored)
3300 clean = set(changes.clean)
3300 clean = set(changes.clean)
3301 modadded = set()
3301 modadded = set()
3302
3302
3303 # We need to account for the state of the file in the dirstate,
3303 # We need to account for the state of the file in the dirstate,
3304 # even when we revert against something else than parent. This will
3304 # even when we revert against something else than parent. This will
3305 # slightly alter the behavior of revert (doing back up or not, delete
3305 # slightly alter the behavior of revert (doing back up or not, delete
3306 # or just forget etc).
3306 # or just forget etc).
3307 if parent == node:
3307 if parent == node:
3308 dsmodified = modified
3308 dsmodified = modified
3309 dsadded = added
3309 dsadded = added
3310 dsremoved = removed
3310 dsremoved = removed
3311 # store all local modifications, useful later for rename detection
3311 # store all local modifications, useful later for rename detection
3312 localchanges = dsmodified | dsadded
3312 localchanges = dsmodified | dsadded
3313 modified, added, removed = set(), set(), set()
3313 modified, added, removed = set(), set(), set()
3314 else:
3314 else:
3315 changes = repo.status(node1=parent, match=m)
3315 changes = repo.status(node1=parent, match=m)
3316 dsmodified = set(changes.modified)
3316 dsmodified = set(changes.modified)
3317 dsadded = set(changes.added)
3317 dsadded = set(changes.added)
3318 dsremoved = set(changes.removed)
3318 dsremoved = set(changes.removed)
3319 # store all local modifications, useful later for rename detection
3319 # store all local modifications, useful later for rename detection
3320 localchanges = dsmodified | dsadded
3320 localchanges = dsmodified | dsadded
3321
3321
3322 # only take into account for removes between wc and target
3322 # only take into account for removes between wc and target
3323 clean |= dsremoved - removed
3323 clean |= dsremoved - removed
3324 dsremoved &= removed
3324 dsremoved &= removed
3325 # distinct between dirstate remove and other
3325 # distinct between dirstate remove and other
3326 removed -= dsremoved
3326 removed -= dsremoved
3327
3327
3328 modadded = added & dsmodified
3328 modadded = added & dsmodified
3329 added -= modadded
3329 added -= modadded
3330
3330
3331 # tell newly modified apart.
3331 # tell newly modified apart.
3332 dsmodified &= modified
3332 dsmodified &= modified
3333 dsmodified |= modified & dsadded # dirstate added may need backup
3333 dsmodified |= modified & dsadded # dirstate added may need backup
3334 modified -= dsmodified
3334 modified -= dsmodified
3335
3335
3336 # We need to wait for some post-processing to update this set
3336 # We need to wait for some post-processing to update this set
3337 # before making the distinction. The dirstate will be used for
3337 # before making the distinction. The dirstate will be used for
3338 # that purpose.
3338 # that purpose.
3339 dsadded = added
3339 dsadded = added
3340
3340
3341 # in case of merge, files that are actually added can be reported as
3341 # in case of merge, files that are actually added can be reported as
3342 # modified, we need to post process the result
3342 # modified, we need to post process the result
3343 if p2 != repo.nullid:
3343 if p2 != repo.nullid:
3344 mergeadd = set(dsmodified)
3344 mergeadd = set(dsmodified)
3345 for path in dsmodified:
3345 for path in dsmodified:
3346 if path in mf:
3346 if path in mf:
3347 mergeadd.remove(path)
3347 mergeadd.remove(path)
3348 dsadded |= mergeadd
3348 dsadded |= mergeadd
3349 dsmodified -= mergeadd
3349 dsmodified -= mergeadd
3350
3350
3351 # if f is a rename, update `names` to also revert the source
3351 # if f is a rename, update `names` to also revert the source
3352 for f in localchanges:
3352 for f in localchanges:
3353 src = repo.dirstate.copied(f)
3353 src = repo.dirstate.copied(f)
3354 # XXX should we check for rename down to target node?
3354 # XXX should we check for rename down to target node?
3355 if (
3355 if (
3356 src
3356 src
3357 and src not in names
3357 and src not in names
3358 and repo.dirstate.get_entry(src).removed
3358 and repo.dirstate.get_entry(src).removed
3359 ):
3359 ):
3360 dsremoved.add(src)
3360 dsremoved.add(src)
3361 names[src] = True
3361 names[src] = True
3362
3362
3363 # determine the exact nature of the deleted changesets
3363 # determine the exact nature of the deleted changesets
3364 deladded = set(_deleted)
3364 deladded = set(_deleted)
3365 for path in _deleted:
3365 for path in _deleted:
3366 if path in mf:
3366 if path in mf:
3367 deladded.remove(path)
3367 deladded.remove(path)
3368 deleted = _deleted - deladded
3368 deleted = _deleted - deladded
3369
3369
3370 # distinguish between file to forget and the other
3370 # distinguish between file to forget and the other
3371 added = set()
3371 added = set()
3372 for abs in dsadded:
3372 for abs in dsadded:
3373 if not repo.dirstate.get_entry(abs).added:
3373 if not repo.dirstate.get_entry(abs).added:
3374 added.add(abs)
3374 added.add(abs)
3375 dsadded -= added
3375 dsadded -= added
3376
3376
3377 for abs in deladded:
3377 for abs in deladded:
3378 if repo.dirstate.get_entry(abs).added:
3378 if repo.dirstate.get_entry(abs).added:
3379 dsadded.add(abs)
3379 dsadded.add(abs)
3380 deladded -= dsadded
3380 deladded -= dsadded
3381
3381
3382 # For files marked as removed, we check if an unknown file is present at
3382 # For files marked as removed, we check if an unknown file is present at
3383 # the same path. If a such file exists it may need to be backed up.
3383 # the same path. If a such file exists it may need to be backed up.
3384 # Making the distinction at this stage helps have simpler backup
3384 # Making the distinction at this stage helps have simpler backup
3385 # logic.
3385 # logic.
3386 removunk = set()
3386 removunk = set()
3387 for abs in removed:
3387 for abs in removed:
3388 target = repo.wjoin(abs)
3388 target = repo.wjoin(abs)
3389 if os.path.lexists(target):
3389 if os.path.lexists(target):
3390 removunk.add(abs)
3390 removunk.add(abs)
3391 removed -= removunk
3391 removed -= removunk
3392
3392
3393 dsremovunk = set()
3393 dsremovunk = set()
3394 for abs in dsremoved:
3394 for abs in dsremoved:
3395 target = repo.wjoin(abs)
3395 target = repo.wjoin(abs)
3396 if os.path.lexists(target):
3396 if os.path.lexists(target):
3397 dsremovunk.add(abs)
3397 dsremovunk.add(abs)
3398 dsremoved -= dsremovunk
3398 dsremoved -= dsremovunk
3399
3399
3400 # action to be actually performed by revert
3400 # action to be actually performed by revert
3401 # (<list of file>, message>) tuple
3401 # (<list of file>, message>) tuple
3402 actions = {
3402 actions = {
3403 b'revert': ([], _(b'reverting %s\n')),
3403 b'revert': ([], _(b'reverting %s\n')),
3404 b'add': ([], _(b'adding %s\n')),
3404 b'add': ([], _(b'adding %s\n')),
3405 b'remove': ([], _(b'removing %s\n')),
3405 b'remove': ([], _(b'removing %s\n')),
3406 b'drop': ([], _(b'removing %s\n')),
3406 b'drop': ([], _(b'removing %s\n')),
3407 b'forget': ([], _(b'forgetting %s\n')),
3407 b'forget': ([], _(b'forgetting %s\n')),
3408 b'undelete': ([], _(b'undeleting %s\n')),
3408 b'undelete': ([], _(b'undeleting %s\n')),
3409 b'noop': (None, _(b'no changes needed to %s\n')),
3409 b'noop': (None, _(b'no changes needed to %s\n')),
3410 b'unknown': (None, _(b'file not managed: %s\n')),
3410 b'unknown': (None, _(b'file not managed: %s\n')),
3411 }
3411 }
3412
3412
3413 # "constant" that convey the backup strategy.
3413 # "constant" that convey the backup strategy.
3414 # All set to `discard` if `no-backup` is set do avoid checking
3414 # All set to `discard` if `no-backup` is set do avoid checking
3415 # no_backup lower in the code.
3415 # no_backup lower in the code.
3416 # These values are ordered for comparison purposes
3416 # These values are ordered for comparison purposes
3417 backupinteractive = 3 # do backup if interactively modified
3417 backupinteractive = 3 # do backup if interactively modified
3418 backup = 2 # unconditionally do backup
3418 backup = 2 # unconditionally do backup
3419 check = 1 # check if the existing file differs from target
3419 check = 1 # check if the existing file differs from target
3420 discard = 0 # never do backup
3420 discard = 0 # never do backup
3421 if opts.get(b'no_backup'):
3421 if opts.get(b'no_backup'):
3422 backupinteractive = backup = check = discard
3422 backupinteractive = backup = check = discard
3423 if interactive:
3423 if interactive:
3424 dsmodifiedbackup = backupinteractive
3424 dsmodifiedbackup = backupinteractive
3425 else:
3425 else:
3426 dsmodifiedbackup = backup
3426 dsmodifiedbackup = backup
3427 tobackup = set()
3427 tobackup = set()
3428
3428
3429 backupanddel = actions[b'remove']
3429 backupanddel = actions[b'remove']
3430 if not opts.get(b'no_backup'):
3430 if not opts.get(b'no_backup'):
3431 backupanddel = actions[b'drop']
3431 backupanddel = actions[b'drop']
3432
3432
3433 disptable = (
3433 disptable = (
3434 # dispatch table:
3434 # dispatch table:
3435 # file state
3435 # file state
3436 # action
3436 # action
3437 # make backup
3437 # make backup
3438 ## Sets that results that will change file on disk
3438 ## Sets that results that will change file on disk
3439 # Modified compared to target, no local change
3439 # Modified compared to target, no local change
3440 (modified, actions[b'revert'], discard),
3440 (modified, actions[b'revert'], discard),
3441 # Modified compared to target, but local file is deleted
3441 # Modified compared to target, but local file is deleted
3442 (deleted, actions[b'revert'], discard),
3442 (deleted, actions[b'revert'], discard),
3443 # Modified compared to target, local change
3443 # Modified compared to target, local change
3444 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3444 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3445 # Added since target
3445 # Added since target
3446 (added, actions[b'remove'], discard),
3446 (added, actions[b'remove'], discard),
3447 # Added in working directory
3447 # Added in working directory
3448 (dsadded, actions[b'forget'], discard),
3448 (dsadded, actions[b'forget'], discard),
3449 # Added since target, have local modification
3449 # Added since target, have local modification
3450 (modadded, backupanddel, backup),
3450 (modadded, backupanddel, backup),
3451 # Added since target but file is missing in working directory
3451 # Added since target but file is missing in working directory
3452 (deladded, actions[b'drop'], discard),
3452 (deladded, actions[b'drop'], discard),
3453 # Removed since target, before working copy parent
3453 # Removed since target, before working copy parent
3454 (removed, actions[b'add'], discard),
3454 (removed, actions[b'add'], discard),
3455 # Same as `removed` but an unknown file exists at the same path
3455 # Same as `removed` but an unknown file exists at the same path
3456 (removunk, actions[b'add'], check),
3456 (removunk, actions[b'add'], check),
3457 # Removed since targe, marked as such in working copy parent
3457 # Removed since targe, marked as such in working copy parent
3458 (dsremoved, actions[b'undelete'], discard),
3458 (dsremoved, actions[b'undelete'], discard),
3459 # Same as `dsremoved` but an unknown file exists at the same path
3459 # Same as `dsremoved` but an unknown file exists at the same path
3460 (dsremovunk, actions[b'undelete'], check),
3460 (dsremovunk, actions[b'undelete'], check),
3461 ## the following sets does not result in any file changes
3461 ## the following sets does not result in any file changes
3462 # File with no modification
3462 # File with no modification
3463 (clean, actions[b'noop'], discard),
3463 (clean, actions[b'noop'], discard),
3464 # Existing file, not tracked anywhere
3464 # Existing file, not tracked anywhere
3465 (unknown, actions[b'unknown'], discard),
3465 (unknown, actions[b'unknown'], discard),
3466 )
3466 )
3467
3467
3468 for abs, exact in sorted(names.items()):
3468 for abs, exact in sorted(names.items()):
3469 # target file to be touch on disk (relative to cwd)
3469 # target file to be touch on disk (relative to cwd)
3470 target = repo.wjoin(abs)
3470 target = repo.wjoin(abs)
3471 # search the entry in the dispatch table.
3471 # search the entry in the dispatch table.
3472 # if the file is in any of these sets, it was touched in the working
3472 # if the file is in any of these sets, it was touched in the working
3473 # directory parent and we are sure it needs to be reverted.
3473 # directory parent and we are sure it needs to be reverted.
3474 for table, (xlist, msg), dobackup in disptable:
3474 for table, (xlist, msg), dobackup in disptable:
3475 if abs not in table:
3475 if abs not in table:
3476 continue
3476 continue
3477 if xlist is not None:
3477 if xlist is not None:
3478 xlist.append(abs)
3478 xlist.append(abs)
3479 if dobackup:
3479 if dobackup:
3480 # If in interactive mode, don't automatically create
3480 # If in interactive mode, don't automatically create
3481 # .orig files (issue4793)
3481 # .orig files (issue4793)
3482 if dobackup == backupinteractive:
3482 if dobackup == backupinteractive:
3483 tobackup.add(abs)
3483 tobackup.add(abs)
3484 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3484 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3485 absbakname = scmutil.backuppath(ui, repo, abs)
3485 absbakname = scmutil.backuppath(ui, repo, abs)
3486 bakname = os.path.relpath(
3486 bakname = os.path.relpath(
3487 absbakname, start=repo.root
3487 absbakname, start=repo.root
3488 )
3488 )
3489 ui.note(
3489 ui.note(
3490 _(b'saving current version of %s as %s\n')
3490 _(b'saving current version of %s as %s\n')
3491 % (uipathfn(abs), uipathfn(bakname))
3491 % (uipathfn(abs), uipathfn(bakname))
3492 )
3492 )
3493 if not opts.get(b'dry_run'):
3493 if not opts.get(b'dry_run'):
3494 if interactive:
3494 if interactive:
3495 util.copyfile(target, absbakname)
3495 util.copyfile(target, absbakname)
3496 else:
3496 else:
3497 util.rename(target, absbakname)
3497 util.rename(target, absbakname)
3498 if opts.get(b'dry_run'):
3498 if opts.get(b'dry_run'):
3499 if ui.verbose or not exact:
3499 if ui.verbose or not exact:
3500 ui.status(msg % uipathfn(abs))
3500 ui.status(msg % uipathfn(abs))
3501 elif exact:
3501 elif exact:
3502 ui.warn(msg % uipathfn(abs))
3502 ui.warn(msg % uipathfn(abs))
3503 break
3503 break
3504
3504
3505 if not opts.get(b'dry_run'):
3505 if not opts.get(b'dry_run'):
3506 needdata = (b'revert', b'add', b'undelete')
3506 needdata = (b'revert', b'add', b'undelete')
3507 oplist = [actions[name][0] for name in needdata]
3507 oplist = [actions[name][0] for name in needdata]
3508 prefetch = scmutil.prefetchfiles
3508 prefetch = scmutil.prefetchfiles
3509 matchfiles = scmutil.matchfiles(
3509 matchfiles = scmutil.matchfiles(
3510 repo, [f for sublist in oplist for f in sublist]
3510 repo, [f for sublist in oplist for f in sublist]
3511 )
3511 )
3512 prefetch(
3512 prefetch(
3513 repo,
3513 repo,
3514 [(ctx.rev(), matchfiles)],
3514 [(ctx.rev(), matchfiles)],
3515 )
3515 )
3516 match = scmutil.match(repo[None], pats)
3516 match = scmutil.match(repo[None], pats)
3517 _performrevert(
3517 _performrevert(
3518 repo,
3518 repo,
3519 ctx,
3519 ctx,
3520 names,
3520 names,
3521 uipathfn,
3521 uipathfn,
3522 actions,
3522 actions,
3523 match,
3523 match,
3524 interactive,
3524 interactive,
3525 tobackup,
3525 tobackup,
3526 )
3526 )
3527
3527
3528 if targetsubs:
3528 if targetsubs:
3529 # Revert the subrepos on the revert list
3529 # Revert the subrepos on the revert list
3530 for sub in targetsubs:
3530 for sub in targetsubs:
3531 try:
3531 try:
3532 wctx.sub(sub).revert(
3532 wctx.sub(sub).revert(
3533 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3533 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3534 )
3534 )
3535 except KeyError:
3535 except KeyError:
3536 raise error.Abort(
3536 raise error.Abort(
3537 b"subrepository '%s' does not exist in %s!"
3537 b"subrepository '%s' does not exist in %s!"
3538 % (sub, short(ctx.node()))
3538 % (sub, short(ctx.node()))
3539 )
3539 )
3540
3540
3541
3541
def _performrevert(
    repo,
    ctx,
    names,
    uipathfn,
    actions,
    match,
    interactive=False,
    tobackup=None,
):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    repo        - the local repository
    ctx         - changectx to revert to
    names       - dict of filename -> bool, True when the file was named
                  exactly on the command line (controls message verbosity)
    uipathfn    - function turning a repo-relative path into a UI path
    actions     - dict of action name -> ([files], message) as built by
                  the revert planning code
    match       - matcher covering the files to operate on
    interactive - when True, prompt per-file / per-hunk
    tobackup    - set of files that need an interactive-mode backup
                  (may be None; only consulted in the interactive branch)
    """
    parent, p2 = repo.dirstate.parents()
    node = ctx.node()
    # files the user declined to touch during interactive prompting
    excluded_files = []

    def checkout(f):
        # write f's content (and flags) as of ctx into the working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # best-effort removal from disk, then mark untracked in the dirstate
        try:
            rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
            repo.wvfs.unlinkpath(f, rmdir=rmdir)
        except OSError:
            # file may already be gone; the dirstate update below still applies
            pass
        repo.dirstate.set_untracked(f)

    def prntstatusmsg(action, f):
        # exactly-named files are silent unless --verbose
        exact = names[f]
        if repo.ui.verbose or not exact:
            repo.ui.status(actions[action][1] % uipathfn(f))

    # validate every path before touching it on disk
    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions[b'forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'forget', f)
                repo.dirstate.set_untracked(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'forget', f)
            repo.dirstate.set_untracked(f)
    for f in actions[b'remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'remove', f)
                doremove(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'remove', f)
            doremove(f)
    for f in actions[b'drop'][0]:
        # 'drop' only untracks; the on-disk file is left alone
        audit_path(f)
        prntstatusmsg(b'drop', f)
        repo.dirstate.set_untracked(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != repo.nullid:
            normal = repo.dirstate.set_tracked
        else:
            normal = repo.dirstate.set_clean

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
        m = scmutil.matchfiles(repo, torevert)
        diffopts = patch.difffeatureopts(
            repo.ui,
            whitespace=True,
            section=b'commands',
            configprefix=b'revert.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        operation = b'apply'
        if node == parent:
            if repo.ui.configbool(
                b'experimental', b'revert.interactive.select-to-keep'
            ):
                operation = b'keep'
            else:
                operation = b'discard'

        # diff direction depends on whether selected hunks are applied or
        # discarded (the patch below is applied to the working directory)
        if operation == b'apply':
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        else:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        original_headers = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(
                repo.ui, original_headers, match, operation=operation
            )
            if operation == b'discard':
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)

        # FIXME: when doing an interactive revert of a copy, there's no way of
        # performing a partial revert of the added file, the only option is
        # "remove added file <name> (Yn)?", so we don't need to worry about the
        # alsorestore value. Ideally we'd be able to partially revert
        # copied/renamed files.
        newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(chunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
            prntstatusmsg(b'revert', f)
        files = set()
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.backuppath(repo.ui, repo, abs)
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
                if abs not in files:
                    files.add(abs)
                    # in 'keep' mode, start from ctx's content and let the
                    # (reversed) hunks re-introduce what the user kept
                    if operation == b'keep':
                        checkout(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(pycompat.bytestr(err))
        del fp
    else:
        for f in actions[b'revert'][0]:
            prntstatusmsg(b'revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions[b'add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            prntstatusmsg(b'add', f)
            checkout(f)
            repo.dirstate.set_tracked(f)

    normal = repo.dirstate.set_tracked
    if node == parent and p2 == repo.nullid:
        normal = repo.dirstate.set_clean
    for f in actions[b'undelete'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
            )
            if choice == 0:
                prntstatusmsg(b'undelete', f)
                checkout(f)
                normal(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'undelete', f)
            checkout(f)
            normal(f)

    # restore copy records for files reintroduced/reverted above
    copied = copies.pathcopies(repo[parent], ctx)

    for f in (
        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
    ):
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3738
3738
3739
3739
# Extension hook points. Extensions register callables on these util.hooks()
# instances; core commands invoke them at the documented call sites.

# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
3757
3757
3758
3758
def checkunfinished(repo, commit=False, skipmerge=False):
    """Abort if an unfinished multistep operation (like graft) is found.

    Non-clearable states are examined before clearable ones, so e.g. an
    interrupted rebase takes precedence over an interrupted update. It's
    probably good to check this right before bailifchanged().
    """
    # First pass: operations that cannot simply be cleared away.
    for op in statemod._unfinishedstates:
        ignorable = (
            op._clearable
            or (commit and op._allowcommit)
            or op._reportonly
        )
        if not ignorable and op.isunfinished(repo):
            raise error.StateError(op.msg(), hint=op.hint())

    # Second pass: clearable operations.
    for op in statemod._unfinishedstates:
        ignorable = (
            not op._clearable
            or (commit and op._allowcommit)
            or (op._opname == b'merge' and skipmerge)
            or op._reportonly
        )
        if not ignorable and op.isunfinished(repo):
            raise error.StateError(op.msg(), hint=op.hint())
3786
3786
3787
3787
def clearunfinished(repo):
    """Check for unfinished operations (as above) and clear the clearable ones.

    A non-clearable unfinished operation still aborts with a StateError.
    """
    # Abort on unfinished operations that cannot be cleared.
    for op in statemod._unfinishedstates:
        if op._reportonly:
            continue
        if not op._clearable and op.isunfinished(repo):
            raise error.StateError(op.msg(), hint=op.hint())

    # Remove the state files of clearable unfinished operations.
    for op in statemod._unfinishedstates:
        if op._opname == b'merge' or op._reportonly:
            continue
        if op._clearable and op.isunfinished(repo):
            util.unlink(repo.vfs.join(op._fname))
3803
3803
3804
3804
def getunfinishedstate(repo):
    """Checks for unfinished operations and returns statecheck object
    for it"""
    # Return the first in-progress state, or None when nothing is unfinished.
    return next(
        (s for s in statemod._unfinishedstates if s.isunfinished(repo)),
        None,
    )
3812
3812
3813
3813
def howtocontinue(repo):
    """Check for an unfinished operation and return the command to finish
    it.

    statemod._unfinishedstates is scanned for an in-progress operation; when
    one supports continuing, the corresponding "continue: ..." message is
    produced.

    Returns a (msg, warning) tuple: 'msg' is a string (or None) and 'warning'
    is a boolean (or None) saying whether the message should be a warning.
    """
    contmsg = _(b"continue: %s")
    for op in statemod._unfinishedstates:
        if op._continueflag and op.isunfinished(repo):
            return contmsg % op.continuemsg(), True
    # No resumable operation: suggest committing if the wdir is dirty.
    dirty = repo[None].dirty(missing=True, merge=False, branch=False)
    if dirty:
        return contmsg % _(b"hg commit"), False
    return None, None
3834
3834
3835
3835
def checkafterresolved(repo):
    """Inform the user about the next action after completing hg resolve

    If there's an unfinished operation that supports a continue flag,
    howtocontinue will yield repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    """
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter(b"%s\n" % msg)
3850
3850
3851
3851
def wrongtooltocontinue(repo, task):
    """Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    """
    msg, warning = howtocontinue(repo)
    # Only surface the continuation message as a hint when it is a warning.
    hint = msg if warning else None
    raise error.StateError(_(b'no %s in progress') % task, hint=hint)
3866
3866
3867
3867
3868 def abortgraft(ui, repo, graftstate):
3868 def abortgraft(ui, repo, graftstate):
3869 """abort the interrupted graft and rollbacks to the state before interrupted
3869 """abort the interrupted graft and rollbacks to the state before interrupted
3870 graft"""
3870 graft"""
3871 if not graftstate.exists():
3871 if not graftstate.exists():
3872 raise error.StateError(_(b"no interrupted graft to abort"))
3872 raise error.StateError(_(b"no interrupted graft to abort"))
3873 statedata = readgraftstate(repo, graftstate)
3873 statedata = readgraftstate(repo, graftstate)
3874 newnodes = statedata.get(b'newnodes')
3874 newnodes = statedata.get(b'newnodes')
3875 if newnodes is None:
3875 if newnodes is None:
3876 # and old graft state which does not have all the data required to abort
3876 # and old graft state which does not have all the data required to abort
3877 # the graft
3877 # the graft
3878 raise error.Abort(_(b"cannot abort using an old graftstate"))
3878 raise error.Abort(_(b"cannot abort using an old graftstate"))
3879
3879
3880 # changeset from which graft operation was started
3880 # changeset from which graft operation was started
3881 if len(newnodes) > 0:
3881 if len(newnodes) > 0:
3882 startctx = repo[newnodes[0]].p1()
3882 startctx = repo[newnodes[0]].p1()
3883 else:
3883 else:
3884 startctx = repo[b'.']
3884 startctx = repo[b'.']
3885 # whether to strip or not
3885 # whether to strip or not
3886 cleanup = False
3886 cleanup = False
3887
3887
3888 if newnodes:
3888 if newnodes:
3889 newnodes = [repo[r].rev() for r in newnodes]
3889 newnodes = [repo[r].rev() for r in newnodes]
3890 cleanup = True
3890 cleanup = True
3891 # checking that none of the newnodes turned public or is public
3891 # checking that none of the newnodes turned public or is public
3892 immutable = [c for c in newnodes if not repo[c].mutable()]
3892 immutable = [c for c in newnodes if not repo[c].mutable()]
3893 if immutable:
3893 if immutable:
3894 repo.ui.warn(
3894 repo.ui.warn(
3895 _(b"cannot clean up public changesets %s\n")
3895 _(b"cannot clean up public changesets %s\n")
3896 % b', '.join(bytes(repo[r]) for r in immutable),
3896 % b', '.join(bytes(repo[r]) for r in immutable),
3897 hint=_(b"see 'hg help phases' for details"),
3897 hint=_(b"see 'hg help phases' for details"),
3898 )
3898 )
3899 cleanup = False
3899 cleanup = False
3900
3900
3901 # checking that no new nodes are created on top of grafted revs
3901 # checking that no new nodes are created on top of grafted revs
3902 desc = set(repo.changelog.descendants(newnodes))
3902 desc = set(repo.changelog.descendants(newnodes))
3903 if desc - set(newnodes):
3903 if desc - set(newnodes):
3904 repo.ui.warn(
3904 repo.ui.warn(
3905 _(
3905 _(
3906 b"new changesets detected on destination "
3906 b"new changesets detected on destination "
3907 b"branch, can't strip\n"
3907 b"branch, can't strip\n"
3908 )
3908 )
3909 )
3909 )
3910 cleanup = False
3910 cleanup = False
3911
3911
3912 if cleanup:
3912 if cleanup:
3913 with repo.wlock(), repo.lock():
3913 with repo.wlock(), repo.lock():
3914 mergemod.clean_update(startctx)
3914 mergemod.clean_update(startctx)
3915 # stripping the new nodes created
3915 # stripping the new nodes created
3916 strippoints = [
3916 strippoints = [
3917 c.node() for c in repo.set(b"roots(%ld)", newnodes)
3917 c.node() for c in repo.set(b"roots(%ld)", newnodes)
3918 ]
3918 ]
3919 repair.strip(repo.ui, repo, strippoints, backup=False)
3919 repair.strip(repo.ui, repo, strippoints, backup=False)
3920
3920
3921 if not cleanup:
3921 if not cleanup:
3922 # we don't update to the startnode if we can't strip
3922 # we don't update to the startnode if we can't strip
3923 startctx = repo[b'.']
3923 startctx = repo[b'.']
3924 mergemod.clean_update(startctx)
3924 mergemod.clean_update(startctx)
3925
3925
3926 ui.status(_(b"graft aborted\n"))
3926 ui.status(_(b"graft aborted\n"))
3927 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
3927 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
3928 graftstate.delete()
3928 graftstate.delete()
3929 return 0
3929 return 0
3930
3930
3931
3931
3932 def readgraftstate(repo, graftstate):
3932 def readgraftstate(repo, graftstate):
3933 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
3933 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
3934 """read the graft state file and return a dict of the data stored in it"""
3934 """read the graft state file and return a dict of the data stored in it"""
3935 try:
3935 try:
3936 return graftstate.read()
3936 return graftstate.read()
3937 except error.CorruptedState:
3937 except error.CorruptedState:
3938 nodes = repo.vfs.read(b'graftstate').splitlines()
3938 nodes = repo.vfs.read(b'graftstate').splitlines()
3939 return {b'nodes': nodes}
3939 return {b'nodes': nodes}
3940
3940
3941
3941
3942 def hgabortgraft(ui, repo):
3942 def hgabortgraft(ui, repo):
3943 """abort logic for aborting graft using 'hg abort'"""
3943 """abort logic for aborting graft using 'hg abort'"""
3944 with repo.wlock():
3944 with repo.wlock():
3945 graftstate = statemod.cmdstate(repo, b'graftstate')
3945 graftstate = statemod.cmdstate(repo, b'graftstate')
3946 return abortgraft(ui, repo, graftstate)
3946 return abortgraft(ui, repo, graftstate)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now